diff --git a/.agents/skills/frontend-query-mutation/references/runtime-rules.md b/.agents/skills/frontend-query-mutation/references/runtime-rules.md
index 02e8b9c2b6..73d6fbdded 100644
--- a/.agents/skills/frontend-query-mutation/references/runtime-rules.md
+++ b/.agents/skills/frontend-query-mutation/references/runtime-rules.md
@@ -64,7 +64,7 @@ export const useUpdateAccessMode = () => {
// Component only adds UI behavior.
updateAccessMode({ appId, mode }, {
- onSuccess: () => Toast.notify({ type: 'success', message: '...' }),
+ onSuccess: () => toast.success('...'),
})
// Avoid putting invalidation knowledge in the component.
@@ -114,10 +114,7 @@ try {
router.push(`/orders/${order.id}`)
}
catch (error) {
- Toast.notify({
- type: 'error',
- message: error instanceof Error ? error.message : 'Unknown error',
- })
+ toast.error(error instanceof Error ? error.message : 'Unknown error')
}
```
diff --git a/.github/workflows/pyrefly-diff.yml b/.github/workflows/pyrefly-diff.yml
index 0b2a7b8e9e..8623d35b04 100644
--- a/.github/workflows/pyrefly-diff.yml
+++ b/.github/workflows/pyrefly-diff.yml
@@ -50,6 +50,17 @@ jobs:
run: |
diff -u /tmp/pyrefly_base.txt /tmp/pyrefly_pr.txt > pyrefly_diff.txt || true
+      - name: Check if pyrefly output changed
+        id: line_count_check
+        run: |
+          # Line-count equality can miss changes where one finding replaces
+          # another; test the actual diff produced by the previous step instead.
+          if [ -s pyrefly_diff.txt ]; then
+            echo "same=false" >> $GITHUB_OUTPUT
+          else
+            echo "same=true" >> $GITHUB_OUTPUT
+          fi
+
- name: Save PR number
run: |
echo ${{ github.event.pull_request.number }} > pr_number.txt
@@ -63,7 +74,7 @@ jobs:
pr_number.txt
- name: Comment PR with pyrefly diff
- if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
+ if: ${{ github.event.pull_request.head.repo.full_name == github.repository && steps.line_count_check.outputs.same == 'false' }}
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/translate-i18n-claude.yml b/.github/workflows/translate-i18n-claude.yml
index aaf51aa606..33af4f36fd 100644
--- a/.github/workflows/translate-i18n-claude.yml
+++ b/.github/workflows/translate-i18n-claude.yml
@@ -1,10 +1,10 @@
name: Translate i18n Files with Claude Code
+# Note: claude-code-action doesn't support push events directly.
+# Push events are bridged by trigger-i18n-sync.yml via repository_dispatch.
on:
- push:
- branches: [main]
- paths:
- - 'web/i18n/en-US/*.json'
+ repository_dispatch:
+ types: [i18n-sync]
workflow_dispatch:
inputs:
files:
@@ -30,7 +30,7 @@ permissions:
concurrency:
group: translate-i18n-${{ github.event_name }}-${{ github.ref }}
- cancel-in-progress: ${{ github.event_name == 'push' }}
+ cancel-in-progress: false
jobs:
translate:
@@ -67,19 +67,113 @@ jobs:
}
" web/i18n-config/languages.ts | sed 's/[[:space:]]*$//')
- if [ "${{ github.event_name }}" = "push" ]; then
- BASE_SHA="${{ github.event.before }}"
- if [ -z "$BASE_SHA" ] || [ "$BASE_SHA" = "0000000000000000000000000000000000000000" ]; then
- BASE_SHA=$(git rev-parse HEAD~1 2>/dev/null || true)
- fi
- HEAD_SHA="${{ github.sha }}"
- if [ -n "$BASE_SHA" ]; then
- CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' 2>/dev/null | sed -n 's@^.*/@@p' | sed 's/\.json$//' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
- else
- CHANGED_FILES=$(find web/i18n/en-US -maxdepth 1 -type f -name '*.json' -print | sed -n 's@^.*/@@p' | sed 's/\.json$//' | sort | tr '\n' ' ' | sed 's/[[:space:]]*$//')
- fi
+ generate_changes_json() {
+ node <<'NODE'
+ const { execFileSync } = require('node:child_process')
+ const fs = require('node:fs')
+ const path = require('node:path')
+
+ const repoRoot = process.cwd()
+ const baseSha = process.env.BASE_SHA || ''
+ const headSha = process.env.HEAD_SHA || ''
+ const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
+
+ const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
+
+ const readCurrentJson = (fileStem) => {
+ const filePath = englishPath(fileStem)
+ if (!fs.existsSync(filePath))
+ return null
+
+ return JSON.parse(fs.readFileSync(filePath, 'utf8'))
+ }
+
+ const readBaseJson = (fileStem) => {
+ if (!baseSha)
+ return null
+
+ try {
+ const relativePath = `web/i18n/en-US/${fileStem}.json`
+ const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
+ return JSON.parse(content)
+ }
+ catch (error) {
+ return null
+ }
+ }
+
+ const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
+
+ const changes = {}
+
+ for (const fileStem of files) {
+ const currentJson = readCurrentJson(fileStem)
+ const beforeJson = readBaseJson(fileStem) || {}
+ const afterJson = currentJson || {}
+ const added = {}
+ const updated = {}
+ const deleted = []
+
+ for (const [key, value] of Object.entries(afterJson)) {
+ if (!(key in beforeJson)) {
+ added[key] = value
+ continue
+ }
+
+ if (!compareJson(beforeJson[key], value)) {
+ updated[key] = {
+ before: beforeJson[key],
+ after: value,
+ }
+ }
+ }
+
+ for (const key of Object.keys(beforeJson)) {
+ if (!(key in afterJson))
+ deleted.push(key)
+ }
+
+ changes[fileStem] = {
+ fileDeleted: currentJson === null,
+ added,
+ updated,
+ deleted,
+ }
+ }
+
+ fs.writeFileSync(
+ '/tmp/i18n-changes.json',
+ JSON.stringify({
+ baseSha,
+ headSha,
+ files,
+ changes,
+ })
+ )
+ NODE
+ }
+
+ if [ "${{ github.event_name }}" = "repository_dispatch" ]; then
+ BASE_SHA="${{ github.event.client_payload.base_sha }}"
+ HEAD_SHA="${{ github.event.client_payload.head_sha }}"
+ CHANGED_FILES="${{ github.event.client_payload.changed_files }}"
TARGET_LANGS="$DEFAULT_TARGET_LANGS"
- SYNC_MODE="incremental"
+ SYNC_MODE="${{ github.event.client_payload.sync_mode || 'incremental' }}"
+
+ if [ -n "${{ github.event.client_payload.changes_base64 }}" ]; then
+ printf '%s' '${{ github.event.client_payload.changes_base64 }}' | base64 -d > /tmp/i18n-changes.json
+ CHANGES_AVAILABLE="true"
+ CHANGES_SOURCE="embedded"
+ elif [ -n "$BASE_SHA" ] && [ -n "$CHANGED_FILES" ]; then
+ export BASE_SHA HEAD_SHA CHANGED_FILES
+ generate_changes_json
+ CHANGES_AVAILABLE="true"
+ CHANGES_SOURCE="recomputed"
+ else
+ printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
+ CHANGES_AVAILABLE="false"
+ CHANGES_SOURCE="unavailable"
+ fi
else
BASE_SHA=""
HEAD_SHA=$(git rev-parse HEAD)
@@ -104,6 +198,17 @@ jobs:
else
CHANGED_FILES=""
fi
+
+ if [ "$SYNC_MODE" = "incremental" ] && [ -n "$CHANGED_FILES" ]; then
+ export BASE_SHA HEAD_SHA CHANGED_FILES
+ generate_changes_json
+ CHANGES_AVAILABLE="true"
+ CHANGES_SOURCE="local"
+ else
+ printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
+ CHANGES_AVAILABLE="false"
+ CHANGES_SOURCE="unavailable"
+ fi
fi
FILE_ARGS=""
@@ -123,6 +228,8 @@ jobs:
echo "CHANGED_FILES=$CHANGED_FILES"
echo "TARGET_LANGS=$TARGET_LANGS"
echo "SYNC_MODE=$SYNC_MODE"
+ echo "CHANGES_AVAILABLE=$CHANGES_AVAILABLE"
+ echo "CHANGES_SOURCE=$CHANGES_SOURCE"
echo "FILE_ARGS=$FILE_ARGS"
echo "LANG_ARGS=$LANG_ARGS"
} >> "$GITHUB_OUTPUT"
@@ -141,7 +248,7 @@ jobs:
show_full_output: ${{ github.event_name == 'workflow_dispatch' }}
prompt: |
You are the i18n sync agent for the Dify repository.
- Your job is to keep translations synchronized with the English source files under `${{ github.workspace }}/web/i18n/en-US/`, then open a PR with the result.
+ Your job is to keep translations synchronized with the English source files under `${{ github.workspace }}/web/i18n/en-US/`.
Use absolute paths at all times:
- Repo root: `${{ github.workspace }}`
@@ -156,12 +263,15 @@ jobs:
- Head SHA: `${{ steps.context.outputs.HEAD_SHA }}`
- Scoped file args: `${{ steps.context.outputs.FILE_ARGS }}`
- Scoped language args: `${{ steps.context.outputs.LANG_ARGS }}`
+ - Structured change set available: `${{ steps.context.outputs.CHANGES_AVAILABLE }}`
+ - Structured change set source: `${{ steps.context.outputs.CHANGES_SOURCE }}`
+ - Structured change set file: `/tmp/i18n-changes.json`
Tool rules:
- Use Read for repository files.
- Use Edit for JSON updates.
- - Use Bash only for `git`, `gh`, `pnpm`, and `date`.
- - Run Bash commands one by one. Do not combine commands with `&&`, `||`, pipes, or command substitution.
+ - Use Bash only for `pnpm`.
+ - Do not use Bash for `git`, `gh`, or branch management.
Required execution plan:
1. Resolve target languages.
@@ -172,27 +282,25 @@ jobs:
- Only process the resolved target languages, never `en-US`.
- Do not touch unrelated i18n files.
- Do not modify `${{ github.workspace }}/web/i18n/en-US/`.
- 3. Detect English changes per file.
- - Read the current English JSON file for each file in scope.
- - If sync mode is `incremental` and `Base SHA` is not empty, run:
- `git -C ${{ github.workspace }} show :web/i18n/en-US/.json`
- - If sync mode is `full` or `Base SHA` is empty, skip historical comparison and treat the current English file as the only source of truth for structural sync.
- - If the file did not exist at Base SHA, treat all current keys as ADD.
- - Compare previous and current English JSON to identify:
- - ADD: key only in current
- - UPDATE: key exists in both and the English value changed
- - DELETE: key only in previous
- - Do not rely on a truncated diff file.
+ 3. Resolve source changes.
+ - If `Structured change set available` is `true`, read `/tmp/i18n-changes.json` and use it as the source of truth for file-level and key-level changes.
+ - For each file entry:
+ - `added` contains new English keys that need translations.
+ - `updated` contains stale keys whose English source changed; re-translate using the `after` value.
+ - `deleted` contains keys that should be removed from locale files.
+ - `fileDeleted: true` means the English file no longer exists; remove the matching locale file if present.
+ - Read the current English JSON file for any file that still exists so wording, placeholders, and surrounding terminology stay accurate.
+ - If `Structured change set available` is `false`, treat this as a scoped full sync and use the current English files plus scoped checks as the source of truth.
4. Run a scoped pre-check before editing:
- `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- Use this command as the source of truth for missing and extra keys inside the current scope.
5. Apply translations.
- For every target language and scoped file:
+ - If `fileDeleted` is `true`, remove the locale file if it exists and skip the rest of that file.
- If the locale file does not exist yet, create it with `Write` and then continue with `Edit` as needed.
- ADD missing keys.
- UPDATE stale translations when the English value changed.
- DELETE removed keys. Prefer `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }} --auto-remove` for extra keys so deletions stay in scope.
- - For `zh-Hans` and `ja-JP`, if the locale file also changed between Base SHA and Head SHA, preserve manual translations unless they are clearly wrong for the new English value. If in doubt, keep the manual translation.
- Preserve placeholders exactly: `{{variable}}`, `${variable}`, HTML tags, component tags, and variable names.
- Match the existing terminology and register used by each locale.
- Prefer one Edit per file when stable, but prioritize correctness over batching.
@@ -200,14 +308,119 @@ jobs:
- Run `pnpm --dir ${{ github.workspace }}/web lint:fix --quiet -- `
- Run `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- If verification fails, fix the remaining problems before continuing.
- 7. Create a PR only when there are changes in `web/i18n/`.
- - Check `git -C ${{ github.workspace }} status --porcelain -- web/i18n/`
- - Create branch `chore/i18n-sync-`
- - Commit message: `chore(i18n): sync translations with en-US`
- - Push the branch and open a PR against `main`
- - PR title: `chore(i18n): sync translations with en-US`
- - PR body: summarize files, languages, sync mode, and verification commands
- 8. If there are no translation changes after verification, do not create a branch, commit, or PR.
+ 7. Stop after the scoped locale files are updated and verification passes.
+ - Do not create branches, commits, or pull requests.
claude_args: |
- --max-turns 80
- --allowedTools "Read,Write,Edit,Bash(git *),Bash(git:*),Bash(gh *),Bash(gh:*),Bash(pnpm *),Bash(pnpm:*),Bash(date *),Bash(date:*),Glob,Grep"
+ --max-turns 120
+ --allowedTools "Read,Write,Edit,Bash(pnpm *),Bash(pnpm:*),Glob,Grep"
+
+ - name: Prepare branch metadata
+ id: pr_meta
+ if: steps.context.outputs.CHANGED_FILES != ''
+ shell: bash
+ run: |
+ if [ -z "$(git -C "${{ github.workspace }}" status --porcelain -- web/i18n/)" ]; then
+ echo "has_changes=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ SCOPE_HASH=$(printf '%s|%s|%s' "${{ steps.context.outputs.CHANGED_FILES }}" "${{ steps.context.outputs.TARGET_LANGS }}" "${{ steps.context.outputs.SYNC_MODE }}" | sha256sum | cut -c1-8)
+ HEAD_SHORT=$(printf '%s' "${{ steps.context.outputs.HEAD_SHA }}" | cut -c1-12)
+ BRANCH_NAME="chore/i18n-sync-${HEAD_SHORT}-${SCOPE_HASH}"
+
+ {
+ echo "has_changes=true"
+ echo "branch_name=$BRANCH_NAME"
+ } >> "$GITHUB_OUTPUT"
+
+ - name: Commit translation changes
+ if: steps.pr_meta.outputs.has_changes == 'true'
+ shell: bash
+ run: |
+ git -C "${{ github.workspace }}" checkout -B "${{ steps.pr_meta.outputs.branch_name }}"
+ git -C "${{ github.workspace }}" add web/i18n/
+          git -C "${{ github.workspace }}" -c user.name="github-actions[bot]" -c user.email="41898282+github-actions[bot]@users.noreply.github.com" commit -m "chore(i18n): sync translations with en-US"
+
+ - name: Push translation branch
+ if: steps.pr_meta.outputs.has_changes == 'true'
+ shell: bash
+ run: |
+ if git -C "${{ github.workspace }}" ls-remote --exit-code --heads origin "${{ steps.pr_meta.outputs.branch_name }}" >/dev/null 2>&1; then
+            git -C "${{ github.workspace }}" push --force origin "${{ steps.pr_meta.outputs.branch_name }}" # --force-with-lease would fail here: no remote-tracking ref for this branch was ever fetched
+ else
+ git -C "${{ github.workspace }}" push --set-upstream origin "${{ steps.pr_meta.outputs.branch_name }}"
+ fi
+
+ - name: Create or update translation PR
+ if: steps.pr_meta.outputs.has_changes == 'true'
+ env:
+ BRANCH_NAME: ${{ steps.pr_meta.outputs.branch_name }}
+ FILES_IN_SCOPE: ${{ steps.context.outputs.CHANGED_FILES }}
+ TARGET_LANGS: ${{ steps.context.outputs.TARGET_LANGS }}
+ SYNC_MODE: ${{ steps.context.outputs.SYNC_MODE }}
+ CHANGES_SOURCE: ${{ steps.context.outputs.CHANGES_SOURCE }}
+ BASE_SHA: ${{ steps.context.outputs.BASE_SHA }}
+ HEAD_SHA: ${{ steps.context.outputs.HEAD_SHA }}
+ REPO_NAME: ${{ github.repository }}
+            GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} # required by the gh CLI calls below
+ run: |
+ PR_BODY_FILE=/tmp/i18n-pr-body.md
+          LANG_COUNT=$(printf '%s\n' "$TARGET_LANGS" | wc -w | tr -d ' ')
+          if [ -z "$LANG_COUNT" ]; then
+            LANG_COUNT="0"
+          fi
+ export LANG_COUNT
+
+ node <<'NODE' > "$PR_BODY_FILE"
+ const fs = require('node:fs')
+
+ const changesPath = '/tmp/i18n-changes.json'
+ const changes = fs.existsSync(changesPath)
+ ? JSON.parse(fs.readFileSync(changesPath, 'utf8'))
+ : { changes: {} }
+
+ const filesInScope = (process.env.FILES_IN_SCOPE || '').split(/\s+/).filter(Boolean)
+ const lines = [
+ '## Summary',
+ '',
+ `- **Files synced**: \`${process.env.FILES_IN_SCOPE || ''}\``,
+ `- **Languages updated**: ${process.env.TARGET_LANGS || ''} (${process.env.LANG_COUNT} languages)`,
+ `- **Sync mode**: ${process.env.SYNC_MODE}${process.env.BASE_SHA ? ` (base: \`${process.env.BASE_SHA.slice(0, 10)}\`, head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)` : ` (head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)`}`,
+ '',
+ '### Key changes',
+ ]
+
+ for (const fileName of filesInScope) {
+ const fileChange = changes.changes?.[fileName] || { added: {}, updated: {}, deleted: [], fileDeleted: false }
+ const addedKeys = Object.keys(fileChange.added || {})
+ const updatedKeys = Object.keys(fileChange.updated || {})
+ const deletedKeys = fileChange.deleted || []
+ lines.push(`- \`${fileName}\`: +${addedKeys.length} / ~${updatedKeys.length} / -${deletedKeys.length}${fileChange.fileDeleted ? ' (file deleted in en-US)' : ''}`)
+ }
+
+ lines.push(
+ '',
+ '## Verification',
+ '',
+ `- \`pnpm --dir web run i18n:check --file ${process.env.FILES_IN_SCOPE} --lang ${process.env.TARGET_LANGS}\``,
+ `- \`pnpm --dir web lint:fix --quiet -- \``,
+ '',
+ '## Notes',
+ '',
+ '- This PR was generated from structured en-US key changes produced by `trigger-i18n-sync.yml`.',
+ `- Structured change source: ${process.env.CHANGES_SOURCE || 'unknown'}.`,
+ '- Branch name is deterministic for the head SHA and scope, so reruns update the same PR instead of opening duplicates.',
+ '',
+ '🤖 Generated with [Claude Code](https://claude.com/claude-code)'
+ )
+
+ process.stdout.write(lines.join('\n'))
+ NODE
+
+ EXISTING_PR_NUMBER=$(gh pr list --repo "$REPO_NAME" --head "$BRANCH_NAME" --state open --json number --jq '.[0].number')
+
+ if [ -n "$EXISTING_PR_NUMBER" ] && [ "$EXISTING_PR_NUMBER" != "null" ]; then
+ gh pr edit "$EXISTING_PR_NUMBER" --repo "$REPO_NAME" --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
+ else
+ gh pr create --repo "$REPO_NAME" --head "$BRANCH_NAME" --base main --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
+ fi
diff --git a/.github/workflows/trigger-i18n-sync.yml b/.github/workflows/trigger-i18n-sync.yml
new file mode 100644
index 0000000000..a1ca42b26e
--- /dev/null
+++ b/.github/workflows/trigger-i18n-sync.yml
@@ -0,0 +1,171 @@
+name: Trigger i18n Sync on Push
+
+on:
+ push:
+ branches: [main]
+ paths:
+ - 'web/i18n/en-US/*.json'
+
+permissions:
+ contents: write
+
+concurrency:
+ group: trigger-i18n-sync-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ trigger:
+ if: github.repository == 'langgenius/dify'
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ fetch-depth: 0
+
+ - name: Detect changed files and build structured change set
+ id: detect
+ shell: bash
+ run: |
+ BASE_SHA="${{ github.event.before }}"
+ if [ -z "$BASE_SHA" ] || [ "$BASE_SHA" = "0000000000000000000000000000000000000000" ]; then
+ BASE_SHA=$(git rev-parse HEAD~1 2>/dev/null || true)
+ fi
+ HEAD_SHA="${{ github.sha }}"
+
+ if [ -n "$BASE_SHA" ]; then
+ CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' 2>/dev/null | sed -n 's@^.*/@@p' | sed 's/\.json$//' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
+ else
+ CHANGED_FILES=$(find web/i18n/en-US -maxdepth 1 -type f -name '*.json' -print | sed -n 's@^.*/@@p' | sed 's/\.json$//' | sort | tr '\n' ' ' | sed 's/[[:space:]]*$//')
+ fi
+
+ export BASE_SHA HEAD_SHA CHANGED_FILES
+ node <<'NODE'
+ const { execFileSync } = require('node:child_process')
+ const fs = require('node:fs')
+ const path = require('node:path')
+
+ const repoRoot = process.cwd()
+ const baseSha = process.env.BASE_SHA || ''
+ const headSha = process.env.HEAD_SHA || ''
+ const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
+
+ const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
+
+ const readCurrentJson = (fileStem) => {
+ const filePath = englishPath(fileStem)
+ if (!fs.existsSync(filePath))
+ return null
+
+ return JSON.parse(fs.readFileSync(filePath, 'utf8'))
+ }
+
+ const readBaseJson = (fileStem) => {
+ if (!baseSha)
+ return null
+
+ try {
+ const relativePath = `web/i18n/en-US/${fileStem}.json`
+ const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
+ return JSON.parse(content)
+ }
+ catch (error) {
+ return null
+ }
+ }
+
+ const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
+
+ const changes = {}
+
+ for (const fileStem of files) {
+ const beforeJson = readBaseJson(fileStem) || {}
+ const afterJson = readCurrentJson(fileStem) || {}
+ const added = {}
+ const updated = {}
+ const deleted = []
+
+ for (const [key, value] of Object.entries(afterJson)) {
+ if (!(key in beforeJson)) {
+ added[key] = value
+ continue
+ }
+
+ if (!compareJson(beforeJson[key], value)) {
+ updated[key] = {
+ before: beforeJson[key],
+ after: value,
+ }
+ }
+ }
+
+ for (const key of Object.keys(beforeJson)) {
+ if (!(key in afterJson))
+ deleted.push(key)
+ }
+
+ changes[fileStem] = {
+ fileDeleted: readCurrentJson(fileStem) === null,
+ added,
+ updated,
+ deleted,
+ }
+ }
+
+ fs.writeFileSync(
+ '/tmp/i18n-changes.json',
+ JSON.stringify({
+ baseSha,
+ headSha,
+ files,
+ changes,
+ })
+ )
+ NODE
+
+ if [ -n "$CHANGED_FILES" ]; then
+ echo "has_changes=true" >> "$GITHUB_OUTPUT"
+ else
+ echo "has_changes=false" >> "$GITHUB_OUTPUT"
+ fi
+
+ echo "base_sha=$BASE_SHA" >> "$GITHUB_OUTPUT"
+ echo "head_sha=$HEAD_SHA" >> "$GITHUB_OUTPUT"
+ echo "changed_files=$CHANGED_FILES" >> "$GITHUB_OUTPUT"
+
+ - name: Trigger i18n sync workflow
+ if: steps.detect.outputs.has_changes == 'true'
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ BASE_SHA: ${{ steps.detect.outputs.base_sha }}
+ HEAD_SHA: ${{ steps.detect.outputs.head_sha }}
+ CHANGED_FILES: ${{ steps.detect.outputs.changed_files }}
+ with:
+          github-token: ${{ secrets.I18N_DISPATCH_PAT || secrets.GITHUB_TOKEN }} # NOTE(review): dispatches created with GITHUB_TOKEN do not trigger downstream workflows; a PAT or App token is required
+ script: |
+ const fs = require('fs')
+
+ const changesJson = fs.readFileSync('/tmp/i18n-changes.json', 'utf8')
+ const changesBase64 = Buffer.from(changesJson).toString('base64')
+ const maxEmbeddedChangesChars = 48000
+ const changesEmbedded = changesBase64.length <= maxEmbeddedChangesChars
+
+ if (!changesEmbedded) {
+ console.log(`Structured change set too large to embed safely (${changesBase64.length} chars). Downstream workflow will regenerate it from git history.`)
+ }
+
+ await github.rest.repos.createDispatchEvent({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ event_type: 'i18n-sync',
+ client_payload: {
+ changed_files: process.env.CHANGED_FILES,
+ changes_base64: changesEmbedded ? changesBase64 : '',
+ changes_embedded: changesEmbedded,
+ sync_mode: 'incremental',
+ base_sha: process.env.BASE_SHA,
+ head_sha: process.env.HEAD_SHA,
+ },
+ })
diff --git a/.github/workflows/vdb-tests-full.yml b/.github/workflows/vdb-tests-full.yml
new file mode 100644
index 0000000000..01d25902f6
--- /dev/null
+++ b/.github/workflows/vdb-tests-full.yml
@@ -0,0 +1,95 @@
+name: Run Full VDB Tests
+
+on:
+ schedule:
+ - cron: '0 3 * * 1'
+ workflow_dispatch:
+
+permissions:
+ contents: read
+
+concurrency:
+ group: vdb-tests-full-${{ github.ref || github.run_id }}
+ cancel-in-progress: true
+
+jobs:
+ test:
+ name: Full VDB Tests
+ if: github.repository == 'langgenius/dify'
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version:
+ - "3.12"
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ persist-credentials: false
+
+ - name: Free Disk Space
+ uses: endersonmenezes/free-disk-space@7901478139cff6e9d44df5972fd8ab8fcade4db1 # v3.2.2
+ with:
+ remove_dotnet: true
+ remove_haskell: true
+ remove_tool_cache: true
+
+ - name: Setup UV and Python
+ uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
+ with:
+ enable-cache: true
+ python-version: ${{ matrix.python-version }}
+ cache-dependency-glob: api/uv.lock
+
+ - name: Check UV lockfile
+ run: uv lock --project api --check
+
+ - name: Install dependencies
+ run: uv sync --project api --dev
+
+ - name: Set up dotenvs
+ run: |
+ cp docker/.env.example docker/.env
+ cp docker/middleware.env.example docker/middleware.env
+
+ - name: Expose Service Ports
+ run: sh .github/workflows/expose_service_ports.sh
+
+# - name: Set up Vector Store (TiDB)
+# uses: hoverkraft-tech/compose-action@v2.0.2
+# with:
+# compose-file: docker/tidb/docker-compose.yaml
+# services: |
+# tidb
+# tiflash
+
+ - name: Set up Full Vector Store Matrix
+ uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
+ with:
+ compose-file: |
+ docker/docker-compose.yaml
+ services: |
+ weaviate
+ qdrant
+ couchbase-server
+ etcd
+ minio
+ milvus-standalone
+ pgvecto-rs
+ pgvector
+ chroma
+ elasticsearch
+ oceanbase
+
+ - name: setup test config
+ run: |
+ echo $(pwd)
+ ls -lah .
+ cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
+
+# - name: Check VDB Ready (TiDB)
+# run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
+
+ - name: Test Vector Stores
+ run: uv run --project api bash dev/pytest/pytest_vdb.sh
diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml
index 026ff0fe57..47ec70f603 100644
--- a/.github/workflows/vdb-tests.yml
+++ b/.github/workflows/vdb-tests.yml
@@ -1,15 +1,18 @@
-name: Run VDB Tests
+name: Run VDB Smoke Tests
on:
workflow_call:
+permissions:
+ contents: read
+
concurrency:
group: vdb-tests-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
test:
- name: VDB Tests
+ name: VDB Smoke Tests
runs-on: ubuntu-latest
strategy:
matrix:
@@ -58,23 +61,18 @@ jobs:
# tidb
# tiflash
- - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase, OceanBase)
+ - name: Set up Vector Stores for Smoke Coverage
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
with:
compose-file: |
docker/docker-compose.yaml
services: |
+ db_postgres
+ redis
weaviate
qdrant
- couchbase-server
- etcd
- minio
- milvus-standalone
- pgvecto-rs
pgvector
chroma
- elasticsearch
- oceanbase
- name: setup test config
run: |
@@ -86,4 +84,9 @@ jobs:
# run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
- name: Test Vector Stores
- run: uv run --project api bash dev/pytest/pytest_vdb.sh
+ run: |
+ uv run --project api pytest --timeout "${PYTEST_TIMEOUT:-180}" \
+ api/tests/integration_tests/vdb/chroma \
+ api/tests/integration_tests/vdb/pgvector \
+ api/tests/integration_tests/vdb/qdrant \
+ api/tests/integration_tests/vdb/weaviate
diff --git a/.gitignore b/.gitignore
index d7698fe3fd..53dea88899 100644
--- a/.gitignore
+++ b/.gitignore
@@ -212,7 +212,8 @@ api/.vscode
# pnpm
/.pnpm-store
-/node_modules
+node_modules
+.vite-hooks/_
# plugin migrate
plugins.jsonl
diff --git a/.npmrc b/.npmrc
new file mode 100644
index 0000000000..cffe8cdef1
--- /dev/null
+++ b/.npmrc
@@ -0,0 +1 @@
+save-exact=true
diff --git a/web/.husky/pre-commit b/.vite-hooks/pre-commit
old mode 100644
new mode 100755
similarity index 99%
rename from web/.husky/pre-commit
rename to .vite-hooks/pre-commit
index 3f25de256f..54e09f80d6
--- a/web/.husky/pre-commit
+++ b/.vite-hooks/pre-commit
@@ -77,7 +77,7 @@ if $web_modified; then
fi
cd ./web || exit 1
- lint-staged
+ vp staged
if $web_ts_modified; then
echo "Running TypeScript type-check:tsgo"
diff --git a/api/.ruff.toml b/api/.ruff.toml
index 4b1252a861..2a825f1ef0 100644
--- a/api/.ruff.toml
+++ b/api/.ruff.toml
@@ -115,12 +115,6 @@ ignore = [
"controllers/console/human_input_form.py" = ["TID251"]
"controllers/web/human_input_form.py" = ["TID251"]
-[lint.pyflakes]
-allowed-unused-imports = [
- "tests.integration_tests",
- "tests.unit_tests",
-]
-
[lint.flake8-tidy-imports]
[lint.flake8-tidy-imports.banned-api."flask_restx.reqparse"]
diff --git a/api/constants/__init__.py b/api/constants/__init__.py
index e441395afc..8698fb855d 100644
--- a/api/constants/__init__.py
+++ b/api/constants/__init__.py
@@ -7,15 +7,16 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000"
DEFAULT_FILE_NUMBER_LIMITS = 3
-IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"})
+_IMAGE_EXTENSION_BASE: frozenset[str] = frozenset(("jpg", "jpeg", "png", "webp", "gif", "svg"))
+_VIDEO_EXTENSION_BASE: frozenset[str] = frozenset(("mp4", "mov", "mpeg", "webm"))
+_AUDIO_EXTENSION_BASE: frozenset[str] = frozenset(("mp3", "m4a", "wav", "amr", "mpga"))
-VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"})
+IMAGE_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_IMAGE_EXTENSION_BASE))
+VIDEO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_VIDEO_EXTENSION_BASE))
+AUDIO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_AUDIO_EXTENSION_BASE))
-AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"})
-
-_doc_extensions: set[str]
-if dify_config.ETL_TYPE == "Unstructured":
- _doc_extensions = {
+_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
+ (
"txt",
"markdown",
"md",
@@ -35,11 +36,10 @@ if dify_config.ETL_TYPE == "Unstructured":
"pptx",
"xml",
"epub",
- }
- if dify_config.UNSTRUCTURED_API_URL:
- _doc_extensions.add("ppt")
-else:
- _doc_extensions = {
+ )
+)
+_DEFAULT_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
+ (
"txt",
"markdown",
"md",
@@ -53,8 +53,17 @@ else:
"csv",
"vtt",
"properties",
- }
-DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions)
+ )
+)
+
+_doc_extensions: set[str]
+if dify_config.ETL_TYPE == "Unstructured":
+ _doc_extensions = set(_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE)
+ if dify_config.UNSTRUCTURED_API_URL:
+ _doc_extensions.add("ppt")
+else:
+ _doc_extensions = set(_DEFAULT_DOCUMENT_EXTENSION_BASE)
+DOCUMENT_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_doc_extensions))
# console
COOKIE_NAME_ACCESS_TOKEN = "access_token"
diff --git a/api/context/execution_context.py b/api/context/execution_context.py
index ba9a24d4f3..e687dfc4b1 100644
--- a/api/context/execution_context.py
+++ b/api/context/execution_context.py
@@ -10,7 +10,7 @@ import threading
from abc import ABC, abstractmethod
from collections.abc import Callable, Generator
from contextlib import AbstractContextManager, contextmanager
-from typing import Any, Protocol, TypeVar, final, runtime_checkable
+from typing import Any, Protocol, final, runtime_checkable
from pydantic import BaseModel
@@ -188,8 +188,6 @@ class ExecutionContextBuilder:
_capturer: Callable[[], IExecutionContext] | None = None
_tenant_context_providers: dict[tuple[str, str], Callable[[], BaseModel]] = {}
-T = TypeVar("T", bound=BaseModel)
-
class ContextProviderNotFoundError(KeyError):
"""Raised when a tenant-scoped context provider is missing."""
diff --git a/api/contexts/wrapper.py b/api/contexts/wrapper.py
index 8cd53487ef..1968f4b93d 100644
--- a/api/contexts/wrapper.py
+++ b/api/contexts/wrapper.py
@@ -1,7 +1,4 @@
from contextvars import ContextVar
-from typing import Generic, TypeVar
-
-T = TypeVar("T")
class HiddenValue:
@@ -11,7 +8,7 @@ class HiddenValue:
_default = HiddenValue()
-class RecyclableContextVar(Generic[T]):
+class RecyclableContextVar[T]:
"""
RecyclableContextVar is a wrapper around ContextVar
It's safe to use in gunicorn with thread recycling, but features like `reset` are not available for now
diff --git a/api/controllers/common/fields.py b/api/controllers/common/fields.py
index 7348ef62aa..4fe3fc9062 100644
--- a/api/controllers/common/fields.py
+++ b/api/controllers/common/fields.py
@@ -1,14 +1,14 @@
from __future__ import annotations
-from typing import Any, TypeAlias
+from typing import Any
from graphon.file import helpers as file_helpers
from pydantic import BaseModel, ConfigDict, computed_field
from models.model import IconType
-JSONValue: TypeAlias = str | int | float | bool | None | dict[str, Any] | list[Any]
-JSONObject: TypeAlias = dict[str, Any]
+type JSONValue = str | int | float | bool | None | dict[str, Any] | list[Any]
+type JSONObject = dict[str, Any]
class SystemParameters(BaseModel):
diff --git a/api/controllers/common/file_response.py b/api/controllers/common/file_response.py
index ca8ea3d52e..79df978012 100644
--- a/api/controllers/common/file_response.py
+++ b/api/controllers/common/file_response.py
@@ -4,8 +4,8 @@ from urllib.parse import quote
from flask import Response
-HTML_MIME_TYPES = frozenset({"text/html", "application/xhtml+xml"})
-HTML_EXTENSIONS = frozenset({"html", "htm"})
+HTML_MIME_TYPES: frozenset[str] = frozenset(("text/html", "application/xhtml+xml"))
+HTML_EXTENSIONS: frozenset[str] = frozenset(("html", "htm"))
def _normalize_mime_type(mime_type: str | None) -> str:
diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py
index 6c3a6a8c1f..9b8408980d 100644
--- a/api/controllers/console/admin.py
+++ b/api/controllers/console/admin.py
@@ -2,7 +2,6 @@ import csv
import io
from collections.abc import Callable
from functools import wraps
-from typing import ParamSpec, TypeVar
from flask import request
from flask_restx import Resource
@@ -20,9 +19,6 @@ from libs.token import extract_access_token
from models.model import App, ExporleBanner, InstalledApp, RecommendedApp, TrialApp
from services.billing_service import BillingService
-P = ParamSpec("P")
-R = TypeVar("R")
-
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
@@ -72,9 +68,9 @@ console_ns.schema_model(
)
-def admin_required(view: Callable[P, R]):
+def admin_required[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
- def decorated(*args: P.args, **kwargs: P.kwargs):
+ def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
if not dify_config.ADMIN_API_KEY:
raise Unauthorized("API key is invalid.")
diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py
index 783cb5c444..772bb9d0f1 100644
--- a/api/controllers/console/apikey.py
+++ b/api/controllers/console/apikey.py
@@ -2,7 +2,7 @@ import flask_restx
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import delete, func, select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import Forbidden
from extensions.ext_database import db
@@ -34,7 +34,7 @@ api_key_list_model = console_ns.model(
def _get_resource(resource_id, tenant_id, resource_model):
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
resource = session.execute(
select(resource_model).filter_by(id=resource_id, tenant_id=tenant_id)
).scalar_one_or_none()
diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py
index 738e77b371..c67ca57c63 100644
--- a/api/controllers/console/app/app.py
+++ b/api/controllers/console/app/app.py
@@ -1,7 +1,7 @@
import logging
import uuid
from datetime import datetime
-from typing import Any, Literal, TypeAlias
+from typing import Any, Literal
from flask import request
from flask_restx import Resource
@@ -9,7 +9,7 @@ from graphon.enums import WorkflowExecutionStatus
from graphon.file import helpers as file_helpers
from pydantic import AliasChoices, BaseModel, ConfigDict, Field, computed_field, field_validator
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest
from controllers.common.helpers import FileInfo
@@ -152,7 +152,7 @@ class AppTracePayload(BaseModel):
return value
-JSONValue: TypeAlias = Any
+type JSONValue = Any
class ResponseModel(BaseModel):
@@ -642,7 +642,7 @@ class AppCopyApi(Resource):
args = CopyAppPayload.model_validate(console_ns.payload or {})
- with Session(db.engine) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
import_service = AppDslService(session)
yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True)
result = import_service.import_app(
@@ -655,7 +655,6 @@ class AppCopyApi(Resource):
icon=args.icon,
icon_background=args.icon_background,
)
- session.commit()
# Inherit web app permission from original app
if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py
index fdef54ba5a..16e1fa3245 100644
--- a/api/controllers/console/app/app_import.py
+++ b/api/controllers/console/app/app_import.py
@@ -1,6 +1,6 @@
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
@@ -71,7 +71,7 @@ class AppImportApi(Resource):
args = AppImportPayload.model_validate(console_ns.payload)
# Create service with session
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
import_service = AppDslService(session)
# Import app
account = current_user
@@ -87,7 +87,6 @@ class AppImportApi(Resource):
icon_background=args.icon_background,
app_id=args.app_id,
)
- session.commit()
if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
# update web app setting as private
EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")
@@ -112,12 +111,11 @@ class AppImportConfirmApi(Resource):
current_user, _ = current_account_with_tenant()
# Create service with session
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
import_service = AppDslService(session)
# Confirm import
account = current_user
result = import_service.confirm_import(import_id=import_id, account=account)
- session.commit()
# Return appropriate status code based on result
if result.status == ImportStatus.FAILED:
@@ -134,7 +132,7 @@ class AppImportCheckDependenciesApi(Resource):
@marshal_with(app_import_check_dependencies_model)
@edit_permission_required
def get(self, app_model: App):
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
import_service = AppDslService(session)
result = import_service.check_dependencies(app_model=app_model)
diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py
index 368a6112ba..369c26a80c 100644
--- a/api/controllers/console/app/conversation_variables.py
+++ b/api/controllers/console/app/conversation_variables.py
@@ -2,7 +2,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
@@ -69,7 +69,7 @@ class ConversationVariablesApi(Resource):
page_size = 100
stmt = stmt.limit(page_size).offset((page - 1) * page_size)
- with Session(db.engine) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
rows = session.scalars(stmt).all()
return {
diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py
index 1f5a84c0b2..dcd24d2200 100644
--- a/api/controllers/console/app/workflow.py
+++ b/api/controllers/console/app/workflow.py
@@ -9,8 +9,8 @@ from graphon.enums import NodeType
from graphon.file import File
from graphon.graph_engine.manager import GraphEngineManager
from graphon.model_runtime.utils.encoders import jsonable_encoder
-from pydantic import BaseModel, Field, field_validator
-from sqlalchemy.orm import Session
+from pydantic import BaseModel, Field, ValidationError, field_validator
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound
import services
@@ -268,22 +268,18 @@ class DraftWorkflowApi(Resource):
content_type = request.headers.get("Content-Type", "")
- payload_data: dict[str, Any] | None = None
if "application/json" in content_type:
payload_data = request.get_json(silent=True)
if not isinstance(payload_data, dict):
return {"message": "Invalid JSON data"}, 400
+ args_model = SyncDraftWorkflowPayload.model_validate(payload_data)
elif "text/plain" in content_type:
try:
- payload_data = json.loads(request.data.decode("utf-8"))
- except json.JSONDecodeError:
- return {"message": "Invalid JSON data"}, 400
- if not isinstance(payload_data, dict):
+ args_model = SyncDraftWorkflowPayload.model_validate_json(request.data)
+ except (ValueError, ValidationError):
return {"message": "Invalid JSON data"}, 400
else:
abort(415)
-
- args_model = SyncDraftWorkflowPayload.model_validate(payload_data)
args = args_model.model_dump()
workflow_service = WorkflowService()
@@ -840,7 +836,7 @@ class PublishedWorkflowApi(Resource):
args = PublishWorkflowPayload.model_validate(console_ns.payload or {})
workflow_service = WorkflowService()
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
workflow = workflow_service.publish_workflow(
session=session,
app_model=app_model,
@@ -858,8 +854,6 @@ class PublishedWorkflowApi(Resource):
workflow_created_at = TimestampField().format(workflow.created_at)
- session.commit()
-
return {
"result": "success",
"created_at": workflow_created_at,
@@ -982,7 +976,7 @@ class PublishedAllWorkflowApi(Resource):
raise Forbidden()
workflow_service = WorkflowService()
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
workflows, has_more = workflow_service.get_all_published_workflow(
session=session,
app_model=app_model,
@@ -1072,7 +1066,7 @@ class WorkflowByIdApi(Resource):
workflow_service = WorkflowService()
# Create a session and manage the transaction
- with Session(db.engine, expire_on_commit=False) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
workflow = workflow_service.update_workflow(
session=session,
workflow_id=workflow_id,
@@ -1084,9 +1078,6 @@ class WorkflowByIdApi(Resource):
if not workflow:
raise NotFound("Workflow not found")
- # Commit the transaction in the controller
- session.commit()
-
return workflow
@setup_required
@@ -1101,13 +1092,11 @@ class WorkflowByIdApi(Resource):
workflow_service = WorkflowService()
# Create a session and manage the transaction
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
try:
workflow_service.delete_workflow(
session=session, workflow_id=workflow_id, tenant_id=app_model.tenant_id
)
- # Commit the transaction in the controller
- session.commit()
except WorkflowInUseError as e:
abort(400, description=str(e))
except DraftWorkflowDeletionError as e:
diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py
index f0e26c86a5..3b24c2a402 100644
--- a/api/controllers/console/app/workflow_app_log.py
+++ b/api/controllers/console/app/workflow_app_log.py
@@ -5,7 +5,7 @@ from flask import request
from flask_restx import Resource, marshal_with
from graphon.enums import WorkflowExecutionStatus
from pydantic import BaseModel, Field, field_validator
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
@@ -87,7 +87,7 @@ class WorkflowAppLogApi(Resource):
# get paginate workflow app logs
workflow_app_service = WorkflowAppService()
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
session=session,
app_model=app_model,
@@ -124,7 +124,7 @@ class WorkflowArchivedLogApi(Resource):
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
workflow_app_service = WorkflowAppService()
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_archive_logs(
session=session,
app_model=app_model,
diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py
index 4052897e9a..366f145360 100644
--- a/api/controllers/console/app/workflow_draft_variable.py
+++ b/api/controllers/console/app/workflow_draft_variable.py
@@ -1,7 +1,7 @@
import logging
from collections.abc import Callable
from functools import wraps
-from typing import Any, NoReturn, ParamSpec, TypeVar
+from typing import Any
from flask import Response, request
from flask_restx import Resource, fields, marshal, marshal_with
@@ -10,7 +10,7 @@ from graphon.variables.segment_group import SegmentGroup
from graphon.variables.segments import ArrayFileSegment, FileSegment, Segment
from graphon.variables.types import SegmentType
from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from controllers.console import console_ns
from controllers.console.app.error import (
@@ -192,11 +192,8 @@ workflow_draft_variable_list_model = console_ns.model(
"WorkflowDraftVariableList", workflow_draft_variable_list_fields_copy
)
-P = ParamSpec("P")
-R = TypeVar("R")
-
-def _api_prerequisite(f: Callable[P, R]):
+def _api_prerequisite(f: Callable[..., Any]) -> Callable[..., Any]:
"""Common prerequisites for all draft workflow variable APIs.
It ensures the following conditions are satisfied:
@@ -213,7 +210,7 @@ def _api_prerequisite(f: Callable[P, R]):
@edit_permission_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@wraps(f)
- def wrapper(*args: P.args, **kwargs: P.kwargs):
+ def wrapper(*args: Any, **kwargs: Any):
return f(*args, **kwargs)
return wrapper
@@ -244,7 +241,7 @@ class WorkflowVariableCollectionApi(Resource):
raise DraftWorkflowNotExist()
# fetch draft workflow by app_model
- with Session(bind=db.engine, expire_on_commit=False) as session:
+ with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -270,7 +267,7 @@ class WorkflowVariableCollectionApi(Resource):
return Response("", 204)
-def validate_node_id(node_id: str) -> NoReturn | None:
+def validate_node_id(node_id: str) -> None:
if node_id in [
CONVERSATION_VARIABLE_NODE_ID,
SYSTEM_VARIABLE_NODE_ID,
@@ -285,7 +282,6 @@ def validate_node_id(node_id: str) -> NoReturn | None:
raise InvalidArgumentError(
f"invalid node_id, please use correspond api for conversation and system variables, node_id={node_id}",
)
- return None
@console_ns.route("/apps//workflows/draft/nodes//variables")
@@ -298,7 +294,7 @@ class NodeVariableCollectionApi(Resource):
@marshal_with(workflow_draft_variable_list_model)
def get(self, app_model: App, node_id: str):
validate_node_id(node_id)
- with Session(bind=db.engine, expire_on_commit=False) as session:
+ with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -465,7 +461,7 @@ class VariableResetApi(Resource):
def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
- with Session(bind=db.engine, expire_on_commit=False) as session:
+ with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
diff --git a/api/controllers/console/app/workflow_trigger.py b/api/controllers/console/app/workflow_trigger.py
index 8236e766ae..aa37d24738 100644
--- a/api/controllers/console/app/workflow_trigger.py
+++ b/api/controllers/console/app/workflow_trigger.py
@@ -4,7 +4,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound
from configs import dify_config
@@ -64,7 +64,7 @@ class WebhookTriggerApi(Resource):
node_id = args.node_id
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
# Get webhook trigger for this app and node
webhook_trigger = (
session.query(WorkflowWebhookTrigger)
@@ -95,7 +95,7 @@ class AppTriggersApi(Resource):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
# Get all triggers for this app using select API
triggers = (
session.execute(
@@ -137,7 +137,7 @@ class AppTriggerEnableApi(Resource):
assert current_user.current_tenant_id is not None
trigger_id = args.trigger_id
- with Session(db.engine) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
# Find the trigger using select
trigger = session.execute(
select(AppTrigger).where(
@@ -153,9 +153,6 @@ class AppTriggerEnableApi(Resource):
# Update status based on enable_trigger boolean
trigger.status = AppTriggerStatus.ENABLED if args.enable_trigger else AppTriggerStatus.DISABLED
- session.commit()
- session.refresh(trigger)
-
# Add computed icon field
url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/"
if trigger.trigger_type == "trigger-plugin":
diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py
index 493022ffea..bd6f019eac 100644
--- a/api/controllers/console/app/wraps.py
+++ b/api/controllers/console/app/wraps.py
@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
-from typing import ParamSpec, TypeVar, Union
+from typing import Any
from sqlalchemy import select
@@ -9,11 +9,6 @@ from extensions.ext_database import db
from libs.login import current_account_with_tenant
from models import App, AppMode
-P = ParamSpec("P")
-R = TypeVar("R")
-P1 = ParamSpec("P1")
-R1 = TypeVar("R1")
-
def _load_app_model(app_id: str) -> App | None:
_, current_tenant_id = current_account_with_tenant()
@@ -28,10 +23,14 @@ def _load_app_model_with_trial(app_id: str) -> App | None:
return app_model
-def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
- def decorator(view_func: Callable[P1, R1]):
+def get_app_model(
+ view: Callable[..., Any] | None = None,
+ *,
+ mode: AppMode | list[AppMode] | None = None,
+) -> Callable[..., Any] | Callable[[Callable[..., Any]], Callable[..., Any]]:
+ def decorator(view_func: Callable[..., Any]) -> Callable[..., Any]:
@wraps(view_func)
- def decorated_view(*args: P1.args, **kwargs: P1.kwargs):
+ def decorated_view(*args: Any, **kwargs: Any):
if not kwargs.get("app_id"):
raise ValueError("missing app_id in path parameters")
@@ -69,10 +68,14 @@ def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, li
return decorator(view)
-def get_app_model_with_trial(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
- def decorator(view_func: Callable[P, R]):
+def get_app_model_with_trial(
+ view: Callable[..., Any] | None = None,
+ *,
+ mode: AppMode | list[AppMode] | None = None,
+) -> Callable[..., Any] | Callable[[Callable[..., Any]], Callable[..., Any]]:
+ def decorator(view_func: Callable[..., Any]) -> Callable[..., Any]:
@wraps(view_func)
- def decorated_view(*args: P.args, **kwargs: P.kwargs):
+ def decorated_view(*args: Any, **kwargs: Any):
if not kwargs.get("app_id"):
raise ValueError("missing app_id in path parameters")
diff --git a/api/controllers/console/auth/oauth_server.py b/api/controllers/console/auth/oauth_server.py
index 686b865871..b55cda4244 100644
--- a/api/controllers/console/auth/oauth_server.py
+++ b/api/controllers/console/auth/oauth_server.py
@@ -1,8 +1,9 @@
from collections.abc import Callable
from functools import wraps
-from typing import Concatenate, ParamSpec, TypeVar
+from typing import Concatenate
from flask import jsonify, request
+from flask.typing import ResponseReturnValue
from flask_restx import Resource
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel
@@ -16,10 +17,6 @@ from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN, OAuthGrantType,
from .. import console_ns
-P = ParamSpec("P")
-R = TypeVar("R")
-T = TypeVar("T")
-
class OAuthClientPayload(BaseModel):
client_id: str
@@ -39,9 +36,11 @@ class OAuthTokenRequest(BaseModel):
refresh_token: str | None = None
-def oauth_server_client_id_required(view: Callable[Concatenate[T, OAuthProviderApp, P], R]):
+def oauth_server_client_id_required[T, **P, R](
+ view: Callable[Concatenate[T, OAuthProviderApp, P], R],
+) -> Callable[Concatenate[T, P], R]:
@wraps(view)
- def decorated(self: T, *args: P.args, **kwargs: P.kwargs):
+ def decorated(self: T, *args: P.args, **kwargs: P.kwargs) -> R:
json_data = request.get_json()
if json_data is None:
raise BadRequest("client_id is required")
@@ -58,9 +57,13 @@ def oauth_server_client_id_required(view: Callable[Concatenate[T, OAuthProviderA
return decorated
-def oauth_server_access_token_required(view: Callable[Concatenate[T, OAuthProviderApp, Account, P], R]):
+def oauth_server_access_token_required[T, **P, R](
+ view: Callable[Concatenate[T, OAuthProviderApp, Account, P], R],
+) -> Callable[Concatenate[T, OAuthProviderApp, P], R | ResponseReturnValue]:
@wraps(view)
- def decorated(self: T, oauth_provider_app: OAuthProviderApp, *args: P.args, **kwargs: P.kwargs):
+ def decorated(
+ self: T, oauth_provider_app: OAuthProviderApp, *args: P.args, **kwargs: P.kwargs
+ ) -> R | ResponseReturnValue:
if not isinstance(oauth_provider_app, OAuthProviderApp):
raise BadRequest("Invalid oauth_provider_app")
diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py
index ac039f9c5d..23c01eedb1 100644
--- a/api/controllers/console/billing/billing.py
+++ b/api/controllers/console/billing/billing.py
@@ -36,7 +36,7 @@ class Subscription(Resource):
@only_edition_cloud
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
- args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
+ args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True))
BillingService.is_tenant_owner_or_admin(current_user)
return BillingService.get_subscription(args.plan, args.interval, current_user.email, current_tenant_id)
diff --git a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py
index afc5f92b68..b5a08e0791 100644
--- a/api/controllers/console/billing/compliance.py
+++ b/api/controllers/console/billing/compliance.py
@@ -31,7 +31,7 @@ class ComplianceApi(Resource):
@only_edition_cloud
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
- args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
+ args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True))
ip_address = extract_remote_ip(request)
device_info = request.headers.get("User-Agent", "Unknown device")
diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py
index daef4e005a..ac14349045 100644
--- a/api/controllers/console/datasets/data_source.py
+++ b/api/controllers/console/datasets/data_source.py
@@ -6,7 +6,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound
from controllers.common.schema import get_or_create_model, register_schema_model
@@ -159,7 +159,7 @@ class DataSourceApi(Resource):
@account_initialization_required
def patch(self, binding_id, action: Literal["enable", "disable"]):
binding_id = str(binding_id)
- with Session(db.engine) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
data_source_binding = session.execute(
select(DataSourceOauthBinding).filter_by(id=binding_id)
).scalar_one_or_none()
@@ -211,7 +211,7 @@ class DataSourceNotionListApi(Resource):
if not credential:
raise NotFound("Credential not found.")
exist_page_ids = []
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
# import notion in the exist dataset
if query.dataset_id:
dataset = DatasetService.get_dataset(query.dataset_id)
diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py
index fc6896f123..f3866f6aef 100644
--- a/api/controllers/console/datasets/external.py
+++ b/api/controllers/console/datasets/external.py
@@ -173,8 +173,11 @@ class ExternalApiTemplateApi(Resource):
@login_required
@account_initialization_required
def get(self, external_knowledge_api_id):
+ _, current_tenant_id = current_account_with_tenant()
external_knowledge_api_id = str(external_knowledge_api_id)
- external_knowledge_api = ExternalDatasetService.get_external_knowledge_api(external_knowledge_api_id)
+ external_knowledge_api = ExternalDatasetService.get_external_knowledge_api(
+ external_knowledge_api_id, current_tenant_id
+ )
if external_knowledge_api is None:
raise NotFound("API template not found.")
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
index 4f31093cfe..1758bad31d 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
@@ -3,7 +3,7 @@ import logging
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from controllers.common.schema import register_schema_models
from controllers.console import console_ns
@@ -85,7 +85,7 @@ class CustomizedPipelineTemplateApi(Resource):
@account_initialization_required
@enterprise_license_required
def post(self, template_id: str):
- with Session(db.engine) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
template = (
session.query(PipelineCustomizedTemplate).where(PipelineCustomizedTemplate.id == template_id).first()
)
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
index e65cb19b39..a6ca0689d0 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
@@ -1,6 +1,6 @@
from flask_restx import Resource, marshal
from pydantic import BaseModel
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import Forbidden
import services
@@ -54,7 +54,7 @@ class CreateRagPipelineDatasetApi(Resource):
yaml_content=payload.yaml_content,
)
try:
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
rag_pipeline_dsl_service = RagPipelineDslService(session)
import_info = rag_pipeline_dsl_service.create_rag_pipeline_dataset(
tenant_id=current_tenant_id,
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
index f12cbd3495..d635dcb530 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
@@ -5,7 +5,7 @@ from flask import Response, request
from flask_restx import Resource, marshal, marshal_with
from graphon.variables.types import SegmentType
from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import Forbidden
from controllers.common.schema import register_schema_models
@@ -96,7 +96,7 @@ class RagPipelineVariableCollectionApi(Resource):
raise DraftWorkflowNotExist()
# fetch draft workflow by app_model
- with Session(bind=db.engine, expire_on_commit=False) as session:
+ with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -143,7 +143,7 @@ class RagPipelineNodeVariableCollectionApi(Resource):
@marshal_with(workflow_draft_variable_list_model)
def get(self, pipeline: Pipeline, node_id: str):
validate_node_id(node_id)
- with Session(bind=db.engine, expire_on_commit=False) as session:
+ with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -289,7 +289,7 @@ class RagPipelineVariableResetApi(Resource):
def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList:
- with Session(bind=db.engine, expire_on_commit=False) as session:
+ with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
index af142b4646..732a6dc446 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
@@ -1,7 +1,7 @@
from flask import request
from flask_restx import Resource, fields, marshal_with # type: ignore
from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from controllers.common.schema import get_or_create_model, register_schema_models
from controllers.console import console_ns
@@ -68,7 +68,7 @@ class RagPipelineImportApi(Resource):
payload = RagPipelineImportPayload.model_validate(console_ns.payload or {})
# Create service with session
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
import_service = RagPipelineDslService(session)
# Import app
account = current_user
@@ -80,7 +80,6 @@ class RagPipelineImportApi(Resource):
pipeline_id=payload.pipeline_id,
dataset_name=payload.name,
)
- session.commit()
# Return appropriate status code based on result
status = result.status
@@ -102,12 +101,11 @@ class RagPipelineImportConfirmApi(Resource):
current_user, _ = current_account_with_tenant()
# Create service with session
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
import_service = RagPipelineDslService(session)
# Confirm import
account = current_user
result = import_service.confirm_import(import_id=import_id, account=account)
- session.commit()
# Return appropriate status code based on result
if result.status == ImportStatus.FAILED:
@@ -124,7 +122,7 @@ class RagPipelineImportCheckDependenciesApi(Resource):
@edit_permission_required
@marshal_with(pipeline_import_check_dependencies_model)
def get(self, pipeline: Pipeline):
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
import_service = RagPipelineDslService(session)
result = import_service.check_dependencies(pipeline=pipeline)
@@ -142,7 +140,7 @@ class RagPipelineExportApi(Resource):
# Add include_secret params
query = IncludeSecretQuery.model_validate(request.args.to_dict())
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
export_service = RagPipelineDslService(session)
result = export_service.export_rag_pipeline_dsl(
pipeline=pipeline, include_secret=query.include_secret == "true"
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
index 8efb59a8e9..70dfe47d7f 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
@@ -5,8 +5,8 @@ from typing import Any, Literal, cast
from flask import abort, request
from flask_restx import Resource, marshal_with # type: ignore
from graphon.model_runtime.utils.encoders import jsonable_encoder
-from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
+from pydantic import BaseModel, Field, ValidationError
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound
import services
@@ -186,29 +186,14 @@ class DraftRagPipelineApi(Resource):
if "application/json" in content_type:
payload_dict = console_ns.payload or {}
+ payload = DraftWorkflowSyncPayload.model_validate(payload_dict)
elif "text/plain" in content_type:
try:
- data = json.loads(request.data.decode("utf-8"))
- if "graph" not in data or "features" not in data:
- raise ValueError("graph or features not found in data")
-
- if not isinstance(data.get("graph"), dict):
- raise ValueError("graph is not a dict")
-
- payload_dict = {
- "graph": data.get("graph"),
- "features": data.get("features"),
- "hash": data.get("hash"),
- "environment_variables": data.get("environment_variables"),
- "conversation_variables": data.get("conversation_variables"),
- "rag_pipeline_variables": data.get("rag_pipeline_variables"),
- }
- except json.JSONDecodeError:
+ payload = DraftWorkflowSyncPayload.model_validate_json(request.data)
+ except (ValueError, ValidationError):
return {"message": "Invalid JSON data"}, 400
else:
abort(415)
-
- payload = DraftWorkflowSyncPayload.model_validate(payload_dict)
rag_pipeline_service = RagPipelineService()
try:
@@ -608,19 +593,15 @@ class PublishedRagPipelineApi(Resource):
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
rag_pipeline_service = RagPipelineService()
- with Session(db.engine) as session:
- pipeline = session.merge(pipeline)
- workflow = rag_pipeline_service.publish_workflow(
- session=session,
- pipeline=pipeline,
- account=current_user,
- )
- pipeline.is_published = True
- pipeline.workflow_id = workflow.id
- session.add(pipeline)
- workflow_created_at = TimestampField().format(workflow.created_at)
-
- session.commit()
+ workflow = rag_pipeline_service.publish_workflow(
+ session=db.session, # type: ignore[reportArgumentType,arg-type]
+ pipeline=pipeline,
+ account=current_user,
+ )
+ pipeline.is_published = True
+ pipeline.workflow_id = workflow.id
+ db.session.commit()
+ workflow_created_at = TimestampField().format(workflow.created_at)
return {
"result": "success",
@@ -695,7 +676,7 @@ class PublishedAllRagPipelineApi(Resource):
raise Forbidden()
rag_pipeline_service = RagPipelineService()
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
workflows, has_more = rag_pipeline_service.get_all_published_workflow(
session=session,
pipeline=pipeline,
@@ -767,7 +748,7 @@ class RagPipelineByIdApi(Resource):
rag_pipeline_service = RagPipelineService()
# Create a session and manage the transaction
- with Session(db.engine, expire_on_commit=False) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
workflow = rag_pipeline_service.update_workflow(
session=session,
workflow_id=workflow_id,
@@ -779,9 +760,6 @@ class RagPipelineByIdApi(Resource):
if not workflow:
raise NotFound("Workflow not found")
- # Commit the transaction in the controller
- session.commit()
-
return workflow
@setup_required
@@ -798,14 +776,13 @@ class RagPipelineByIdApi(Resource):
workflow_service = WorkflowService()
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
try:
workflow_service.delete_workflow(
session=session,
workflow_id=workflow_id,
tenant_id=pipeline.tenant_id,
)
- session.commit()
except WorkflowInUseError as e:
abort(400, description=str(e))
except DraftWorkflowDeletionError as e:
diff --git a/api/controllers/console/datasets/wraps.py b/api/controllers/console/datasets/wraps.py
index d533e6c5b1..b58a07029c 100644
--- a/api/controllers/console/datasets/wraps.py
+++ b/api/controllers/console/datasets/wraps.py
@@ -1,6 +1,5 @@
from collections.abc import Callable
from functools import wraps
-from typing import ParamSpec, TypeVar
from sqlalchemy import select
@@ -9,13 +8,10 @@ from extensions.ext_database import db
from libs.login import current_account_with_tenant
from models.dataset import Pipeline
-P = ParamSpec("P")
-R = TypeVar("R")
-
-def get_rag_pipeline(view_func: Callable[P, R]):
+def get_rag_pipeline[**P, R](view_func: Callable[P, R]) -> Callable[P, R]:
@wraps(view_func)
- def decorated_view(*args: P.args, **kwargs: P.kwargs):
+ def decorated_view(*args: P.args, **kwargs: P.kwargs) -> R:
if not kwargs.get("pipeline_id"):
raise ValueError("missing pipeline_id in path parameters")
diff --git a/api/controllers/console/explore/conversation.py b/api/controllers/console/explore/conversation.py
index 933c80f509..092f509f1c 100644
--- a/api/controllers/console/explore/conversation.py
+++ b/api/controllers/console/explore/conversation.py
@@ -2,7 +2,7 @@ from typing import Any
from flask import request
from pydantic import BaseModel, Field, TypeAdapter, model_validator
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound
from controllers.common.schema import register_schema_models
@@ -74,7 +74,7 @@ class ConversationListApi(InstalledAppResource):
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
pagination = WebConversationService.pagination_by_last_id(
session=session,
app_model=app_model,
diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py
index 9d9337e63e..9f7e829ae8 100644
--- a/api/controllers/console/explore/wraps.py
+++ b/api/controllers/console/explore/wraps.py
@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
-from typing import Concatenate, ParamSpec, TypeVar
+from typing import Concatenate
from flask import abort
from flask_restx import Resource
@@ -15,12 +15,8 @@ from models import AccountTrialAppRecord, App, InstalledApp, TrialApp
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
-P = ParamSpec("P")
-R = TypeVar("R")
-T = TypeVar("T")
-
-def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
+def installed_app_required[**P, R](view: Callable[Concatenate[InstalledApp, P], R] | None = None):
def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
@wraps(view)
def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs):
@@ -49,7 +45,7 @@ def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | Non
return decorator
-def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
+def user_allowed_to_access_app[**P, R](view: Callable[Concatenate[InstalledApp, P], R] | None = None):
def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
@wraps(view)
def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs):
@@ -73,7 +69,7 @@ def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] |
return decorator
-def trial_app_required(view: Callable[Concatenate[App, P], R] | None = None):
+def trial_app_required[**P, R](view: Callable[Concatenate[App, P], R] | None = None):
def decorator(view: Callable[Concatenate[App, P], R]):
@wraps(view)
def decorated(app_id: str, *args: P.args, **kwargs: P.kwargs):
@@ -106,7 +102,7 @@ def trial_app_required(view: Callable[Concatenate[App, P], R] | None = None):
return decorator
-def trial_feature_enable(view: Callable[P, R]):
+def trial_feature_enable[**P, R](view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_system_features()
@@ -117,7 +113,7 @@ def trial_feature_enable(view: Callable[P, R]):
return decorated
-def explore_banner_enabled(view: Callable[P, R]):
+def explore_banner_enabled[**P, R](view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_system_features()
diff --git a/api/controllers/console/workspace/__init__.py b/api/controllers/console/workspace/__init__.py
index 876e2301f2..971674cee2 100644
--- a/api/controllers/console/workspace/__init__.py
+++ b/api/controllers/console/workspace/__init__.py
@@ -1,30 +1,26 @@
from collections.abc import Callable
from functools import wraps
-from typing import ParamSpec, TypeVar
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import Forbidden
from extensions.ext_database import db
from libs.login import current_account_with_tenant
from models.account import TenantPluginPermission
-P = ParamSpec("P")
-R = TypeVar("R")
-
def plugin_permission_required(
install_required: bool = False,
debug_required: bool = False,
):
- def interceptor(view: Callable[P, R]):
+ def interceptor[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
- def decorated(*args: P.args, **kwargs: P.kwargs):
+ def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
current_user, current_tenant_id = current_account_with_tenant()
user = current_user
tenant_id = current_tenant_id
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
permission = (
session.query(TenantPluginPermission)
.where(
diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py
index 6f93ff1e70..626d330e9d 100644
--- a/api/controllers/console/workspace/account.py
+++ b/api/controllers/console/workspace/account.py
@@ -8,7 +8,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field, field_validator, model_validator
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from configs import dify_config
from constants.languages import supported_language
@@ -519,7 +519,7 @@ class EducationAutoCompleteApi(Resource):
@cloud_edition_billing_enabled
@marshal_with(data_fields)
def get(self):
- payload = request.args.to_dict(flat=True) # type: ignore
+ payload = request.args.to_dict(flat=True)
args = EducationAutocompleteQuery.model_validate(payload)
return BillingService.EducationIdentity.autocomplete(args.keywords, args.page, args.limit)
@@ -562,7 +562,7 @@ class ChangeEmailSendEmailApi(Resource):
user_email = current_user.email
else:
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
account = AccountService.get_account_by_email_with_case_fallback(args.email, session=session)
if account is None:
raise AccountNotFound()
diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py
index 8e0aefc9e3..cbb9677309 100644
--- a/api/controllers/console/workspace/model_providers.py
+++ b/api/controllers/console/workspace/model_providers.py
@@ -99,7 +99,7 @@ class ModelProviderListApi(Resource):
_, current_tenant_id = current_account_with_tenant()
tenant_id = current_tenant_id
- payload = request.args.to_dict(flat=True) # type: ignore
+ payload = request.args.to_dict(flat=True)
args = ParserModelList.model_validate(payload)
model_provider_service = ModelProviderService()
@@ -118,7 +118,7 @@ class ModelProviderCredentialApi(Resource):
_, current_tenant_id = current_account_with_tenant()
tenant_id = current_tenant_id
# if credential_id is not provided, return current used credential
- payload = request.args.to_dict(flat=True) # type: ignore
+ payload = request.args.to_dict(flat=True)
args = ParserCredentialId.model_validate(payload)
model_provider_service = ModelProviderService()
diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py
index 2ec1a9435a..9182dbb510 100644
--- a/api/controllers/console/workspace/models.py
+++ b/api/controllers/console/workspace/models.py
@@ -287,12 +287,10 @@ class ModelProviderModelCredentialApi(Resource):
provider=provider,
)
else:
- # Normalize model_type to the origin value stored in DB (e.g., "text-generation" for LLM)
- normalized_model_type = args.model_type.to_origin_model_type()
available_credentials = model_provider_service.get_provider_model_available_credentials(
tenant_id=tenant_id,
provider=provider,
- model_type=normalized_model_type,
+ model_type=args.model_type,
model=args.model,
)
diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py
index 80216915cd..c9956501e2 100644
--- a/api/controllers/console/workspace/tool_providers.py
+++ b/api/controllers/console/workspace/tool_providers.py
@@ -7,7 +7,7 @@ from flask import make_response, redirect, request, send_file
from flask_restx import Resource
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel, Field, HttpUrl, field_validator, model_validator
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import Forbidden
from configs import dify_config
@@ -1019,7 +1019,7 @@ class ToolProviderMCPApi(Resource):
# Step 1: Get provider data for URL validation (short-lived session, no network I/O)
validation_data = None
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
validation_data = service.get_provider_for_url_validation(
tenant_id=current_tenant_id, provider_id=payload.provider_id
@@ -1034,7 +1034,7 @@ class ToolProviderMCPApi(Resource):
)
# Step 3: Perform database update in a transaction
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
service.update_provider(
tenant_id=current_tenant_id,
@@ -1061,7 +1061,7 @@ class ToolProviderMCPApi(Resource):
payload = MCPProviderDeletePayload.model_validate(console_ns.payload or {})
_, current_tenant_id = current_account_with_tenant()
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
service.delete_provider(tenant_id=current_tenant_id, provider_id=payload.provider_id)
@@ -1079,7 +1079,7 @@ class ToolMCPAuthApi(Resource):
provider_id = payload.provider_id
_, tenant_id = current_account_with_tenant()
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
db_provider = service.get_provider(provider_id=provider_id, tenant_id=tenant_id)
if not db_provider:
@@ -1100,7 +1100,7 @@ class ToolMCPAuthApi(Resource):
sse_read_timeout=provider_entity.sse_read_timeout,
):
# Update credentials in new transaction
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
service.update_provider_credentials(
provider_id=provider_id,
@@ -1118,17 +1118,17 @@ class ToolMCPAuthApi(Resource):
resource_metadata_url=e.resource_metadata_url,
scope_hint=e.scope_hint,
)
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
response = service.execute_auth_actions(auth_result)
return response
except MCPRefreshTokenError as e:
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)
raise ValueError(f"Failed to refresh token, please try to authorize again: {e}") from e
except (MCPError, ValueError) as e:
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)
raise ValueError(f"Failed to connect to MCP server: {e}") from e
@@ -1141,7 +1141,7 @@ class ToolMCPDetailApi(Resource):
@account_initialization_required
def get(self, provider_id):
_, tenant_id = current_account_with_tenant()
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
provider = service.get_provider(provider_id=provider_id, tenant_id=tenant_id)
return jsonable_encoder(ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True))
@@ -1155,7 +1155,7 @@ class ToolMCPListAllApi(Resource):
def get(self):
_, tenant_id = current_account_with_tenant()
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
# Skip sensitive data decryption for list view to improve performance
tools = service.list_providers(tenant_id=tenant_id, include_sensitive=False)
@@ -1170,7 +1170,7 @@ class ToolMCPUpdateApi(Resource):
@account_initialization_required
def get(self, provider_id):
_, tenant_id = current_account_with_tenant()
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
service = MCPToolManageService(session=session)
tools = service.list_provider_tools(
tenant_id=tenant_id,
@@ -1188,7 +1188,7 @@ class ToolMCPCallbackApi(Resource):
authorization_code = query.code
# Create service instance for handle_callback
- with Session(db.engine) as session, session.begin():
+ with sessionmaker(db.engine).begin() as session:
mcp_service = MCPToolManageService(session=session)
# handle_callback now returns state data and tokens
state_data, tokens = handle_callback(state_key, authorization_code)
diff --git a/api/controllers/console/workspace/trigger_providers.py b/api/controllers/console/workspace/trigger_providers.py
index 76d64cb97c..7a28a09861 100644
--- a/api/controllers/console/workspace/trigger_providers.py
+++ b/api/controllers/console/workspace/trigger_providers.py
@@ -5,7 +5,7 @@ from flask import make_response, redirect, request
from flask_restx import Resource
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel, model_validator
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest, Forbidden
from configs import dify_config
@@ -375,7 +375,7 @@ class TriggerSubscriptionDeleteApi(Resource):
assert user.current_tenant_id is not None
try:
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
# Delete trigger provider subscription
TriggerProviderService.delete_trigger_provider(
session=session,
@@ -388,7 +388,6 @@ class TriggerSubscriptionDeleteApi(Resource):
tenant_id=user.current_tenant_id,
subscription_id=subscription_id,
)
- session.commit()
return {"result": "success"}
except ValueError as e:
raise BadRequest(str(e))
diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py
index 88fd2c010f..a06b4fd195 100644
--- a/api/controllers/console/workspace/workspace.py
+++ b/api/controllers/console/workspace/workspace.py
@@ -155,7 +155,7 @@ class WorkspaceListApi(Resource):
@setup_required
@admin_required
def get(self):
- payload = request.args.to_dict(flat=True) # type: ignore
+ payload = request.args.to_dict(flat=True)
args = WorkspaceListQuery.model_validate(payload)
stmt = select(Tenant).order_by(Tenant.created_at.desc())
diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py
index 6785ba0c34..4b5fb7ca5b 100644
--- a/api/controllers/console/wraps.py
+++ b/api/controllers/console/wraps.py
@@ -4,7 +4,6 @@ import os
import time
from collections.abc import Callable
from functools import wraps
-from typing import ParamSpec, TypeVar
from flask import abort, request
from sqlalchemy import select
@@ -25,9 +24,6 @@ from services.operation_service import OperationService
from .error import NotInitValidateError, NotSetupError, UnauthorizedAndForceLogout
-P = ParamSpec("P")
-R = TypeVar("R")
-
# Field names for decryption
FIELD_NAME_PASSWORD = "password"
FIELD_NAME_CODE = "code"
@@ -37,7 +33,7 @@ ERROR_MSG_INVALID_ENCRYPTED_DATA = "Invalid encrypted data"
ERROR_MSG_INVALID_ENCRYPTED_CODE = "Invalid encrypted code"
-def account_initialization_required(view: Callable[P, R]) -> Callable[P, R]:
+def account_initialization_required[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
# check account initialization
@@ -50,7 +46,7 @@ def account_initialization_required(view: Callable[P, R]) -> Callable[P, R]:
return decorated
-def only_edition_cloud(view: Callable[P, R]):
+def only_edition_cloud[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
if dify_config.EDITION != "CLOUD":
@@ -61,7 +57,7 @@ def only_edition_cloud(view: Callable[P, R]):
return decorated
-def only_edition_enterprise(view: Callable[P, R]):
+def only_edition_enterprise[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
if not dify_config.ENTERPRISE_ENABLED:
@@ -72,7 +68,7 @@ def only_edition_enterprise(view: Callable[P, R]):
return decorated
-def only_edition_self_hosted(view: Callable[P, R]):
+def only_edition_self_hosted[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
if dify_config.EDITION != "SELF_HOSTED":
@@ -83,7 +79,7 @@ def only_edition_self_hosted(view: Callable[P, R]):
return decorated
-def cloud_edition_billing_enabled(view: Callable[P, R]):
+def cloud_edition_billing_enabled[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
_, current_tenant_id = current_account_with_tenant()
@@ -95,7 +91,7 @@ def cloud_edition_billing_enabled(view: Callable[P, R]):
return decorated
-def cloud_edition_billing_resource_check(resource: str):
+def cloud_edition_billing_resource_check[**P, R](resource: str) -> Callable[[Callable[P, R]], Callable[P, R]]:
def interceptor(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -137,7 +133,9 @@ def cloud_edition_billing_resource_check(resource: str):
return interceptor
-def cloud_edition_billing_knowledge_limit_check(resource: str):
+def cloud_edition_billing_knowledge_limit_check[**P, R](
+ resource: str,
+) -> Callable[[Callable[P, R]], Callable[P, R]]:
def interceptor(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -160,7 +158,7 @@ def cloud_edition_billing_knowledge_limit_check(resource: str):
return interceptor
-def cloud_edition_billing_rate_limit_check(resource: str):
+def cloud_edition_billing_rate_limit_check[**P, R](resource: str) -> Callable[[Callable[P, R]], Callable[P, R]]:
def interceptor(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -196,7 +194,7 @@ def cloud_edition_billing_rate_limit_check(resource: str):
return interceptor
-def cloud_utm_record(view: Callable[P, R]):
+def cloud_utm_record[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
with contextlib.suppress(Exception):
@@ -215,7 +213,7 @@ def cloud_utm_record(view: Callable[P, R]):
return decorated
-def setup_required(view: Callable[P, R]) -> Callable[P, R]:
+def setup_required[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
# check setup
@@ -229,7 +227,7 @@ def setup_required(view: Callable[P, R]) -> Callable[P, R]:
return decorated
-def enterprise_license_required(view: Callable[P, R]):
+def enterprise_license_required[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
settings = FeatureService.get_system_features()
@@ -241,7 +239,7 @@ def enterprise_license_required(view: Callable[P, R]):
return decorated
-def email_password_login_enabled(view: Callable[P, R]):
+def email_password_login_enabled[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_system_features()
@@ -254,7 +252,7 @@ def email_password_login_enabled(view: Callable[P, R]):
return decorated
-def email_register_enabled(view: Callable[P, R]):
+def email_register_enabled[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_system_features()
@@ -267,7 +265,7 @@ def email_register_enabled(view: Callable[P, R]):
return decorated
-def enable_change_email(view: Callable[P, R]):
+def enable_change_email[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_system_features()
@@ -280,7 +278,7 @@ def enable_change_email(view: Callable[P, R]):
return decorated
-def is_allow_transfer_owner(view: Callable[P, R]):
+def is_allow_transfer_owner[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
from libs.workspace_permission import check_workspace_owner_transfer_permission
@@ -293,7 +291,7 @@ def is_allow_transfer_owner(view: Callable[P, R]):
return decorated
-def knowledge_pipeline_publish_enabled(view: Callable[P, R]):
+def knowledge_pipeline_publish_enabled[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
_, current_tenant_id = current_account_with_tenant()
@@ -305,7 +303,7 @@ def knowledge_pipeline_publish_enabled(view: Callable[P, R]):
return decorated
-def edit_permission_required(f: Callable[P, R]):
+def edit_permission_required[**P, R](f: Callable[P, R]) -> Callable[P, R]:
@wraps(f)
def decorated_function(*args: P.args, **kwargs: P.kwargs):
from werkzeug.exceptions import Forbidden
@@ -323,7 +321,7 @@ def edit_permission_required(f: Callable[P, R]):
return decorated_function
-def is_admin_or_owner_required(f: Callable[P, R]):
+def is_admin_or_owner_required[**P, R](f: Callable[P, R]) -> Callable[P, R]:
@wraps(f)
def decorated_function(*args: P.args, **kwargs: P.kwargs):
from werkzeug.exceptions import Forbidden
@@ -339,7 +337,7 @@ def is_admin_or_owner_required(f: Callable[P, R]):
return decorated_function
-def annotation_import_rate_limit(view: Callable[P, R]):
+def annotation_import_rate_limit[**P, R](view: Callable[P, R]) -> Callable[P, R]:
"""
Rate limiting decorator for annotation import operations.
@@ -388,7 +386,7 @@ def annotation_import_rate_limit(view: Callable[P, R]):
return decorated
-def annotation_import_concurrency_limit(view: Callable[P, R]):
+def annotation_import_concurrency_limit[**P, R](view: Callable[P, R]) -> Callable[P, R]:
"""
Concurrency control decorator for annotation import operations.
@@ -455,7 +453,7 @@ def _decrypt_field(field_name: str, error_class: type[Exception], error_message:
payload[field_name] = decoded_value
-def decrypt_password_field(view: Callable[P, R]):
+def decrypt_password_field[**P, R](view: Callable[P, R]) -> Callable[P, R]:
"""
Decorator to decrypt password field in request payload.
@@ -477,7 +475,7 @@ def decrypt_password_field(view: Callable[P, R]):
return decorated
-def decrypt_code_field(view: Callable[P, R]):
+def decrypt_code_field[**P, R](view: Callable[P, R]) -> Callable[P, R]:
"""
Decorator to decrypt verification code field in request payload.
diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py
index d6e3ebfbcd..1d378c754c 100644
--- a/api/controllers/inner_api/plugin/wraps.py
+++ b/api/controllers/inner_api/plugin/wraps.py
@@ -1,21 +1,17 @@
from collections.abc import Callable
from functools import wraps
-from typing import ParamSpec, TypeVar
from flask import current_app, request
from flask_login import user_logged_in
from pydantic import BaseModel
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from extensions.ext_database import db
from libs.login import current_user
from models.account import Tenant
from models.model import DefaultEndUserSessionID, EndUser
-P = ParamSpec("P")
-R = TypeVar("R")
-
class TenantUserPayload(BaseModel):
tenant_id: str
@@ -33,7 +29,7 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser:
user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID
is_anonymous = user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID
try:
- with Session(db.engine) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
user_model = None
if is_anonymous:
@@ -56,7 +52,7 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser:
session_id=user_id,
)
session.add(user_model)
- session.commit()
+ session.flush()
session.refresh(user_model)
except Exception:
@@ -65,9 +61,9 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser:
return user_model
-def get_user_tenant(view_func: Callable[P, R]):
+def get_user_tenant[**P, R](view_func: Callable[P, R]) -> Callable[P, R]:
@wraps(view_func)
- def decorated_view(*args: P.args, **kwargs: P.kwargs):
+ def decorated_view(*args: P.args, **kwargs: P.kwargs) -> R:
payload = TenantUserPayload.model_validate(request.get_json(silent=True) or {})
user_id = payload.user_id
@@ -97,10 +93,14 @@ def get_user_tenant(view_func: Callable[P, R]):
return decorated_view
-def plugin_data(view: Callable[P, R] | None = None, *, payload_type: type[BaseModel]):
- def decorator(view_func: Callable[P, R]):
+def plugin_data[**P, R](
+ view: Callable[P, R] | None = None,
+ *,
+ payload_type: type[BaseModel],
+) -> Callable[P, R] | Callable[[Callable[P, R]], Callable[P, R]]:
+ def decorator(view_func: Callable[P, R]) -> Callable[P, R]:
@wraps(view_func)
- def decorated_view(*args: P.args, **kwargs: P.kwargs):
+ def decorated_view(*args: P.args, **kwargs: P.kwargs) -> R:
try:
data = request.get_json()
except Exception:
diff --git a/api/controllers/inner_api/wraps.py b/api/controllers/inner_api/wraps.py
index 7c60b316e8..874fd8a7e3 100644
--- a/api/controllers/inner_api/wraps.py
+++ b/api/controllers/inner_api/wraps.py
@@ -3,10 +3,7 @@ from collections.abc import Callable
from functools import wraps
from hashlib import sha1
from hmac import new as hmac_new
-from typing import ParamSpec, TypeVar
-P = ParamSpec("P")
-R = TypeVar("R")
from flask import abort, request
from configs import dify_config
@@ -14,9 +11,9 @@ from extensions.ext_database import db
from models.model import EndUser
-def billing_inner_api_only(view: Callable[P, R]):
+def billing_inner_api_only[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
- def decorated(*args: P.args, **kwargs: P.kwargs):
+ def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
if not dify_config.INNER_API:
abort(404)
@@ -30,9 +27,9 @@ def billing_inner_api_only(view: Callable[P, R]):
return decorated
-def enterprise_inner_api_only(view: Callable[P, R]):
+def enterprise_inner_api_only[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
- def decorated(*args: P.args, **kwargs: P.kwargs):
+ def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
if not dify_config.INNER_API:
abort(404)
@@ -46,9 +43,9 @@ def enterprise_inner_api_only(view: Callable[P, R]):
return decorated
-def enterprise_inner_api_user_auth(view: Callable[P, R]):
+def enterprise_inner_api_user_auth[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
- def decorated(*args: P.args, **kwargs: P.kwargs):
+ def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
if not dify_config.INNER_API:
return view(*args, **kwargs)
@@ -82,9 +79,9 @@ def enterprise_inner_api_user_auth(view: Callable[P, R]):
return decorated
-def plugin_inner_api_only(view: Callable[P, R]):
+def plugin_inner_api_only[**P, R](view: Callable[P, R]) -> Callable[P, R]:
@wraps(view)
- def decorated(*args: P.args, **kwargs: P.kwargs):
+ def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
if not dify_config.PLUGIN_DAEMON_KEY:
abort(404)
diff --git a/api/controllers/mcp/mcp.py b/api/controllers/mcp/mcp.py
index 3d00f77e79..3c59535a48 100644
--- a/api/controllers/mcp/mcp.py
+++ b/api/controllers/mcp/mcp.py
@@ -4,7 +4,7 @@ from flask import Response
from flask_restx import Resource
from graphon.variables.input_entities import VariableEntity
from pydantic import BaseModel, Field, ValidationError
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, sessionmaker
from controllers.common.schema import register_schema_model
from controllers.mcp import mcp_ns
@@ -67,7 +67,7 @@ class MCPAppApi(Resource):
request_id: Union[int, str] | None = args.id
mcp_request = self._parse_mcp_request(args.model_dump(exclude_none=True))
- with Session(db.engine, expire_on_commit=False) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
# Get MCP server and app
mcp_server, app = self._get_mcp_server_and_app(server_code, session)
self._validate_server_status(mcp_server)
@@ -174,6 +174,7 @@ class MCPAppApi(Resource):
required=variable.get("required", False),
max_length=variable.get("max_length"),
options=variable.get("options") or [],
+ json_schema=variable.get("json_schema"),
)
def _parse_mcp_request(self, args: dict) -> mcp_types.ClientRequest | mcp_types.ClientNotification:
@@ -188,7 +189,7 @@ class MCPAppApi(Resource):
def _retrieve_end_user(self, tenant_id: str, mcp_server_id: str) -> EndUser | None:
"""Get end user - manages its own database session"""
- with Session(db.engine, expire_on_commit=False) as session, session.begin():
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
return (
session.query(EndUser)
.where(EndUser.tenant_id == tenant_id)
@@ -228,9 +229,7 @@ class MCPAppApi(Resource):
if not end_user and isinstance(mcp_request.root, mcp_types.InitializeRequest):
client_info = mcp_request.root.params.clientInfo
client_name = f"{client_info.name}@{client_info.version}"
- # Commit the session before creating end user to avoid transaction conflicts
- session.commit()
- with Session(db.engine, expire_on_commit=False) as create_session, create_session.begin():
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as create_session:
end_user = self._create_end_user(client_name, app.tenant_id, app.id, mcp_server.id, create_session)
return handle_mcp_request(app, mcp_request, user_input_form, mcp_server, end_user, request_id)
diff --git a/api/controllers/service_api/app/conversation.py b/api/controllers/service_api/app/conversation.py
index edbf011656..8c9a3eb5e9 100644
--- a/api/controllers/service_api/app/conversation.py
+++ b/api/controllers/service_api/app/conversation.py
@@ -3,7 +3,7 @@ from typing import Any, Literal
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field, TypeAdapter, field_validator, model_validator
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest, NotFound
import services
@@ -116,7 +116,7 @@ class ConversationApi(Resource):
last_id = str(query_args.last_id) if query_args.last_id else None
try:
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
pagination = ConversationService.pagination_by_last_id(
session=session,
app_model=app_model,
diff --git a/api/controllers/service_api/app/workflow.py b/api/controllers/service_api/app/workflow.py
index 1759075139..d7992a2a3a 100644
--- a/api/controllers/service_api/app/workflow.py
+++ b/api/controllers/service_api/app/workflow.py
@@ -8,7 +8,7 @@ from graphon.enums import WorkflowExecutionStatus
from graphon.graph_engine.manager import GraphEngineManager
from graphon.model_runtime.errors.invoke import InvokeError
from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session, sessionmaker
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest, InternalServerError, NotFound
from controllers.common.schema import register_schema_models
@@ -314,7 +314,7 @@ class WorkflowAppLogApi(Resource):
# get paginate workflow app logs
workflow_app_service = WorkflowAppService()
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
session=session,
app_model=app_model,
diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py
index 1d52b8a737..2dd916bb31 100644
--- a/api/controllers/service_api/wraps.py
+++ b/api/controllers/service_api/wraps.py
@@ -3,7 +3,7 @@ import time
from collections.abc import Callable
from enum import StrEnum, auto
from functools import wraps
-from typing import Concatenate, ParamSpec, TypeVar, cast, overload
+from typing import Any, cast, overload
from flask import current_app, request
from flask_login import user_logged_in
@@ -23,10 +23,6 @@ from services.api_token_service import ApiTokenCache, fetch_token_with_single_fl
from services.end_user_service import EndUserService
from services.feature_service import FeatureService
-P = ParamSpec("P")
-R = TypeVar("R")
-T = TypeVar("T")
-
logger = logging.getLogger(__name__)
@@ -46,16 +42,16 @@ class FetchUserArg(BaseModel):
@overload
-def validate_app_token(view: Callable[P, R]) -> Callable[P, R]: ...
+def validate_app_token[**P, R](view: Callable[P, R]) -> Callable[P, R]: ...
@overload
-def validate_app_token(
+def validate_app_token[**P, R](
view: None = None, *, fetch_user_arg: FetchUserArg | None = None
) -> Callable[[Callable[P, R]], Callable[P, R]]: ...
-def validate_app_token(
+def validate_app_token[**P, R](
view: Callable[P, R] | None = None, *, fetch_user_arg: FetchUserArg | None = None
) -> Callable[P, R] | Callable[[Callable[P, R]], Callable[P, R]]:
def decorator(view_func: Callable[P, R]) -> Callable[P, R]:
@@ -136,7 +132,10 @@ def validate_app_token(
return decorator(view)
-def cloud_edition_billing_resource_check(resource: str, api_token_type: str):
+def cloud_edition_billing_resource_check[**P, R](
+ resource: str,
+ api_token_type: str,
+) -> Callable[[Callable[P, R]], Callable[P, R]]:
def interceptor(view: Callable[P, R]):
def decorated(*args: P.args, **kwargs: P.kwargs):
api_token = validate_and_get_api_token(api_token_type)
@@ -166,7 +165,10 @@ def cloud_edition_billing_resource_check(resource: str, api_token_type: str):
return interceptor
-def cloud_edition_billing_knowledge_limit_check(resource: str, api_token_type: str):
+def cloud_edition_billing_knowledge_limit_check[**P, R](
+ resource: str,
+ api_token_type: str,
+) -> Callable[[Callable[P, R]], Callable[P, R]]:
def interceptor(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -188,7 +190,10 @@ def cloud_edition_billing_knowledge_limit_check(resource: str, api_token_type: s
return interceptor
-def cloud_edition_billing_rate_limit_check(resource: str, api_token_type: str):
+def cloud_edition_billing_rate_limit_check[**P, R](
+ resource: str,
+ api_token_type: str,
+) -> Callable[[Callable[P, R]], Callable[P, R]]:
def interceptor(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -225,20 +230,12 @@ def cloud_edition_billing_rate_limit_check(resource: str, api_token_type: str):
return interceptor
-@overload
-def validate_dataset_token(view: Callable[Concatenate[T, P], R]) -> Callable[P, R]: ...
-
-
-@overload
-def validate_dataset_token(view: None = None) -> Callable[[Callable[Concatenate[T, P], R]], Callable[P, R]]: ...
-
-
def validate_dataset_token(
- view: Callable[Concatenate[T, P], R] | None = None,
-) -> Callable[P, R] | Callable[[Callable[Concatenate[T, P], R]], Callable[P, R]]:
- def decorator(view_func: Callable[Concatenate[T, P], R]) -> Callable[P, R]:
+ view: Callable[..., Any] | None = None,
+) -> Callable[..., Any] | Callable[[Callable[..., Any]], Callable[..., Any]]:
+ def decorator(view_func: Callable[..., Any]) -> Callable[..., Any]:
@wraps(view_func)
- def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
+ def decorated(*args: Any, **kwargs: Any) -> Any:
api_token = validate_and_get_api_token("dataset")
# get url path dataset_id from positional args or kwargs
@@ -308,7 +305,10 @@ def validate_dataset_token(
raise Unauthorized("Tenant owner account does not exist.")
else:
raise Unauthorized("Tenant does not exist.")
- return view_func(api_token.tenant_id, *args, **kwargs) # type: ignore[arg-type]
+ if args and isinstance(args[0], Resource):
+ return view_func(args[0], api_token.tenant_id, *args[1:], **kwargs)
+
+ return view_func(api_token.tenant_id, *args, **kwargs)
return decorated
diff --git a/api/controllers/web/conversation.py b/api/controllers/web/conversation.py
index e76649495a..d5baa5fb7d 100644
--- a/api/controllers/web/conversation.py
+++ b/api/controllers/web/conversation.py
@@ -2,7 +2,7 @@ from typing import Literal
from flask import request
from pydantic import BaseModel, Field, TypeAdapter, field_validator, model_validator
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound
from controllers.common.schema import register_schema_models
@@ -99,7 +99,7 @@ class ConversationListApi(WebApiResource):
query = ConversationListQuery.model_validate(raw_args)
try:
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
pagination = WebConversationService.pagination_by_last_id(
session=session,
app_model=app_model,
diff --git a/api/controllers/web/forgot_password.py b/api/controllers/web/forgot_password.py
index 91d206f727..d69571cc9c 100644
--- a/api/controllers/web/forgot_password.py
+++ b/api/controllers/web/forgot_password.py
@@ -4,7 +4,7 @@ import secrets
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field, field_validator
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from controllers.common.schema import register_schema_models
from controllers.console.auth.error import (
@@ -81,7 +81,7 @@ class ForgotPasswordSendEmailApi(Resource):
else:
language = "en-US"
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
account = AccountService.get_account_by_email_with_case_fallback(request_email, session=session)
token = None
if account is None:
@@ -180,18 +180,17 @@ class ForgotPasswordResetApi(Resource):
email = reset_data.get("email", "")
- with Session(db.engine) as session:
+ with sessionmaker(db.engine).begin() as session:
account = AccountService.get_account_by_email_with_case_fallback(email, session=session)
if account:
- self._update_existing_account(account, password_hashed, salt, session)
+ self._update_existing_account(account, password_hashed, salt)
else:
raise AuthenticationFailedError()
return {"result": "success"}
- def _update_existing_account(self, account: Account, password_hashed, salt, session):
+ def _update_existing_account(self, account: Account, password_hashed, salt):
# Update existing account credentials
account.password = base64.b64encode(password_hashed).decode()
account.password_salt = base64.b64encode(salt).decode()
- session.commit()
diff --git a/api/controllers/web/wraps.py b/api/controllers/web/wraps.py
index 152137f39c..11650fa4b5 100644
--- a/api/controllers/web/wraps.py
+++ b/api/controllers/web/wraps.py
@@ -1,12 +1,12 @@
from collections.abc import Callable
from datetime import UTC, datetime
from functools import wraps
-from typing import Concatenate, ParamSpec, TypeVar
+from typing import Concatenate
from flask import request
from flask_restx import Resource
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest, NotFound, Unauthorized
from constants import HEADER_NAME_APP_CODE
@@ -20,14 +20,13 @@ from services.enterprise.enterprise_service import EnterpriseService, WebAppSett
from services.feature_service import FeatureService
from services.webapp_auth_service import WebAppAuthService
-P = ParamSpec("P")
-R = TypeVar("R")
-
-def validate_jwt_token(view: Callable[Concatenate[App, EndUser, P], R] | None = None):
- def decorator(view: Callable[Concatenate[App, EndUser, P], R]):
+def validate_jwt_token[**P, R](
+ view: Callable[Concatenate[App, EndUser, P], R] | None = None,
+) -> Callable[P, R] | Callable[[Callable[Concatenate[App, EndUser, P], R]], Callable[P, R]]:
+ def decorator(view: Callable[Concatenate[App, EndUser, P], R]) -> Callable[P, R]:
@wraps(view)
- def decorated(*args: P.args, **kwargs: P.kwargs):
+ def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
app_model, end_user = decode_jwt_token()
return view(app_model, end_user, *args, **kwargs)
@@ -38,7 +37,7 @@ def validate_jwt_token(view: Callable[Concatenate[App, EndUser, P], R] | None =
return decorator
-def decode_jwt_token(app_code: str | None = None, user_id: str | None = None):
+def decode_jwt_token(app_code: str | None = None, user_id: str | None = None) -> tuple[App, EndUser]:
system_features = FeatureService.get_system_features()
if not app_code:
app_code = str(request.headers.get(HEADER_NAME_APP_CODE))
@@ -49,7 +48,7 @@ def decode_jwt_token(app_code: str | None = None, user_id: str | None = None):
decoded = PassportService().verify(tk)
app_code = decoded.get("app_code")
app_id = decoded.get("app_id")
- with Session(db.engine, expire_on_commit=False) as session:
+ with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
app_model = session.scalar(select(App).where(App.id == app_id))
site = session.scalar(select(Site).where(Site.code == app_code))
if not app_model:
diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py
index aa2b65766f..985ded0f74 100644
--- a/api/core/app/apps/advanced_chat/app_generator.py
+++ b/api/core/app/apps/advanced_chat/app_generator.py
@@ -5,7 +5,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Literal, Union, overload
+from typing import TYPE_CHECKING, Any, Literal, overload
from flask import Flask, current_app
from pydantic import ValidationError
@@ -68,7 +68,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
self,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
workflow_run_id: str,
@@ -81,7 +81,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
self,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
workflow_run_id: str,
@@ -94,7 +94,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
self,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
workflow_run_id: str,
@@ -106,7 +106,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
self,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
workflow_run_id: str,
@@ -239,7 +239,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
*,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
conversation: Conversation,
message: Message,
application_generate_entity: AdvancedChatAppGenerateEntity,
@@ -271,9 +271,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
workflow: Workflow,
node_id: str,
user: Account | EndUser,
- args: Mapping,
+ args: Mapping[str, Any],
streaming: bool = True,
- ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
@@ -359,7 +359,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
user: Account | EndUser,
args: LoopNodeRunPayload,
streaming: bool = True,
- ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
@@ -439,7 +439,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
self,
*,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
invoke_from: InvokeFrom,
application_generate_entity: AdvancedChatAppGenerateEntity,
workflow_execution_repository: WorkflowExecutionRepository,
@@ -451,7 +451,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
pause_state_config: PauseStateLayerConfig | None = None,
graph_runtime_state: GraphRuntimeState | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (),
- ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
@@ -653,10 +653,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
queue_manager: AppQueueManager,
conversation: ConversationSnapshot,
message: MessageSnapshot,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
draft_var_saver_factory: DraftVariableSaverFactory,
stream: bool = False,
- ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
+ ) -> ChatbotAppBlockingResponse | Generator[ChatbotAppStreamResponse, None, None]:
"""
Handle response.
:param application_generate_entity: application generate entity
diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py
index bb258af4c1..5872f6b264 100644
--- a/api/core/app/apps/agent_chat/app_generator.py
+++ b/api/core/app/apps/agent_chat/app_generator.py
@@ -3,7 +3,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping
-from typing import Any, Literal, Union, overload
+from typing import Any, Literal, overload
from flask import Flask, current_app
from graphon.model_runtime.errors.invoke import InvokeAuthorizationError
@@ -37,7 +37,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
self,
*,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[False],
@@ -48,7 +48,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
self,
*,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[True],
@@ -59,21 +59,21 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
self,
*,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool,
- ) -> Union[Mapping, Generator[Mapping | str, None, None]]: ...
+ ) -> Mapping | Generator[Mapping | str, None, None]: ...
def generate(
self,
*,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = True,
- ) -> Union[Mapping, Generator[Mapping | str, None, None]]:
+ ) -> Mapping | Generator[Mapping | str, None, None]:
"""
Generate App response.
diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py
index b675a87382..891dcece73 100644
--- a/api/core/app/apps/chat/app_generator.py
+++ b/api/core/app/apps/chat/app_generator.py
@@ -3,7 +3,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping
-from typing import Any, Literal, Union, overload
+from typing import Any, Literal, overload
from flask import Flask, copy_current_request_context, current_app
from graphon.model_runtime.errors.invoke import InvokeAuthorizationError
@@ -36,7 +36,7 @@ class ChatAppGenerator(MessageBasedAppGenerator):
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[True],
@@ -46,7 +46,7 @@ class ChatAppGenerator(MessageBasedAppGenerator):
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[False],
@@ -56,20 +56,20 @@ class ChatAppGenerator(MessageBasedAppGenerator):
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool,
- ) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]: ...
+ ) -> Mapping[str, Any] | Generator[Mapping[str, Any] | str, None, None]: ...
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = True,
- ) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]:
+ ) -> Mapping[str, Any] | Generator[Mapping[str, Any] | str, None, None]:
"""
Generate App response.
diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py
index a62c5b80b5..61339b316a 100644
--- a/api/core/app/apps/completion/app_generator.py
+++ b/api/core/app/apps/completion/app_generator.py
@@ -3,7 +3,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping
-from typing import Any, Literal, Union, overload
+from typing import Any, Literal, overload
from flask import Flask, copy_current_request_context, current_app
from graphon.model_runtime.errors.invoke import InvokeAuthorizationError
@@ -36,7 +36,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[True],
@@ -46,7 +46,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[False],
@@ -56,20 +56,20 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = False,
- ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]: ...
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]: ...
def generate(
self,
app_model: App,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = True,
- ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
@@ -244,10 +244,10 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
self,
app_model: App,
message_id: str,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
invoke_from: InvokeFrom,
stream: bool = True,
- ) -> Union[Mapping, Generator[Mapping | str, None, None]]:
+ ) -> Mapping | Generator[Mapping | str, None, None]:
"""
Generate App response.
diff --git a/api/core/app/apps/pipeline/pipeline_generator.py b/api/core/app/apps/pipeline/pipeline_generator.py
index fa242003a2..139c7e73e0 100644
--- a/api/core/app/apps/pipeline/pipeline_generator.py
+++ b/api/core/app/apps/pipeline/pipeline_generator.py
@@ -7,7 +7,7 @@ import threading
import time
import uuid
from collections.abc import Generator, Mapping
-from typing import Any, Literal, Union, cast, overload
+from typing import Any, Literal, cast, overload
from flask import Flask, current_app
from graphon.model_runtime.errors.invoke import InvokeAuthorizationError
@@ -62,7 +62,7 @@ class PipelineGenerator(BaseAppGenerator):
*,
pipeline: Pipeline,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[True],
@@ -77,7 +77,7 @@ class PipelineGenerator(BaseAppGenerator):
*,
pipeline: Pipeline,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[False],
@@ -92,28 +92,28 @@ class PipelineGenerator(BaseAppGenerator):
*,
pipeline: Pipeline,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool,
call_depth: int,
workflow_thread_pool_id: str | None,
is_retry: bool = False,
- ) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]: ...
+ ) -> Mapping[str, Any] | Generator[Mapping | str, None, None]: ...
def generate(
self,
*,
pipeline: Pipeline,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = True,
call_depth: int = 0,
workflow_thread_pool_id: str | None = None,
is_retry: bool = False,
- ) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None], None]:
+ ) -> Mapping[str, Any] | Generator[Mapping | str, None, None] | None:
# Add null check for dataset
with Session(db.engine, expire_on_commit=False) as session:
@@ -278,7 +278,7 @@ class PipelineGenerator(BaseAppGenerator):
context: contextvars.Context,
pipeline: Pipeline,
workflow_id: str,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
application_generate_entity: RagPipelineGenerateEntity,
invoke_from: InvokeFrom,
workflow_execution_repository: WorkflowExecutionRepository,
@@ -286,7 +286,7 @@ class PipelineGenerator(BaseAppGenerator):
streaming: bool = True,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
workflow_thread_pool_id: str | None = None,
- ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
@@ -302,7 +302,7 @@ class PipelineGenerator(BaseAppGenerator):
"""
with preserve_flask_contexts(flask_app, context_vars=context):
# init queue manager
- workflow = db.session.query(Workflow).where(Workflow.id == workflow_id).first()
+ workflow = db.session.get(Workflow, workflow_id)
if not workflow:
raise ValueError(f"Workflow not found: {workflow_id}")
queue_manager = PipelineQueueManager(
@@ -624,10 +624,10 @@ class PipelineGenerator(BaseAppGenerator):
application_generate_entity: RagPipelineGenerateEntity,
workflow: Workflow,
queue_manager: AppQueueManager,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
draft_var_saver_factory: DraftVariableSaverFactory,
stream: bool = False,
- ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
+ ) -> WorkflowAppBlockingResponse | Generator[WorkflowAppStreamResponse, None, None]:
"""
Handle response.
:param application_generate_entity: application generate entity
@@ -668,7 +668,7 @@ class PipelineGenerator(BaseAppGenerator):
datasource_info: Mapping[str, Any],
created_from: str,
position: int,
- account: Union[Account, EndUser],
+ account: Account | EndUser,
batch: str,
document_form: str,
):
@@ -715,7 +715,7 @@ class PipelineGenerator(BaseAppGenerator):
pipeline: Pipeline,
workflow: Workflow,
start_node_id: str,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
) -> list[Mapping[str, Any]]:
"""
Format datasource info list.
diff --git a/api/core/app/apps/pipeline/pipeline_runner.py b/api/core/app/apps/pipeline/pipeline_runner.py
index 4c188dac68..b4d2310da8 100644
--- a/api/core/app/apps/pipeline/pipeline_runner.py
+++ b/api/core/app/apps/pipeline/pipeline_runner.py
@@ -9,6 +9,7 @@ from graphon.graph_events import GraphEngineEvent, GraphRunFailedEvent
from graphon.runtime import GraphRuntimeState, VariablePool
from graphon.variable_loader import VariableLoader
from graphon.variables.variables import RAGPipelineVariable, RAGPipelineVariableInput
+from sqlalchemy import select
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.pipeline.pipeline_config_manager import PipelineConfig
@@ -84,13 +85,13 @@ class PipelineRunner(WorkflowBasedAppRunner):
user_id = None
if invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}:
- end_user = db.session.query(EndUser).where(EndUser.id == self.application_generate_entity.user_id).first()
+ end_user = db.session.get(EndUser, self.application_generate_entity.user_id)
if end_user:
user_id = end_user.session_id
else:
user_id = self.application_generate_entity.user_id
- pipeline = db.session.query(Pipeline).where(Pipeline.id == app_config.app_id).first()
+ pipeline = db.session.get(Pipeline, app_config.app_id)
if not pipeline:
raise ValueError("Pipeline not found")
@@ -213,10 +214,10 @@ class PipelineRunner(WorkflowBasedAppRunner):
Get workflow
"""
# fetch workflow by workflow_id
- workflow = (
- db.session.query(Workflow)
+ workflow = db.session.scalar(
+ select(Workflow)
.where(Workflow.tenant_id == pipeline.tenant_id, Workflow.app_id == pipeline.id, Workflow.id == workflow_id)
- .first()
+ .limit(1)
)
# return workflow
@@ -297,10 +298,8 @@ class PipelineRunner(WorkflowBasedAppRunner):
"""
if isinstance(event, GraphRunFailedEvent):
if document_id and dataset_id:
- document = (
- db.session.query(Document)
- .where(Document.id == document_id, Document.dataset_id == dataset_id)
- .first()
+ document = db.session.scalar(
+ select(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).limit(1)
)
if document:
document.indexing_status = "error"
diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py
index 9618ab35c6..6074e81d1e 100644
--- a/api/core/app/apps/workflow/app_generator.py
+++ b/api/core/app/apps/workflow/app_generator.py
@@ -5,7 +5,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Literal, Union, overload
+from typing import TYPE_CHECKING, Any, Literal, overload
from flask import Flask, current_app
from graphon.graph_engine.layers import GraphEngineLayer
@@ -64,7 +64,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
*,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[True],
@@ -82,7 +82,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
*,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[False],
@@ -100,7 +100,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
*,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool,
@@ -110,14 +110,14 @@ class WorkflowAppGenerator(BaseAppGenerator):
root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (),
pause_state_config: PauseStateLayerConfig | None = None,
- ) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]: ...
+ ) -> Mapping[str, Any] | Generator[Mapping[str, Any] | str, None, None]: ...
def generate(
self,
*,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = True,
@@ -127,7 +127,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (),
pause_state_config: PauseStateLayerConfig | None = None,
- ) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]:
+ ) -> Mapping[str, Any] | Generator[Mapping[str, Any] | str, None, None]:
with self._bind_file_access_scope(tenant_id=app_model.tenant_id, user=user, invoke_from=invoke_from):
files: Sequence[Mapping[str, Any]] = args.get("files") or []
@@ -237,7 +237,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
*,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
application_generate_entity: WorkflowAppGenerateEntity,
graph_runtime_state: GraphRuntimeState,
workflow_execution_repository: WorkflowExecutionRepository,
@@ -245,7 +245,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
graph_engine_layers: Sequence[GraphEngineLayer] = (),
pause_state_config: PauseStateLayerConfig | None = None,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
- ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Resume a paused workflow execution using the persisted runtime state.
"""
@@ -269,7 +269,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
*,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
application_generate_entity: WorkflowAppGenerateEntity,
invoke_from: InvokeFrom,
workflow_execution_repository: WorkflowExecutionRepository,
@@ -280,7 +280,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
graph_engine_layers: Sequence[GraphEngineLayer] = (),
graph_runtime_state: GraphRuntimeState | None = None,
pause_state_config: PauseStateLayerConfig | None = None,
- ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
+ ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
@@ -609,10 +609,10 @@ class WorkflowAppGenerator(BaseAppGenerator):
application_generate_entity: WorkflowAppGenerateEntity,
workflow: Workflow,
queue_manager: AppQueueManager,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
draft_var_saver_factory: DraftVariableSaverFactory,
stream: bool = False,
- ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
+ ) -> WorkflowAppBlockingResponse | Generator[WorkflowAppStreamResponse, None, None]:
"""
Handle response.
:param application_generate_entity: application generate entity
diff --git a/api/core/app/layers/pause_state_persist_layer.py b/api/core/app/layers/pause_state_persist_layer.py
index 79a5442130..c027f42788 100644
--- a/api/core/app/layers/pause_state_persist_layer.py
+++ b/api/core/app/layers/pause_state_persist_layer.py
@@ -1,5 +1,5 @@
from dataclasses import dataclass
-from typing import Annotated, Literal, Self, TypeAlias
+from typing import Annotated, Literal, Self
from graphon.graph_engine.layers import GraphEngineLayer
from graphon.graph_events import GraphEngineEvent, GraphRunPausedEvent
@@ -27,7 +27,7 @@ class _AdvancedChatAppGenerateEntityWrapper(BaseModel):
entity: AdvancedChatAppGenerateEntity
-_GenerateEntityUnion: TypeAlias = Annotated[
+type _GenerateEntityUnion = Annotated[
_WorkflowGenerateEntityWrapper | _AdvancedChatAppGenerateEntityWrapper,
Field(discriminator="type"),
]
diff --git a/api/core/app/llm/quota.py b/api/core/app/llm/quota.py
index 63d2235358..182f1b767d 100644
--- a/api/core/app/llm/quota.py
+++ b/api/core/app/llm/quota.py
@@ -81,7 +81,7 @@ def deduct_llm_quota(*, tenant_id: str, model_instance: ModelInstance, usage: LL
# TODO: Use provider name with prefix after the data migration.
Provider.provider_name == ModelProviderID(model_instance.provider).provider_name,
Provider.provider_type == ProviderType.SYSTEM.value,
- Provider.quota_type == system_configuration.current_quota_type.value,
+ Provider.quota_type == system_configuration.current_quota_type,
Provider.quota_limit > Provider.quota_used,
)
.values(
diff --git a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py
index a410fac558..e0e6a6f5c3 100644
--- a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py
+++ b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py
@@ -2,7 +2,7 @@ import logging
import time
from collections.abc import Generator
from threading import Thread
-from typing import Any, Union, cast
+from typing import Any, cast
from graphon.file import FileTransferMethod
from graphon.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
@@ -72,14 +72,12 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
"""
_task_state: EasyUITaskState
- _application_generate_entity: Union[ChatAppGenerateEntity, CompletionAppGenerateEntity, AgentChatAppGenerateEntity]
+ _application_generate_entity: ChatAppGenerateEntity | CompletionAppGenerateEntity | AgentChatAppGenerateEntity
_precomputed_event_type: StreamEvent | None = None
def __init__(
self,
- application_generate_entity: Union[
- ChatAppGenerateEntity, CompletionAppGenerateEntity, AgentChatAppGenerateEntity
- ],
+ application_generate_entity: ChatAppGenerateEntity | CompletionAppGenerateEntity | AgentChatAppGenerateEntity,
queue_manager: AppQueueManager,
conversation: Conversation,
message: Message,
@@ -117,11 +115,11 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
def process(
self,
- ) -> Union[
- ChatbotAppBlockingResponse,
- CompletionAppBlockingResponse,
- Generator[Union[ChatbotAppStreamResponse, CompletionAppStreamResponse], None, None],
- ]:
+ ) -> (
+ ChatbotAppBlockingResponse
+ | CompletionAppBlockingResponse
+ | Generator[ChatbotAppStreamResponse | CompletionAppStreamResponse, None, None]
+ ):
if self._application_generate_entity.app_config.app_mode != AppMode.COMPLETION:
# start generate conversation name thread
self._conversation_name_generate_thread = self._message_cycle_manager.generate_conversation_name(
@@ -136,7 +134,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
def _to_blocking_response(
self, generator: Generator[StreamResponse, None, None]
- ) -> Union[ChatbotAppBlockingResponse, CompletionAppBlockingResponse]:
+ ) -> ChatbotAppBlockingResponse | CompletionAppBlockingResponse:
"""
Process blocking response.
:return:
@@ -148,7 +146,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
extras = {"usage": self._task_state.llm_result.usage.model_dump()}
if self._task_state.metadata:
extras["metadata"] = self._task_state.metadata.model_dump()
- response: Union[ChatbotAppBlockingResponse, CompletionAppBlockingResponse]
+ response: ChatbotAppBlockingResponse | CompletionAppBlockingResponse
if self._conversation_mode == AppMode.COMPLETION:
response = CompletionAppBlockingResponse(
task_id=self._application_generate_entity.task_id,
@@ -183,7 +181,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
def _to_stream_response(
self, generator: Generator[StreamResponse, None, None]
- ) -> Generator[Union[ChatbotAppStreamResponse, CompletionAppStreamResponse], None, None]:
+ ) -> Generator[ChatbotAppStreamResponse | CompletionAppStreamResponse, None, None]:
"""
To stream response.
:return:
diff --git a/api/core/app/workflow/layers/llm_quota.py b/api/core/app/workflow/layers/llm_quota.py
index 48cabaf4d0..c577ce0754 100644
--- a/api/core/app/workflow/layers/llm_quota.py
+++ b/api/core/app/workflow/layers/llm_quota.py
@@ -5,14 +5,13 @@ This layer centralizes model-quota deduction outside node implementations.
"""
import logging
-from typing import TYPE_CHECKING, cast, final
+from typing import TYPE_CHECKING, cast, final, override
from graphon.enums import BuiltinNodeTypes
from graphon.graph_engine.entities.commands import AbortCommand, CommandType
from graphon.graph_engine.layers import GraphEngineLayer
from graphon.graph_events import GraphEngineEvent, GraphNodeEventBase, NodeRunSucceededEvent
from graphon.nodes.base.node import Node
-from typing_extensions import override
from core.app.entities.app_invoke_entities import DIFY_RUN_CONTEXT_KEY, DifyRunContext
from core.app.llm import deduct_llm_quota, ensure_llm_quota_available
diff --git a/api/core/app/workflow/layers/observability.py b/api/core/app/workflow/layers/observability.py
index c4ed54a140..99e8015c0b 100644
--- a/api/core/app/workflow/layers/observability.py
+++ b/api/core/app/workflow/layers/observability.py
@@ -10,7 +10,7 @@ associates with the node span.
import logging
from contextvars import Token
from dataclasses import dataclass
-from typing import cast, final
+from typing import cast, final, override
from graphon.enums import BuiltinNodeTypes, NodeType
from graphon.graph_engine.layers import GraphEngineLayer
@@ -18,7 +18,6 @@ from graphon.graph_events import GraphNodeEventBase
from graphon.nodes.base.node import Node
from opentelemetry import context as context_api
from opentelemetry.trace import Span, SpanKind, Tracer, get_tracer, set_span_in_context
-from typing_extensions import override
from configs import dify_config
from extensions.otel.parser import (
diff --git a/api/core/datasource/datasource_file_manager.py b/api/core/datasource/datasource_file_manager.py
index fe40d8f0e5..492b507aa9 100644
--- a/api/core/datasource/datasource_file_manager.py
+++ b/api/core/datasource/datasource_file_manager.py
@@ -153,7 +153,7 @@ class DatasourceFileManager:
:return: the binary of the file, mime type
"""
- upload_file: UploadFile | None = db.session.query(UploadFile).where(UploadFile.id == id).first()
+ upload_file: UploadFile | None = db.session.get(UploadFile, id)
if not upload_file:
return None
@@ -171,7 +171,7 @@ class DatasourceFileManager:
:return: the binary of the file, mime type
"""
- message_file: MessageFile | None = db.session.query(MessageFile).where(MessageFile.id == id).first()
+ message_file: MessageFile | None = db.session.get(MessageFile, id)
# Check if message_file is not None
if message_file is not None:
@@ -185,7 +185,7 @@ class DatasourceFileManager:
else:
tool_file_id = None
- tool_file: ToolFile | None = db.session.query(ToolFile).where(ToolFile.id == tool_file_id).first()
+ tool_file: ToolFile | None = db.session.get(ToolFile, tool_file_id)
if not tool_file:
return None
@@ -203,7 +203,7 @@ class DatasourceFileManager:
:return: the binary of the file, mime type
"""
- upload_file: UploadFile | None = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first()
+ upload_file: UploadFile | None = db.session.get(UploadFile, upload_file_id)
if not upload_file:
return None, None
diff --git a/api/core/entities/execution_extra_content.py b/api/core/entities/execution_extra_content.py
index 72f6590e68..d304c982cd 100644
--- a/api/core/entities/execution_extra_content.py
+++ b/api/core/entities/execution_extra_content.py
@@ -44,7 +44,8 @@ class HumanInputContent(BaseModel):
type: ExecutionContentType = Field(default=ExecutionContentType.HUMAN_INPUT)
-ExecutionExtraContentDomainModel: TypeAlias = HumanInputContent
+# Keep a runtime alias here: callers and tests expect identity with HumanInputContent.
+ExecutionExtraContentDomainModel: TypeAlias = HumanInputContent # noqa: UP040
__all__ = [
"ExecutionExtraContentDomainModel",
diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py
index 8b48aa2660..782897aea9 100644
--- a/api/core/entities/provider_configuration.py
+++ b/api/core/entities/provider_configuration.py
@@ -403,7 +403,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
),
)
@@ -753,7 +753,7 @@ class ProviderConfiguration(BaseModel):
ProviderModel.tenant_id == self.tenant_id,
ProviderModel.provider_name.in_(provider_names),
ProviderModel.model_name == model,
- ProviderModel.model_type == model_type.to_origin_model_type(),
+ ProviderModel.model_type == model_type,
)
return session.execute(stmt).scalar_one_or_none()
@@ -778,7 +778,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
)
credential_record = session.execute(stmt).scalar_one_or_none()
@@ -825,7 +825,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
ProviderModelCredential.credential_name == credential_name,
)
if exclude_id:
@@ -901,7 +901,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
)
credential_record = s.execute(stmt).scalar_one_or_none()
original_credentials = (
@@ -970,7 +970,7 @@ class ProviderConfiguration(BaseModel):
tenant_id=self.tenant_id,
provider_name=self.provider.provider,
model_name=model,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
encrypted_config=json.dumps(credentials),
credential_name=credential_name,
)
@@ -983,7 +983,7 @@ class ProviderConfiguration(BaseModel):
tenant_id=self.tenant_id,
provider_name=self.provider.provider,
model_name=model,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
credential_id=credential.id,
is_valid=True,
)
@@ -1038,7 +1038,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
)
credential_record = session.execute(stmt).scalar_one_or_none()
if not credential_record:
@@ -1083,7 +1083,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
)
credential_record = session.execute(stmt).scalar_one_or_none()
if not credential_record:
@@ -1116,7 +1116,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
)
available_credentials_count = session.execute(count_stmt).scalar() or 0
session.delete(credential_record)
@@ -1156,7 +1156,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
)
credential_record = session.execute(stmt).scalar_one_or_none()
if not credential_record:
@@ -1171,7 +1171,7 @@ class ProviderConfiguration(BaseModel):
tenant_id=self.tenant_id,
provider_name=self.provider.provider,
model_name=model,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
is_valid=True,
credential_id=credential_id,
)
@@ -1207,7 +1207,7 @@ class ProviderConfiguration(BaseModel):
ProviderModelCredential.tenant_id == self.tenant_id,
ProviderModelCredential.provider_name.in_(self._get_provider_names()),
ProviderModelCredential.model_name == model,
- ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+ ProviderModelCredential.model_type == model_type,
)
credential_record = session.execute(stmt).scalar_one_or_none()
if not credential_record:
@@ -1263,7 +1263,7 @@ class ProviderConfiguration(BaseModel):
stmt = select(ProviderModelSetting).where(
ProviderModelSetting.tenant_id == self.tenant_id,
ProviderModelSetting.provider_name.in_(self._get_provider_names()),
- ProviderModelSetting.model_type == model_type.to_origin_model_type(),
+ ProviderModelSetting.model_type == model_type,
ProviderModelSetting.model_name == model,
)
return session.execute(stmt).scalars().first()
@@ -1286,7 +1286,7 @@ class ProviderConfiguration(BaseModel):
model_setting = ProviderModelSetting(
tenant_id=self.tenant_id,
provider_name=self.provider.provider,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
model_name=model,
enabled=True,
)
@@ -1312,7 +1312,7 @@ class ProviderConfiguration(BaseModel):
model_setting = ProviderModelSetting(
tenant_id=self.tenant_id,
provider_name=self.provider.provider,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
model_name=model,
enabled=False,
)
@@ -1348,7 +1348,7 @@ class ProviderConfiguration(BaseModel):
stmt = select(func.count(LoadBalancingModelConfig.id)).where(
LoadBalancingModelConfig.tenant_id == self.tenant_id,
LoadBalancingModelConfig.provider_name.in_(provider_names),
- LoadBalancingModelConfig.model_type == model_type.to_origin_model_type(),
+ LoadBalancingModelConfig.model_type == model_type,
LoadBalancingModelConfig.model_name == model,
)
load_balancing_config_count = session.execute(stmt).scalar() or 0
@@ -1364,7 +1364,7 @@ class ProviderConfiguration(BaseModel):
model_setting = ProviderModelSetting(
tenant_id=self.tenant_id,
provider_name=self.provider.provider,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
model_name=model,
load_balancing_enabled=True,
)
@@ -1391,7 +1391,7 @@ class ProviderConfiguration(BaseModel):
model_setting = ProviderModelSetting(
tenant_id=self.tenant_id,
provider_name=self.provider.provider,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
model_name=model,
load_balancing_enabled=False,
)
diff --git a/api/core/helper/csv_sanitizer.py b/api/core/helper/csv_sanitizer.py
index 0023de5a35..c4fa230b3b 100644
--- a/api/core/helper/csv_sanitizer.py
+++ b/api/core/helper/csv_sanitizer.py
@@ -17,7 +17,7 @@ class CSVSanitizer:
"""
# Characters that can start a formula in Excel/LibreOffice/Google Sheets
- FORMULA_CHARS = frozenset({"=", "+", "-", "@", "\t", "\r"})
+ FORMULA_CHARS = frozenset(("=", "+", "-", "@", "\t", "\r"))
@classmethod
def sanitize_value(cls, value: Any) -> str:
diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py
index 2bada85582..768210d899 100644
--- a/api/core/helper/module_import_helper.py
+++ b/api/core/helper/module_import_helper.py
@@ -2,12 +2,13 @@ import importlib.util
import logging
import sys
from types import ModuleType
-from typing import AnyStr
logger = logging.getLogger(__name__)
-def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_lazy_loader: bool = False) -> ModuleType:
+def import_module_from_source[T: (str, bytes)](
+ *, module_name: str, py_file_path: T, use_lazy_loader: bool = False
+) -> ModuleType:
"""
Importing a module from the source file directly
"""
diff --git a/api/core/helper/position_helper.py b/api/core/helper/position_helper.py
index 2fc8fbf885..71d83bef4a 100644
--- a/api/core/helper/position_helper.py
+++ b/api/core/helper/position_helper.py
@@ -2,7 +2,6 @@ import os
from collections import OrderedDict
from collections.abc import Callable
from functools import lru_cache
-from typing import TypeVar
from configs import dify_config
from core.tools.utils.yaml_utils import load_yaml_file_cached
@@ -65,10 +64,7 @@ def pin_position_map(original_position_map: dict[str, int], pin_list: list[str])
return position_map
-T = TypeVar("T")
-
-
-def is_filtered(
+def is_filtered[T](
include_set: set[str],
exclude_set: set[str],
data: T,
@@ -97,11 +93,11 @@ def is_filtered(
return False
-def sort_by_position_map(
+def sort_by_position_map[T](
position_map: dict[str, int],
data: list[T],
name_func: Callable[[T], str],
-):
+) -> list[T]:
"""
Sort the objects by the position map.
If the name of the object is not in the position map, it will be put at the end.
@@ -116,11 +112,11 @@ def sort_by_position_map(
return sorted(data, key=lambda x: position_map.get(name_func(x), float("inf")))
-def sort_to_dict_by_position_map(
+def sort_to_dict_by_position_map[T](
position_map: dict[str, int],
data: list[T],
name_func: Callable[[T], str],
-):
+) -> OrderedDict[str, T]:
"""
Sort the objects into a ordered dict by the position map.
If the name of the object is not in the position map, it will be put at the end.
diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py
index 54068fc28d..e38592bb7b 100644
--- a/api/core/helper/ssrf_proxy.py
+++ b/api/core/helper/ssrf_proxy.py
@@ -4,7 +4,7 @@ Proxy requests to avoid SSRF
import logging
import time
-from typing import Any, TypeAlias
+from typing import Any
import httpx
from pydantic import TypeAdapter, ValidationError
@@ -20,8 +20,8 @@ SSRF_DEFAULT_MAX_RETRIES = dify_config.SSRF_DEFAULT_MAX_RETRIES
BACKOFF_FACTOR = 0.5
STATUS_FORCELIST = [429, 500, 502, 503, 504]
-Headers: TypeAlias = dict[str, str]
-_HEADERS_ADAPTER = TypeAdapter(Headers)
+type Headers = dict[str, str]
+_HEADERS_ADAPTER: TypeAdapter[Headers] = TypeAdapter(Headers)
_SSL_VERIFIED_POOL_KEY = "ssrf:verified"
_SSL_UNVERIFIED_POOL_KEY = "ssrf:unverified"
diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py
index 3ec17bc986..b8d5ca2f50 100644
--- a/api/core/indexing_runner.py
+++ b/api/core/indexing_runner.py
@@ -10,7 +10,7 @@ from typing import Any
from flask import Flask, current_app
from graphon.model_runtime.entities.model_entities import ModelType
-from sqlalchemy import select
+from sqlalchemy import delete, func, select, update
from sqlalchemy.orm.exc import ObjectDeletedError
from configs import dify_config
@@ -78,7 +78,7 @@ class IndexingRunner:
continue
# get dataset
- dataset = db.session.query(Dataset).filter_by(id=requeried_document.dataset_id).first()
+ dataset = db.session.get(Dataset, requeried_document.dataset_id)
if not dataset:
raise ValueError("no dataset found")
@@ -95,7 +95,7 @@ class IndexingRunner:
text_docs = self._extract(index_processor, requeried_document, processing_rule.to_dict())
# transform
- current_user = db.session.query(Account).filter_by(id=requeried_document.created_by).first()
+ current_user = db.session.get(Account, requeried_document.created_by)
if not current_user:
raise ValueError("no current user found")
current_user.set_tenant_id(dataset.tenant_id)
@@ -137,23 +137,24 @@ class IndexingRunner:
return
# get dataset
- dataset = db.session.query(Dataset).filter_by(id=requeried_document.dataset_id).first()
+ dataset = db.session.get(Dataset, requeried_document.dataset_id)
if not dataset:
raise ValueError("no dataset found")
# get exist document_segment list and delete
- document_segments = (
- db.session.query(DocumentSegment)
- .filter_by(dataset_id=dataset.id, document_id=requeried_document.id)
- .all()
- )
+ document_segments = db.session.scalars(
+ select(DocumentSegment).where(
+ DocumentSegment.dataset_id == dataset.id,
+ DocumentSegment.document_id == requeried_document.id,
+ )
+ ).all()
for document_segment in document_segments:
db.session.delete(document_segment)
if requeried_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX:
# delete child chunks
- db.session.query(ChildChunk).where(ChildChunk.segment_id == document_segment.id).delete()
+ db.session.execute(delete(ChildChunk).where(ChildChunk.segment_id == document_segment.id))
db.session.commit()
# get the process rule
stmt = select(DatasetProcessRule).where(DatasetProcessRule.id == requeried_document.dataset_process_rule_id)
@@ -167,7 +168,7 @@ class IndexingRunner:
text_docs = self._extract(index_processor, requeried_document, processing_rule.to_dict())
# transform
- current_user = db.session.query(Account).filter_by(id=requeried_document.created_by).first()
+ current_user = db.session.get(Account, requeried_document.created_by)
if not current_user:
raise ValueError("no current user found")
current_user.set_tenant_id(dataset.tenant_id)
@@ -207,17 +208,18 @@ class IndexingRunner:
return
# get dataset
- dataset = db.session.query(Dataset).filter_by(id=requeried_document.dataset_id).first()
+ dataset = db.session.get(Dataset, requeried_document.dataset_id)
if not dataset:
raise ValueError("no dataset found")
# get exist document_segment list and delete
- document_segments = (
- db.session.query(DocumentSegment)
- .filter_by(dataset_id=dataset.id, document_id=requeried_document.id)
- .all()
- )
+ document_segments = db.session.scalars(
+ select(DocumentSegment).where(
+ DocumentSegment.dataset_id == dataset.id,
+ DocumentSegment.document_id == requeried_document.id,
+ )
+ ).all()
documents = []
if document_segments:
@@ -289,7 +291,7 @@ class IndexingRunner:
embedding_model_instance = None
if dataset_id:
- dataset = db.session.query(Dataset).filter_by(id=dataset_id).first()
+ dataset = db.session.get(Dataset, dataset_id)
if not dataset:
raise ValueError("Dataset not found.")
if IndexTechniqueType.HIGH_QUALITY in {dataset.indexing_technique, indexing_technique}:
@@ -652,24 +654,26 @@ class IndexingRunner:
@staticmethod
def _process_keyword_index(flask_app, dataset_id, document_id, documents):
with flask_app.app_context():
- dataset = db.session.query(Dataset).filter_by(id=dataset_id).first()
+ dataset = db.session.get(Dataset, dataset_id)
if not dataset:
raise ValueError("no dataset found")
keyword = Keyword(dataset)
keyword.create(documents)
if dataset.indexing_technique != IndexTechniqueType.HIGH_QUALITY:
document_ids = [document.metadata["doc_id"] for document in documents]
- db.session.query(DocumentSegment).where(
- DocumentSegment.document_id == document_id,
- DocumentSegment.dataset_id == dataset_id,
- DocumentSegment.index_node_id.in_(document_ids),
- DocumentSegment.status == SegmentStatus.INDEXING,
- ).update(
- {
- DocumentSegment.status: SegmentStatus.COMPLETED,
- DocumentSegment.enabled: True,
- DocumentSegment.completed_at: naive_utc_now(),
- }
+ db.session.execute(
+ update(DocumentSegment)
+ .where(
+ DocumentSegment.document_id == document_id,
+ DocumentSegment.dataset_id == dataset_id,
+ DocumentSegment.index_node_id.in_(document_ids),
+ DocumentSegment.status == SegmentStatus.INDEXING,
+ )
+ .values(
+ status=SegmentStatus.COMPLETED,
+ enabled=True,
+ completed_at=naive_utc_now(),
+ )
)
db.session.commit()
@@ -703,17 +707,19 @@ class IndexingRunner:
)
document_ids = [document.metadata["doc_id"] for document in chunk_documents]
- db.session.query(DocumentSegment).where(
- DocumentSegment.document_id == dataset_document.id,
- DocumentSegment.dataset_id == dataset.id,
- DocumentSegment.index_node_id.in_(document_ids),
- DocumentSegment.status == SegmentStatus.INDEXING,
- ).update(
- {
- DocumentSegment.status: SegmentStatus.COMPLETED,
- DocumentSegment.enabled: True,
- DocumentSegment.completed_at: naive_utc_now(),
- }
+ db.session.execute(
+ update(DocumentSegment)
+ .where(
+ DocumentSegment.document_id == dataset_document.id,
+ DocumentSegment.dataset_id == dataset.id,
+ DocumentSegment.index_node_id.in_(document_ids),
+ DocumentSegment.status == SegmentStatus.INDEXING,
+ )
+ .values(
+ status=SegmentStatus.COMPLETED,
+ enabled=True,
+ completed_at=naive_utc_now(),
+ )
)
db.session.commit()
@@ -734,10 +740,17 @@ class IndexingRunner:
"""
Update the document indexing status.
"""
- count = db.session.query(DatasetDocument).filter_by(id=document_id, is_paused=True).count()
+ count = (
+ db.session.scalar(
+ select(func.count())
+ .select_from(DatasetDocument)
+ .where(DatasetDocument.id == document_id, DatasetDocument.is_paused == True)
+ )
+ or 0
+ )
if count > 0:
raise DocumentIsPausedError()
- document = db.session.query(DatasetDocument).filter_by(id=document_id).first()
+ document = db.session.get(DatasetDocument, document_id)
if not document:
raise DocumentIsDeletedPausedError()
@@ -745,7 +758,7 @@ class IndexingRunner:
if extra_update_params:
update_params.update(extra_update_params)
- db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params) # type: ignore
+ db.session.execute(update(DatasetDocument).where(DatasetDocument.id == document_id).values(update_params)) # type: ignore
db.session.commit()
@staticmethod
@@ -753,7 +766,9 @@ class IndexingRunner:
"""
Update the document segment by document id.
"""
- db.session.query(DocumentSegment).filter_by(document_id=dataset_document_id).update(update_params)
+ db.session.execute(
+ update(DocumentSegment).where(DocumentSegment.document_id == dataset_document_id).values(update_params)
+ )
db.session.commit()
def _transform(
diff --git a/api/core/mcp/client/sse_client.py b/api/core/mcp/client/sse_client.py
index 1de1d5a073..19d977c8e5 100644
--- a/api/core/mcp/client/sse_client.py
+++ b/api/core/mcp/client/sse_client.py
@@ -3,7 +3,7 @@ import queue
from collections.abc import Generator
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
-from typing import Any, TypeAlias, final
+from typing import Any, final
from urllib.parse import urljoin, urlparse
import httpx
@@ -33,9 +33,9 @@ class _StatusError:
# Type aliases for better readability
-ReadQueue: TypeAlias = queue.Queue[SessionMessage | Exception | None]
-WriteQueue: TypeAlias = queue.Queue[SessionMessage | Exception | None]
-StatusQueue: TypeAlias = queue.Queue[_StatusReady | _StatusError]
+type ReadQueue = queue.Queue[SessionMessage | Exception | None]
+type WriteQueue = queue.Queue[SessionMessage | Exception | None]
+type StatusQueue = queue.Queue[_StatusReady | _StatusError]
class SSETransport:
diff --git a/api/core/mcp/entities.py b/api/core/mcp/entities.py
index 08823daab1..d6d3a677c6 100644
--- a/api/core/mcp/entities.py
+++ b/api/core/mcp/entities.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
from enum import StrEnum
-from typing import Any, Generic, TypeVar
+from typing import Any, TypeVar
from pydantic import BaseModel
@@ -9,13 +9,12 @@ from core.mcp.types import LATEST_PROTOCOL_VERSION, OAuthClientInformation, OAut
SUPPORTED_PROTOCOL_VERSIONS: list[str] = ["2024-11-05", "2025-03-26", LATEST_PROTOCOL_VERSION]
-
SessionT = TypeVar("SessionT", bound=BaseSession[Any, Any, Any, Any, Any])
LifespanContextT = TypeVar("LifespanContextT")
@dataclass
-class RequestContext(Generic[SessionT, LifespanContextT]):
+class RequestContext[SessionT: BaseSession[Any, Any, Any, Any, Any], LifespanContextT]:
request_id: RequestId
meta: RequestParams.Meta | None
session: SessionT
diff --git a/api/core/mcp/server/streamable_http.py b/api/core/mcp/server/streamable_http.py
index 27000c947c..278add8cc9 100644
--- a/api/core/mcp/server/streamable_http.py
+++ b/api/core/mcp/server/streamable_http.py
@@ -260,4 +260,12 @@ def convert_input_form_to_parameters(
parameters[item.variable]["enum"] = item.options
elif item.type == VariableEntityType.NUMBER:
parameters[item.variable]["type"] = "number"
+ elif item.type == VariableEntityType.CHECKBOX:
+ parameters[item.variable]["type"] = "boolean"
+ elif item.type == VariableEntityType.JSON_OBJECT:
+ parameters[item.variable]["type"] = "object"
+ if item.json_schema:
+ for key in ("properties", "required", "additionalProperties"):
+ if key in item.json_schema:
+ parameters[item.variable][key] = item.json_schema[key]
return parameters, required
diff --git a/api/core/mcp/session/base_session.py b/api/core/mcp/session/base_session.py
index e1a40593e7..e50fd42198 100644
--- a/api/core/mcp/session/base_session.py
+++ b/api/core/mcp/session/base_session.py
@@ -4,7 +4,7 @@ from collections.abc import Callable
from concurrent.futures import Future, ThreadPoolExecutor, TimeoutError
from datetime import timedelta
from types import TracebackType
-from typing import Any, Generic, Self, TypeVar
+from typing import Any, Self, cast
from httpx import HTTPStatusError
from pydantic import BaseModel
@@ -34,16 +34,10 @@ from core.mcp.types import (
logger = logging.getLogger(__name__)
-SendRequestT = TypeVar("SendRequestT", ClientRequest, ServerRequest)
-SendResultT = TypeVar("SendResultT", ClientResult, ServerResult)
-SendNotificationT = TypeVar("SendNotificationT", ClientNotification, ServerNotification)
-ReceiveRequestT = TypeVar("ReceiveRequestT", ClientRequest, ServerRequest)
-ReceiveResultT = TypeVar("ReceiveResultT", bound=BaseModel)
-ReceiveNotificationT = TypeVar("ReceiveNotificationT", ClientNotification, ServerNotification)
DEFAULT_RESPONSE_READ_TIMEOUT = 1.0
-class RequestResponder(Generic[ReceiveRequestT, SendResultT]):
+class RequestResponder[ReceiveRequestT: ClientRequest | ServerRequest, SendResultT: ClientResult | ServerResult]:
"""Handles responding to MCP requests and manages request lifecycle.
This class MUST be used as a context manager to ensure proper cleanup and
@@ -60,7 +54,7 @@ class RequestResponder(Generic[ReceiveRequestT, SendResultT]):
"""
request: ReceiveRequestT
- _session: Any
+ _session: "BaseSession[Any, Any, SendResultT, ReceiveRequestT, Any]"
_on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any]
def __init__(
@@ -68,7 +62,7 @@ class RequestResponder(Generic[ReceiveRequestT, SendResultT]):
request_id: RequestId,
request_meta: RequestParams.Meta | None,
request: ReceiveRequestT,
- session: """BaseSession[SendRequestT, SendNotificationT, SendResultT, ReceiveRequestT, ReceiveNotificationT]""",
+ session: "BaseSession[Any, Any, SendResultT, ReceiveRequestT, Any]",
on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any],
):
self.request_id = request_id
@@ -111,7 +105,7 @@ class RequestResponder(Generic[ReceiveRequestT, SendResultT]):
self.completed = True
- self._session._send_response(request_id=self.request_id, response=response)
+ self._session.send_response(request_id=self.request_id, response=response)
def cancel(self):
"""Cancel this request and mark it as completed."""
@@ -120,21 +114,19 @@ class RequestResponder(Generic[ReceiveRequestT, SendResultT]):
self.completed = True # Mark as completed so it's removed from in_flight
# Send an error response to indicate cancellation
- self._session._send_response(
+ self._session.send_response(
request_id=self.request_id,
response=ErrorData(code=0, message="Request cancelled", data=None),
)
-class BaseSession(
- Generic[
- SendRequestT,
- SendNotificationT,
- SendResultT,
- ReceiveRequestT,
- ReceiveNotificationT,
- ],
-):
+class BaseSession[
+ SendRequestT: ClientRequest | ServerRequest,
+ SendNotificationT: ClientNotification | ServerNotification,
+ SendResultT: ClientResult | ServerResult,
+ ReceiveRequestT: ClientRequest | ServerRequest,
+ ReceiveNotificationT: ClientNotification | ServerNotification,
+]:
"""
Implements an MCP "session" on top of read/write streams, including features
like request/response linking, notifications, and progress.
@@ -204,13 +196,13 @@ class BaseSession(
# The receiver thread should have already exited due to the None message in the queue
self._executor.shutdown(wait=False)
- def send_request(
+ def send_request[T: BaseModel](
self,
request: SendRequestT,
- result_type: type[ReceiveResultT],
+ result_type: type[T],
request_read_timeout_seconds: timedelta | None = None,
metadata: MessageMetadata | None = None,
- ) -> ReceiveResultT:
+ ) -> T:
"""
Sends a request and wait for a response. Raises an McpError if the
response contains an error. If a request read timeout is provided, it
@@ -299,7 +291,7 @@ class BaseSession(
)
self._write_stream.put(session_message)
- def _send_response(self, request_id: RequestId, response: SendResultT | ErrorData):
+ def send_response(self, request_id: RequestId, response: SendResultT | ErrorData):
if isinstance(response, ErrorData):
jsonrpc_error = JSONRPCError(jsonrpc="2.0", id=request_id, error=response)
session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_error))
@@ -346,6 +338,7 @@ class BaseSession(
validated_request = self._receive_request_type.model_validate(
message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True)
)
+ validated_request = cast(ReceiveRequestT, validated_request)
responder = RequestResponder[ReceiveRequestT, SendResultT](
request_id=message.message.root.id,
@@ -366,6 +359,7 @@ class BaseSession(
notification = self._receive_notification_type.model_validate(
message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True)
)
+ notification = cast(ReceiveNotificationT, notification)
# Handle cancellation notifications
if isinstance(notification.root, CancelledNotification):
cancelled_id = notification.root.params.requestId
diff --git a/api/core/mcp/types.py b/api/core/mcp/types.py
index 335c6a5cbc..2653d20a7d 100644
--- a/api/core/mcp/types.py
+++ b/api/core/mcp/types.py
@@ -1,6 +1,6 @@
from collections.abc import Callable
from dataclasses import dataclass
-from typing import Annotated, Any, Generic, Literal, TypeAlias, TypeVar
+from typing import Annotated, Any, Literal
from pydantic import BaseModel, ConfigDict, Field, FileUrl, RootModel
from pydantic.networks import AnyUrl, UrlConstraints
@@ -31,7 +31,7 @@ ProgressToken = str | int
Cursor = str
Role = Literal["user", "assistant"]
RequestId = Annotated[int | str, Field(union_mode="left_to_right")]
-AnyFunction: TypeAlias = Callable[..., Any]
+type AnyFunction = Callable[..., Any]
class RequestParams(BaseModel):
@@ -68,12 +68,7 @@ class NotificationParams(BaseModel):
"""
-RequestParamsT = TypeVar("RequestParamsT", bound=RequestParams | dict[str, Any] | None)
-NotificationParamsT = TypeVar("NotificationParamsT", bound=NotificationParams | dict[str, Any] | None)
-MethodT = TypeVar("MethodT", bound=str)
-
-
-class Request(BaseModel, Generic[RequestParamsT, MethodT]):
+class Request[RequestParamsT: RequestParams | dict[str, Any] | None, MethodT: str](BaseModel):
"""Base class for JSON-RPC requests."""
method: MethodT
@@ -81,14 +76,14 @@ class Request(BaseModel, Generic[RequestParamsT, MethodT]):
model_config = ConfigDict(extra="allow")
-class PaginatedRequest(Request[PaginatedRequestParams | None, MethodT], Generic[MethodT]):
+class PaginatedRequest[T: str](Request[PaginatedRequestParams | None, T]):
"""Base class for paginated requests,
matching the schema's PaginatedRequest interface."""
params: PaginatedRequestParams | None = None
-class Notification(BaseModel, Generic[NotificationParamsT, MethodT]):
+class Notification[NotificationParamsT: NotificationParams | dict[str, Any] | None, MethodT: str](BaseModel):
"""Base class for JSON-RPC notifications."""
method: MethodT
@@ -736,7 +731,7 @@ class ResourceLink(Resource):
ContentBlock = TextContent | ImageContent | AudioContent | ResourceLink | EmbeddedResource
"""A content block that can be used in prompts and tool results."""
-Content: TypeAlias = ContentBlock
+type Content = ContentBlock
# """DEPRECATED: Content is deprecated, you should use ContentBlock directly."""
diff --git a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py
index 902f58e6b7..66933cea28 100644
--- a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py
+++ b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py
@@ -38,6 +38,7 @@ from core.ops.entities.trace_entity import (
TraceTaskName,
WorkflowTraceInfo,
)
+from core.ops.utils import JSON_DICT_ADAPTER
from core.repositories import DifyCoreRepositoryFactory
from extensions.ext_database import db
from models.model import EndUser, MessageFile
@@ -469,7 +470,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
llm_attributes[SpanAttributes.LLM_PROVIDER] = trace_info.message_data.model_provider
if trace_info.message_data and trace_info.message_data.message_metadata:
- metadata_dict = json.loads(trace_info.message_data.message_metadata)
+ metadata_dict = JSON_DICT_ADAPTER.validate_json(trace_info.message_data.message_metadata)
if model_params := metadata_dict.get("model_parameters"):
llm_attributes[SpanAttributes.LLM_INVOCATION_PARAMETERS] = json.dumps(model_params)
diff --git a/api/core/ops/mlflow_trace/mlflow_trace.py b/api/core/ops/mlflow_trace/mlflow_trace.py
index 946d3cdd47..3d8c1dd038 100644
--- a/api/core/ops/mlflow_trace/mlflow_trace.py
+++ b/api/core/ops/mlflow_trace/mlflow_trace.py
@@ -1,4 +1,3 @@
-import json
import logging
import os
from datetime import datetime, timedelta
@@ -25,6 +24,7 @@ from core.ops.entities.trace_entity import (
TraceTaskName,
WorkflowTraceInfo,
)
+from core.ops.utils import JSON_DICT_ADAPTER
from extensions.ext_database import db
from models import EndUser
from models.workflow import WorkflowNodeExecutionModel
@@ -153,7 +153,7 @@ class MLflowDataTrace(BaseTraceInstance):
inputs = node.process_data # contains request URL
if not inputs:
- inputs = json.loads(node.inputs) if node.inputs else {}
+ inputs = JSON_DICT_ADAPTER.validate_json(node.inputs) if node.inputs else {}
node_span = start_span_no_context(
name=node.title,
@@ -180,7 +180,7 @@ class MLflowDataTrace(BaseTraceInstance):
# End node span
finished_at = node.created_at + timedelta(seconds=node.elapsed_time)
- outputs = json.loads(node.outputs) if node.outputs else {}
+ outputs = JSON_DICT_ADAPTER.validate_json(node.outputs) if node.outputs else {}
if node.node_type == BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL:
outputs = self._parse_knowledge_retrieval_outputs(outputs)
elif node.node_type == BuiltinNodeTypes.LLM:
@@ -216,8 +216,8 @@ class MLflowDataTrace(BaseTraceInstance):
return {}, {}
try:
- data = json.loads(node.process_data)
- except (json.JSONDecodeError, TypeError):
+ data = JSON_DICT_ADAPTER.validate_json(node.process_data)
+ except (ValueError, TypeError):
return {}, {}
inputs = self._parse_prompts(data.get("prompts"))
diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py
index 9c36d57c6f..fd235faf80 100644
--- a/api/core/ops/ops_trace_manager.py
+++ b/api/core/ops/ops_trace_manager.py
@@ -6,17 +6,19 @@ import queue
import threading
import time
from datetime import timedelta
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, TypedDict
from uuid import UUID, uuid4
from cachetools import LRUCache
from flask import current_app
+from pydantic import TypeAdapter
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker
from core.helper.encrypter import batch_decrypt_token, encrypt_token, obfuscated_token
from core.ops.entities.config_entity import (
OPS_FILE_PATH,
+ BaseTracingConfig,
TracingProviderEnum,
)
from core.ops.entities.trace_entity import (
@@ -33,7 +35,7 @@ from core.ops.entities.trace_entity import (
WorkflowNodeTraceInfo,
WorkflowTraceInfo,
)
-from core.ops.utils import get_message_data
+from core.ops.utils import JSON_DICT_ADAPTER, get_message_data
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.account import Tenant
@@ -50,6 +52,14 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
+class _AppTracingConfig(TypedDict, total=False):
+ enabled: bool
+ tracing_provider: str | None
+
+
+_app_tracing_config_adapter: TypeAdapter[_AppTracingConfig] = TypeAdapter(_AppTracingConfig)
+
+
def _lookup_app_and_workspace_names(app_id: str | None, tenant_id: str | None) -> tuple[str, str]:
"""Return (app_name, workspace_name) for the given IDs. Falls back to empty strings."""
app_name = ""
@@ -185,8 +195,15 @@ def _lookup_llm_credential_info(
return None, ""
-class OpsTraceProviderConfigMap(collections.UserDict[str, dict[str, Any]]):
- def __getitem__(self, provider: str) -> dict[str, Any]:
+class TracingProviderConfigEntry(TypedDict):
+ config_class: type[BaseTracingConfig]
+ secret_keys: list[str]
+ other_keys: list[str]
+ trace_instance: type[Any]
+
+
+class OpsTraceProviderConfigMap(collections.UserDict[str, TracingProviderConfigEntry]):
+ def __getitem__(self, provider: str) -> TracingProviderConfigEntry:
match provider:
case TracingProviderEnum.LANGFUSE:
from core.ops.entities.config_entity import LangfuseConfig
@@ -446,7 +463,7 @@ class OpsTraceManager:
@classmethod
def get_ops_trace_instance(
cls,
- app_id: Union[UUID, str] | None = None,
+ app_id: UUID | str | None = None,
):
"""
Get ops trace through model config
@@ -468,7 +485,7 @@ class OpsTraceManager:
if app is None:
return None
- app_ops_trace_config = json.loads(app.tracing) if app.tracing else None
+ app_ops_trace_config = _app_tracing_config_adapter.validate_json(app.tracing) if app.tracing else None
if app_ops_trace_config is None:
return None
if not app_ops_trace_config.get("enabled"):
@@ -560,7 +577,7 @@ class OpsTraceManager:
raise ValueError("App not found")
if not app.tracing:
return {"enabled": False, "tracing_provider": None}
- app_trace_config = json.loads(app.tracing)
+ app_trace_config = _app_tracing_config_adapter.validate_json(app.tracing)
return app_trace_config
@staticmethod
@@ -575,8 +592,8 @@ class OpsTraceManager:
provider_config_map[tracing_provider]["config_class"],
provider_config_map[tracing_provider]["trace_instance"],
)
- tracing_config = config_type(**tracing_config)
- return trace_instance(tracing_config).api_check()
+ config = config_type(**tracing_config)
+ return trace_instance(config).api_check()
@staticmethod
def get_trace_config_project_key(tracing_config: dict, tracing_provider: str):
@@ -590,8 +607,8 @@ class OpsTraceManager:
provider_config_map[tracing_provider]["config_class"],
provider_config_map[tracing_provider]["trace_instance"],
)
- tracing_config = config_type(**tracing_config)
- return trace_instance(tracing_config).get_project_key()
+ config = config_type(**tracing_config)
+ return trace_instance(config).get_project_key()
@staticmethod
def get_trace_config_project_url(tracing_config: dict, tracing_provider: str):
@@ -605,8 +622,8 @@ class OpsTraceManager:
provider_config_map[tracing_provider]["config_class"],
provider_config_map[tracing_provider]["trace_instance"],
)
- tracing_config = config_type(**tracing_config)
- return trace_instance(tracing_config).get_project_url()
+ config = config_type(**tracing_config)
+ return trace_instance(config).get_project_url()
class TraceTask:
@@ -636,7 +653,6 @@ class TraceTask:
carries ``total_tokens``. Projects only the ``outputs`` column to avoid loading
large JSON blobs unnecessarily.
"""
- import json
from models.workflow import WorkflowNodeExecutionModel
@@ -658,7 +674,7 @@ class TraceTask:
if not raw:
continue
try:
- outputs = json.loads(raw) if isinstance(raw, str) else raw
+ outputs = JSON_DICT_ADAPTER.validate_json(raw) if isinstance(raw, str) else raw
except (ValueError, TypeError):
continue
if not isinstance(outputs, dict):
@@ -700,7 +716,7 @@ class TraceTask:
self,
trace_type: Any,
message_id: str | None = None,
- workflow_execution: Optional["WorkflowExecution"] = None,
+ workflow_execution: "WorkflowExecution | None" = None,
conversation_id: str | None = None,
user_id: str | None = None,
timer: Any | None = None,
@@ -1420,7 +1436,7 @@ class TraceTask:
return {}
try:
- metadata = json.loads(message_data.message_metadata)
+ metadata = JSON_DICT_ADAPTER.validate_json(message_data.message_metadata)
usage = metadata.get("usage", {})
time_to_first_token = usage.get("time_to_first_token")
time_to_generate = usage.get("time_to_generate")
@@ -1430,7 +1446,7 @@ class TraceTask:
"llm_streaming_time_to_generate": time_to_generate,
"is_streaming_request": time_to_first_token is not None,
}
- except (json.JSONDecodeError, AttributeError):
+ except (ValueError, AttributeError):
return {}
diff --git a/api/core/ops/utils.py b/api/core/ops/utils.py
index 8b9a2e424a..a6f10c09ac 100644
--- a/api/core/ops/utils.py
+++ b/api/core/ops/utils.py
@@ -3,11 +3,14 @@ from datetime import datetime
from typing import Any, Union
from urllib.parse import urlparse
+from pydantic import TypeAdapter
from sqlalchemy import select
from models.engine import db
from models.model import Message
+JSON_DICT_ADAPTER: TypeAdapter[dict[str, Any]] = TypeAdapter(dict[str, Any])
+
def filter_none_values(data: dict[str, Any]) -> dict[str, Any]:
new_data = {}
diff --git a/api/core/plugin/backwards_invocation/base.py b/api/core/plugin/backwards_invocation/base.py
index a89b0f95be..85c2eb89b1 100644
--- a/api/core/plugin/backwards_invocation/base.py
+++ b/api/core/plugin/backwards_invocation/base.py
@@ -1,5 +1,4 @@
from collections.abc import Generator, Mapping
-from typing import Generic, TypeVar
from pydantic import BaseModel
@@ -19,9 +18,6 @@ class BaseBackwardsInvocation:
yield BaseBackwardsInvocationResponse(data=response).model_dump_json().encode()
-T = TypeVar("T", bound=dict | Mapping | str | bool | int | BaseModel)
-
-
-class BaseBackwardsInvocationResponse(BaseModel, Generic[T]):
+class BaseBackwardsInvocationResponse[T: dict | Mapping | str | bool | int | BaseModel](BaseModel):
data: T | None = None
error: str = ""
diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py
index 94263ec44e..b57180690e 100644
--- a/api/core/plugin/entities/plugin_daemon.py
+++ b/api/core/plugin/entities/plugin_daemon.py
@@ -4,7 +4,7 @@ import enum
from collections.abc import Mapping, Sequence
from datetime import datetime
from enum import StrEnum
-from typing import Any, Generic, TypeVar
+from typing import Any
from graphon.model_runtime.entities.model_entities import AIModelEntity
from graphon.model_runtime.entities.provider_entities import ProviderEntity
@@ -19,10 +19,8 @@ from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin
from core.trigger.entities.entities import TriggerProviderEntity
-T = TypeVar("T", bound=(BaseModel | dict | list | bool | str))
-
-class PluginDaemonBasicResponse(BaseModel, Generic[T]):
+class PluginDaemonBasicResponse[T: BaseModel | dict | list | bool | str](BaseModel):
"""
Basic response from plugin daemon.
"""
diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py
index 2d0ab3fcd7..7f36560b49 100644
--- a/api/core/plugin/impl/base.py
+++ b/api/core/plugin/impl/base.py
@@ -2,7 +2,7 @@ import inspect
import json
import logging
from collections.abc import Callable, Generator
-from typing import Any, TypeVar, cast
+from typing import Any, cast
import httpx
from graphon.model_runtime.errors.invoke import (
@@ -17,6 +17,7 @@ from pydantic import BaseModel
from yarl import URL
from configs import dify_config
+from core.helper.http_client_pooling import get_pooled_http_client
from core.plugin.endpoint.exc import EndpointSetupFailedError
from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse, PluginDaemonError, PluginDaemonInnerError
from core.plugin.impl.exc import (
@@ -50,10 +51,13 @@ elif isinstance(_plugin_daemon_timeout_config, httpx.Timeout):
else:
plugin_daemon_request_timeout = httpx.Timeout(_plugin_daemon_timeout_config)
-T = TypeVar("T", bound=(BaseModel | dict[str, Any] | list[Any] | bool | str))
-
logger = logging.getLogger(__name__)
+_httpx_client: httpx.Client = get_pooled_http_client(
+ "plugin_daemon",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100), trust_env=False),
+)
+
class BasePluginClient:
def _request(
@@ -84,7 +88,7 @@ class BasePluginClient:
request_kwargs["content"] = prepared_data
try:
- response = httpx.request(**request_kwargs)
+ response = _httpx_client.request(**request_kwargs)
except httpx.RequestError:
logger.exception("Request to Plugin Daemon Service failed")
raise PluginDaemonInnerError(code=-500, message="Request to Plugin Daemon Service failed")
@@ -171,7 +175,7 @@ class BasePluginClient:
stream_kwargs["content"] = prepared_data
try:
- with httpx.stream(**stream_kwargs) as response:
+ with _httpx_client.stream(**stream_kwargs) as response:
for raw_line in response.iter_lines():
if not raw_line:
continue
@@ -185,7 +189,7 @@ class BasePluginClient:
logger.exception("Stream request to Plugin Daemon Service failed")
raise PluginDaemonInnerError(code=-500, message="Request to Plugin Daemon Service failed")
- def _stream_request_with_model(
+ def _stream_request_with_model[T: BaseModel | dict[str, Any] | list[Any] | bool | str](
self,
method: str,
path: str,
@@ -201,7 +205,7 @@ class BasePluginClient:
for line in self._stream_request(method, path, params, headers, data, files):
yield type_(**json.loads(line)) # type: ignore
- def _request_with_model(
+ def _request_with_model[T: BaseModel | dict[str, Any] | list[Any] | bool | str](
self,
method: str,
path: str,
@@ -217,7 +221,7 @@ class BasePluginClient:
response = self._request(method, path, headers, data, params, files)
return type_(**response.json()) # type: ignore[return-value]
- def _request_with_plugin_daemon_response(
+ def _request_with_plugin_daemon_response[T: BaseModel | dict[str, Any] | list[Any] | bool | str](
self,
method: str,
path: str,
@@ -272,7 +276,7 @@ class BasePluginClient:
return rep.data
- def _request_with_plugin_daemon_response_stream(
+ def _request_with_plugin_daemon_response_stream[T: BaseModel | dict[str, Any] | list[Any] | bool | str](
self,
method: str,
path: str,
diff --git a/api/core/plugin/utils/chunk_merger.py b/api/core/plugin/utils/chunk_merger.py
index 28cb70f96a..941d208205 100644
--- a/api/core/plugin/utils/chunk_merger.py
+++ b/api/core/plugin/utils/chunk_merger.py
@@ -1,12 +1,9 @@
from collections.abc import Generator
from dataclasses import dataclass, field
-from typing import TypeVar, Union
from core.agent.entities import AgentInvokeMessage
from core.tools.entities.tool_entities import ToolInvokeMessage
-MessageType = TypeVar("MessageType", bound=Union[ToolInvokeMessage, AgentInvokeMessage])
-
@dataclass
class FileChunk:
@@ -22,11 +19,11 @@ class FileChunk:
self.data = bytearray(self.total_length)
-def merge_blob_chunks(
- response: Generator[MessageType, None, None],
+def merge_blob_chunks[T: ToolInvokeMessage | AgentInvokeMessage](
+ response: Generator[T, None, None],
max_file_size: int = 30 * 1024 * 1024,
max_chunk_size: int = 8192,
-) -> Generator[MessageType, None, None]:
+) -> Generator[T, None, None]:
"""
Merge streaming blob chunks into complete blob messages.
diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py
index 30933239f6..5d536e0e32 100644
--- a/api/core/provider_manager.py
+++ b/api/core/provider_manager.py
@@ -306,7 +306,7 @@ class ProviderManager:
"""
stmt = select(TenantDefaultModel).where(
TenantDefaultModel.tenant_id == tenant_id,
- TenantDefaultModel.model_type == model_type.to_origin_model_type(),
+ TenantDefaultModel.model_type == model_type,
)
default_model = db.session.scalar(stmt)
@@ -324,7 +324,7 @@ class ProviderManager:
default_model = TenantDefaultModel(
tenant_id=tenant_id,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
provider_name=available_model.provider.provider,
model_name=available_model.model,
)
@@ -391,7 +391,7 @@ class ProviderManager:
raise ValueError(f"Model {model} does not exist.")
stmt = select(TenantDefaultModel).where(
TenantDefaultModel.tenant_id == tenant_id,
- TenantDefaultModel.model_type == model_type.to_origin_model_type(),
+ TenantDefaultModel.model_type == model_type,
)
default_model = db.session.scalar(stmt)
@@ -405,7 +405,7 @@ class ProviderManager:
# create default model
default_model = TenantDefaultModel(
tenant_id=tenant_id,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
provider_name=provider,
model_name=model,
)
@@ -626,9 +626,8 @@ class ProviderManager:
if provider_record.provider_type != ProviderType.SYSTEM:
continue
- provider_quota_to_provider_record_dict[ProviderQuotaType.value_of(provider_record.quota_type)] = (
- provider_record
- )
+ if provider_record.quota_type is not None:
+ provider_quota_to_provider_record_dict[provider_record.quota_type] = provider_record
for quota in configuration.quotas:
if quota.quota_type in (ProviderQuotaType.TRIAL, ProviderQuotaType.PAID):
@@ -641,7 +640,7 @@ class ProviderManager:
# TODO: Use provider name with prefix after the data migration.
provider_name=ModelProviderID(provider_name).provider_name,
provider_type=ProviderType.SYSTEM,
- quota_type=quota.quota_type,
+ quota_type=quota.quota_type, # type: ignore[arg-type]
quota_limit=0, # type: ignore
quota_used=0,
is_valid=True,
@@ -823,7 +822,7 @@ class ProviderManager:
custom_model_configurations.append(
CustomModelConfiguration(
model=provider_model_record.model_name,
- model_type=ModelType.value_of(provider_model_record.model_type),
+ model_type=provider_model_record.model_type,
credentials=provider_model_credentials,
current_credential_id=provider_model_record.credential_id,
current_credential_name=provider_model_record.credential_name,
@@ -921,9 +920,8 @@ class ProviderManager:
if provider_record.provider_type != ProviderType.SYSTEM:
continue
- quota_type_to_provider_records_dict[ProviderQuotaType.value_of(provider_record.quota_type)] = (
- provider_record
- )
+ if provider_record.quota_type is not None:
+ quota_type_to_provider_records_dict[provider_record.quota_type] = provider_record # type: ignore[index]
quota_configurations = []
if dify_config.EDITION == "CLOUD":
@@ -1203,7 +1201,7 @@ class ProviderManager:
model_settings.append(
ModelSettings(
model=provider_model_setting.model_name,
- model_type=ModelType.value_of(provider_model_setting.model_type),
+ model_type=provider_model_setting.model_type,
enabled=provider_model_setting.enabled,
load_balancing_enabled=provider_model_setting.load_balancing_enabled,
load_balancing_configs=load_balancing_configs if len(load_balancing_configs) > 1 else [],
diff --git a/api/core/rag/data_post_processor/data_post_processor.py b/api/core/rag/data_post_processor/data_post_processor.py
index b872ea8a8f..9ce91f52ff 100644
--- a/api/core/rag/data_post_processor/data_post_processor.py
+++ b/api/core/rag/data_post_processor/data_post_processor.py
@@ -1,6 +1,7 @@
+from typing import TypedDict
+
from graphon.model_runtime.entities.model_entities import ModelType
from graphon.model_runtime.errors.invoke import InvokeAuthorizationError
-from typing_extensions import TypedDict
from core.model_manager import ModelInstance, ModelManager
from core.rag.data_post_processor.reorder import ReorderRunner
diff --git a/api/core/rag/datasource/keyword/jieba/jieba.py b/api/core/rag/datasource/keyword/jieba/jieba.py
index b8d5db7a43..ed264878d3 100644
--- a/api/core/rag/datasource/keyword/jieba/jieba.py
+++ b/api/core/rag/datasource/keyword/jieba/jieba.py
@@ -1,10 +1,9 @@
from collections import defaultdict
-from typing import Any
+from typing import Any, TypedDict
import orjson
from pydantic import BaseModel
from sqlalchemy import select
-from typing_extensions import TypedDict
from configs import dify_config
from core.rag.datasource.keyword.jieba.jieba_keyword_table_handler import JiebaKeywordTableHandler
diff --git a/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py b/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
index 57a60e6970..84f35c25f8 100644
--- a/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
+++ b/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
@@ -122,6 +122,6 @@ class JiebaKeywordTableHandler:
results.add(token)
sub_tokens = re.findall(r"\w+", token)
if len(sub_tokens) > 1:
- results.update({w for w in sub_tokens if w not in list(STOPWORDS)})
+ results.update({w for w in sub_tokens if w not in STOPWORDS})
return results
diff --git a/api/core/rag/datasource/keyword/jieba/stopwords.py b/api/core/rag/datasource/keyword/jieba/stopwords.py
index 54b65d9a2d..78ed1cf594 100644
--- a/api/core/rag/datasource/keyword/jieba/stopwords.py
+++ b/api/core/rag/datasource/keyword/jieba/stopwords.py
@@ -1,1370 +1,1372 @@
-STOPWORDS = {
- "during",
- "when",
- "but",
- "then",
- "further",
- "isn",
- "mustn't",
- "until",
- "own",
- "i",
- "couldn",
- "y",
- "only",
- "you've",
- "ours",
- "who",
- "where",
- "ourselves",
- "has",
- "to",
- "was",
- "didn't",
- "themselves",
- "if",
- "against",
- "through",
- "her",
- "an",
- "your",
- "can",
- "those",
- "didn",
- "about",
- "aren't",
- "shan't",
- "be",
- "not",
- "these",
- "again",
- "so",
- "t",
- "theirs",
- "weren",
- "won't",
- "won",
- "itself",
- "just",
- "same",
- "while",
- "why",
- "doesn",
- "aren",
- "him",
- "haven",
- "for",
- "you'll",
- "that",
- "we",
- "am",
- "d",
- "by",
- "having",
- "wasn't",
- "than",
- "weren't",
- "out",
- "from",
- "now",
- "their",
- "too",
- "hadn",
- "o",
- "needn",
- "most",
- "it",
- "under",
- "needn't",
- "any",
- "some",
- "few",
- "ll",
- "hers",
- "which",
- "m",
- "you're",
- "off",
- "other",
- "had",
- "she",
- "you'd",
- "do",
- "you",
- "does",
- "s",
- "will",
- "each",
- "wouldn't",
- "hasn't",
- "such",
- "more",
- "whom",
- "she's",
- "my",
- "yours",
- "yourself",
- "of",
- "on",
- "very",
- "hadn't",
- "with",
- "yourselves",
- "been",
- "ma",
- "them",
- "mightn't",
- "shan",
- "mustn",
- "they",
- "what",
- "both",
- "that'll",
- "how",
- "is",
- "he",
- "because",
- "down",
- "haven't",
- "are",
- "no",
- "it's",
- "our",
- "being",
- "the",
- "or",
- "above",
- "myself",
- "once",
- "don't",
- "doesn't",
- "as",
- "nor",
- "here",
- "herself",
- "hasn",
- "mightn",
- "have",
- "its",
- "all",
- "were",
- "ain",
- "this",
- "at",
- "after",
- "over",
- "shouldn't",
- "into",
- "before",
- "don",
- "wouldn",
- "re",
- "couldn't",
- "wasn",
- "in",
- "should",
- "there",
- "himself",
- "isn't",
- "should've",
- "doing",
- "ve",
- "shouldn",
- "a",
- "did",
- "and",
- "his",
- "between",
- "me",
- "up",
- "below",
- "人民",
- "末##末",
- "啊",
- "阿",
- "哎",
- "哎呀",
- "哎哟",
- "唉",
- "俺",
- "俺们",
- "按",
- "按照",
- "吧",
- "吧哒",
- "把",
- "罢了",
- "被",
- "本",
- "本着",
- "比",
- "比方",
- "比如",
- "鄙人",
- "彼",
- "彼此",
- "边",
- "别",
- "别的",
- "别说",
- "并",
- "并且",
- "不比",
- "不成",
- "不单",
- "不但",
- "不独",
- "不管",
- "不光",
- "不过",
- "不仅",
- "不拘",
- "不论",
- "不怕",
- "不然",
- "不如",
- "不特",
- "不惟",
- "不问",
- "不只",
- "朝",
- "朝着",
- "趁",
- "趁着",
- "乘",
- "冲",
- "除",
- "除此之外",
- "除非",
- "除了",
- "此",
- "此间",
- "此外",
- "从",
- "从而",
- "打",
- "待",
- "但",
- "但是",
- "当",
- "当着",
- "到",
- "得",
- "的",
- "的话",
- "等",
- "等等",
- "地",
- "第",
- "叮咚",
- "对",
- "对于",
- "多",
- "多少",
- "而",
- "而况",
- "而且",
- "而是",
- "而外",
- "而言",
- "而已",
- "尔后",
- "反过来",
- "反过来说",
- "反之",
- "非但",
- "非徒",
- "否则",
- "嘎",
- "嘎登",
- "该",
- "赶",
- "个",
- "各",
- "各个",
- "各位",
- "各种",
- "各自",
- "给",
- "根据",
- "跟",
- "故",
- "故此",
- "固然",
- "关于",
- "管",
- "归",
- "果然",
- "果真",
- "过",
- "哈",
- "哈哈",
- "呵",
- "和",
- "何",
- "何处",
- "何况",
- "何时",
- "嘿",
- "哼",
- "哼唷",
- "呼哧",
- "乎",
- "哗",
- "还是",
- "还有",
- "换句话说",
- "换言之",
- "或",
- "或是",
- "或者",
- "极了",
- "及",
- "及其",
- "及至",
- "即",
- "即便",
- "即或",
- "即令",
- "即若",
- "即使",
- "几",
- "几时",
- "己",
- "既",
- "既然",
- "既是",
- "继而",
- "加之",
- "假如",
- "假若",
- "假使",
- "鉴于",
- "将",
- "较",
- "较之",
- "叫",
- "接着",
- "结果",
- "借",
- "紧接着",
- "进而",
- "尽",
- "尽管",
- "经",
- "经过",
- "就",
- "就是",
- "就是说",
- "据",
- "具体地说",
- "具体说来",
- "开始",
- "开外",
- "靠",
- "咳",
- "可",
- "可见",
- "可是",
- "可以",
- "况且",
- "啦",
- "来",
- "来着",
- "离",
- "例如",
- "哩",
- "连",
- "连同",
- "两者",
- "了",
- "临",
- "另",
- "另外",
- "另一方面",
- "论",
- "嘛",
- "吗",
- "慢说",
- "漫说",
- "冒",
- "么",
- "每",
- "每当",
- "们",
- "莫若",
- "某",
- "某个",
- "某些",
- "拿",
- "哪",
- "哪边",
- "哪儿",
- "哪个",
- "哪里",
- "哪年",
- "哪怕",
- "哪天",
- "哪些",
- "哪样",
- "那",
- "那边",
- "那儿",
- "那个",
- "那会儿",
- "那里",
- "那么",
- "那么些",
- "那么样",
- "那时",
- "那些",
- "那样",
- "乃",
- "乃至",
- "呢",
- "能",
- "你",
- "你们",
- "您",
- "宁",
- "宁可",
- "宁肯",
- "宁愿",
- "哦",
- "呕",
- "啪达",
- "旁人",
- "呸",
- "凭",
- "凭借",
- "其",
- "其次",
- "其二",
- "其他",
- "其它",
- "其一",
- "其余",
- "其中",
- "起",
- "起见",
- "岂但",
- "恰恰相反",
- "前后",
- "前者",
- "且",
- "然而",
- "然后",
- "然则",
- "让",
- "人家",
- "任",
- "任何",
- "任凭",
- "如",
- "如此",
- "如果",
- "如何",
- "如其",
- "如若",
- "如上所述",
- "若",
- "若非",
- "若是",
- "啥",
- "上下",
- "尚且",
- "设若",
- "设使",
- "甚而",
- "甚么",
- "甚至",
- "省得",
- "时候",
- "什么",
- "什么样",
- "使得",
- "是",
- "是的",
- "首先",
- "谁",
- "谁知",
- "顺",
- "顺着",
- "似的",
- "虽",
- "虽然",
- "虽说",
- "虽则",
- "随",
- "随着",
- "所",
- "所以",
- "他",
- "他们",
- "他人",
- "它",
- "它们",
- "她",
- "她们",
- "倘",
- "倘或",
- "倘然",
- "倘若",
- "倘使",
- "腾",
- "替",
- "通过",
- "同",
- "同时",
- "哇",
- "万一",
- "往",
- "望",
- "为",
- "为何",
- "为了",
- "为什么",
- "为着",
- "喂",
- "嗡嗡",
- "我",
- "我们",
- "呜",
- "呜呼",
- "乌乎",
- "无论",
- "无宁",
- "毋宁",
- "嘻",
- "吓",
- "相对而言",
- "像",
- "向",
- "向着",
- "嘘",
- "呀",
- "焉",
- "沿",
- "沿着",
- "要",
- "要不",
- "要不然",
- "要不是",
- "要么",
- "要是",
- "也",
- "也罢",
- "也好",
- "一",
- "一般",
- "一旦",
- "一方面",
- "一来",
- "一切",
- "一样",
- "一则",
- "依",
- "依照",
- "矣",
- "以",
- "以便",
- "以及",
- "以免",
- "以至",
- "以至于",
- "以致",
- "抑或",
- "因",
- "因此",
- "因而",
- "因为",
- "哟",
- "用",
- "由",
- "由此可见",
- "由于",
- "有",
- "有的",
- "有关",
- "有些",
- "又",
- "于",
- "于是",
- "于是乎",
- "与",
- "与此同时",
- "与否",
- "与其",
- "越是",
- "云云",
- "哉",
- "再说",
- "再者",
- "在",
- "在下",
- "咱",
- "咱们",
- "则",
- "怎",
- "怎么",
- "怎么办",
- "怎么样",
- "怎样",
- "咋",
- "照",
- "照着",
- "者",
- "这",
- "这边",
- "这儿",
- "这个",
- "这会儿",
- "这就是说",
- "这里",
- "这么",
- "这么点儿",
- "这么些",
- "这么样",
- "这时",
- "这些",
- "这样",
- "正如",
- "吱",
- "之",
- "之类",
- "之所以",
- "之一",
- "只是",
- "只限",
- "只要",
- "只有",
- "至",
- "至于",
- "诸位",
- "着",
- "着呢",
- "自",
- "自从",
- "自个儿",
- "自各儿",
- "自己",
- "自家",
- "自身",
- "综上所述",
- "总的来看",
- "总的来说",
- "总的说来",
- "总而言之",
- "总之",
- "纵",
- "纵令",
- "纵然",
- "纵使",
- "遵照",
- "作为",
- "兮",
- "呃",
- "呗",
- "咚",
- "咦",
- "喏",
- "啐",
- "喔唷",
- "嗬",
- "嗯",
- "嗳",
- "~",
- "!",
- ".",
- ":",
- '"',
- "'",
- "(",
- ")",
- "*",
- "A",
- "白",
- "社会主义",
- "--",
- "..",
- ">>",
- " [",
- " ]",
- "",
- "<",
- ">",
- "/",
- "\\",
- "|",
- "-",
- "_",
- "+",
- "=",
- "&",
- "^",
- "%",
- "#",
- "@",
- "`",
- ";",
- "$",
- "(",
- ")",
- "——",
- "—",
- "¥",
- "·",
- "...",
- "‘",
- "’",
- "〉",
- "〈",
- "…",
- " ",
- "0",
- "1",
- "2",
- "3",
- "4",
- "5",
- "6",
- "7",
- "8",
- "9",
- "二",
- "三",
- "四",
- "五",
- "六",
- "七",
- "八",
- "九",
- "零",
- ">",
- "<",
- "@",
- "#",
- "$",
- "%",
- "︿",
- "&",
- "*",
- "+",
- "~",
- "|",
- "[",
- "]",
- "{",
- "}",
- "啊哈",
- "啊呀",
- "啊哟",
- "挨次",
- "挨个",
- "挨家挨户",
- "挨门挨户",
- "挨门逐户",
- "挨着",
- "按理",
- "按期",
- "按时",
- "按说",
- "暗地里",
- "暗中",
- "暗自",
- "昂然",
- "八成",
- "白白",
- "半",
- "梆",
- "保管",
- "保险",
- "饱",
- "背地里",
- "背靠背",
- "倍感",
- "倍加",
- "本人",
- "本身",
- "甭",
- "比起",
- "比如说",
- "比照",
- "毕竟",
- "必",
- "必定",
- "必将",
- "必须",
- "便",
- "别人",
- "并非",
- "并肩",
- "并没",
- "并没有",
- "并排",
- "并无",
- "勃然",
- "不",
- "不必",
- "不常",
- "不大",
- "不但...而且",
- "不得",
- "不得不",
- "不得了",
- "不得已",
- "不迭",
- "不定",
- "不对",
- "不妨",
- "不管怎样",
- "不会",
- "不仅...而且",
- "不仅仅",
- "不仅仅是",
- "不经意",
- "不可开交",
- "不可抗拒",
- "不力",
- "不了",
- "不料",
- "不满",
- "不免",
- "不能不",
- "不起",
- "不巧",
- "不然的话",
- "不日",
- "不少",
- "不胜",
- "不时",
- "不是",
- "不同",
- "不能",
- "不要",
- "不外",
- "不外乎",
- "不下",
- "不限",
- "不消",
- "不已",
- "不亦乐乎",
- "不由得",
- "不再",
- "不择手段",
- "不怎么",
- "不曾",
- "不知不觉",
- "不止",
- "不止一次",
- "不至于",
- "才",
- "才能",
- "策略地",
- "差不多",
- "差一点",
- "常",
- "常常",
- "常言道",
- "常言说",
- "常言说得好",
- "长此下去",
- "长话短说",
- "长期以来",
- "长线",
- "敞开儿",
- "彻夜",
- "陈年",
- "趁便",
- "趁机",
- "趁热",
- "趁势",
- "趁早",
- "成年",
- "成年累月",
- "成心",
- "乘机",
- "乘胜",
- "乘势",
- "乘隙",
- "乘虚",
- "诚然",
- "迟早",
- "充分",
- "充其极",
- "充其量",
- "抽冷子",
- "臭",
- "初",
- "出",
- "出来",
- "出去",
- "除此",
- "除此而外",
- "除此以外",
- "除开",
- "除去",
- "除却",
- "除外",
- "处处",
- "川流不息",
- "传",
- "传说",
- "传闻",
- "串行",
- "纯",
- "纯粹",
- "此后",
- "此中",
- "次第",
- "匆匆",
- "从不",
- "从此",
- "从此以后",
- "从古到今",
- "从古至今",
- "从今以后",
- "从宽",
- "从来",
- "从轻",
- "从速",
- "从头",
- "从未",
- "从无到有",
- "从小",
- "从新",
- "从严",
- "从优",
- "从早到晚",
- "从中",
- "从重",
- "凑巧",
- "粗",
- "存心",
- "达旦",
- "打从",
- "打开天窗说亮话",
- "大",
- "大不了",
- "大大",
- "大抵",
- "大都",
- "大多",
- "大凡",
- "大概",
- "大家",
- "大举",
- "大略",
- "大面儿上",
- "大事",
- "大体",
- "大体上",
- "大约",
- "大张旗鼓",
- "大致",
- "呆呆地",
- "带",
- "殆",
- "待到",
- "单",
- "单纯",
- "单单",
- "但愿",
- "弹指之间",
- "当场",
- "当儿",
- "当即",
- "当口儿",
- "当然",
- "当庭",
- "当头",
- "当下",
- "当真",
- "当中",
- "倒不如",
- "倒不如说",
- "倒是",
- "到处",
- "到底",
- "到了儿",
- "到目前为止",
- "到头",
- "到头来",
- "得起",
- "得天独厚",
- "的确",
- "等到",
- "叮当",
- "顶多",
- "定",
- "动不动",
- "动辄",
- "陡然",
- "都",
- "独",
- "独自",
- "断然",
- "顿时",
- "多次",
- "多多",
- "多多少少",
- "多多益善",
- "多亏",
- "多年来",
- "多年前",
- "而后",
- "而论",
- "而又",
- "尔等",
- "二话不说",
- "二话没说",
- "反倒",
- "反倒是",
- "反而",
- "反手",
- "反之亦然",
- "反之则",
- "方",
- "方才",
- "方能",
- "放量",
- "非常",
- "非得",
- "分期",
- "分期分批",
- "分头",
- "奋勇",
- "愤然",
- "风雨无阻",
- "逢",
- "弗",
- "甫",
- "嘎嘎",
- "该当",
- "概",
- "赶快",
- "赶早不赶晚",
- "敢",
- "敢情",
- "敢于",
- "刚",
- "刚才",
- "刚好",
- "刚巧",
- "高低",
- "格外",
- "隔日",
- "隔夜",
- "个人",
- "各式",
- "更",
- "更加",
- "更进一步",
- "更为",
- "公然",
- "共",
- "共总",
- "够瞧的",
- "姑且",
- "古来",
- "故而",
- "故意",
- "固",
- "怪",
- "怪不得",
- "惯常",
- "光",
- "光是",
- "归根到底",
- "归根结底",
- "过于",
- "毫不",
- "毫无",
- "毫无保留地",
- "毫无例外",
- "好在",
- "何必",
- "何尝",
- "何妨",
- "何苦",
- "何乐而不为",
- "何须",
- "何止",
- "很",
- "很多",
- "很少",
- "轰然",
- "后来",
- "呼啦",
- "忽地",
- "忽然",
- "互",
- "互相",
- "哗啦",
- "话说",
- "还",
- "恍然",
- "会",
- "豁然",
- "活",
- "伙同",
- "或多或少",
- "或许",
- "基本",
- "基本上",
- "基于",
- "极",
- "极大",
- "极度",
- "极端",
- "极力",
- "极其",
- "极为",
- "急匆匆",
- "即将",
- "即刻",
- "即是说",
- "几度",
- "几番",
- "几乎",
- "几经",
- "既...又",
- "继之",
- "加上",
- "加以",
- "间或",
- "简而言之",
- "简言之",
- "简直",
- "见",
- "将才",
- "将近",
- "将要",
- "交口",
- "较比",
- "较为",
- "接连不断",
- "接下来",
- "皆可",
- "截然",
- "截至",
- "藉以",
- "借此",
- "借以",
- "届时",
- "仅",
- "仅仅",
- "谨",
- "进来",
- "进去",
- "近",
- "近几年来",
- "近来",
- "近年来",
- "尽管如此",
- "尽可能",
- "尽快",
- "尽量",
- "尽然",
- "尽如人意",
- "尽心竭力",
- "尽心尽力",
- "尽早",
- "精光",
- "经常",
- "竟",
- "竟然",
- "究竟",
- "就此",
- "就地",
- "就算",
- "居然",
- "局外",
- "举凡",
- "据称",
- "据此",
- "据实",
- "据说",
- "据我所知",
- "据悉",
- "具体来说",
- "决不",
- "决非",
- "绝",
- "绝不",
- "绝顶",
- "绝对",
- "绝非",
- "均",
- "喀",
- "看",
- "看来",
- "看起来",
- "看上去",
- "看样子",
- "可好",
- "可能",
- "恐怕",
- "快",
- "快要",
- "来不及",
- "来得及",
- "来讲",
- "来看",
- "拦腰",
- "牢牢",
- "老",
- "老大",
- "老老实实",
- "老是",
- "累次",
- "累年",
- "理当",
- "理该",
- "理应",
- "历",
- "立",
- "立地",
- "立刻",
- "立马",
- "立时",
- "联袂",
- "连连",
- "连日",
- "连日来",
- "连声",
- "连袂",
- "临到",
- "另方面",
- "另行",
- "另一个",
- "路经",
- "屡",
- "屡次",
- "屡次三番",
- "屡屡",
- "缕缕",
- "率尔",
- "率然",
- "略",
- "略加",
- "略微",
- "略为",
- "论说",
- "马上",
- "蛮",
- "满",
- "没",
- "没有",
- "每逢",
- "每每",
- "每时每刻",
- "猛然",
- "猛然间",
- "莫",
- "莫不",
- "莫非",
- "莫如",
- "默默地",
- "默然",
- "呐",
- "那末",
- "奈",
- "难道",
- "难得",
- "难怪",
- "难说",
- "内",
- "年复一年",
- "凝神",
- "偶而",
- "偶尔",
- "怕",
- "砰",
- "碰巧",
- "譬如",
- "偏偏",
- "乒",
- "平素",
- "颇",
- "迫于",
- "扑通",
- "其后",
- "其实",
- "奇",
- "齐",
- "起初",
- "起来",
- "起首",
- "起头",
- "起先",
- "岂",
- "岂非",
- "岂止",
- "迄",
- "恰逢",
- "恰好",
- "恰恰",
- "恰巧",
- "恰如",
- "恰似",
- "千",
- "千万",
- "千万千万",
- "切",
- "切不可",
- "切莫",
- "切切",
- "切勿",
- "窃",
- "亲口",
- "亲身",
- "亲手",
- "亲眼",
- "亲自",
- "顷",
- "顷刻",
- "顷刻间",
- "顷刻之间",
- "请勿",
- "穷年累月",
- "取道",
- "去",
- "权时",
- "全都",
- "全力",
- "全年",
- "全然",
- "全身心",
- "然",
- "人人",
- "仍",
- "仍旧",
- "仍然",
- "日复一日",
- "日见",
- "日渐",
- "日益",
- "日臻",
- "如常",
- "如此等等",
- "如次",
- "如今",
- "如期",
- "如前所述",
- "如上",
- "如下",
- "汝",
- "三番两次",
- "三番五次",
- "三天两头",
- "瑟瑟",
- "沙沙",
- "上",
- "上来",
- "上去",
- "一个",
- "月",
- "日",
- "\n",
-}
+STOPWORDS: frozenset[str] = frozenset(
+ (
+ "during",
+ "when",
+ "but",
+ "then",
+ "further",
+ "isn",
+ "mustn't",
+ "until",
+ "own",
+ "i",
+ "couldn",
+ "y",
+ "only",
+ "you've",
+ "ours",
+ "who",
+ "where",
+ "ourselves",
+ "has",
+ "to",
+ "was",
+ "didn't",
+ "themselves",
+ "if",
+ "against",
+ "through",
+ "her",
+ "an",
+ "your",
+ "can",
+ "those",
+ "didn",
+ "about",
+ "aren't",
+ "shan't",
+ "be",
+ "not",
+ "these",
+ "again",
+ "so",
+ "t",
+ "theirs",
+ "weren",
+ "won't",
+ "won",
+ "itself",
+ "just",
+ "same",
+ "while",
+ "why",
+ "doesn",
+ "aren",
+ "him",
+ "haven",
+ "for",
+ "you'll",
+ "that",
+ "we",
+ "am",
+ "d",
+ "by",
+ "having",
+ "wasn't",
+ "than",
+ "weren't",
+ "out",
+ "from",
+ "now",
+ "their",
+ "too",
+ "hadn",
+ "o",
+ "needn",
+ "most",
+ "it",
+ "under",
+ "needn't",
+ "any",
+ "some",
+ "few",
+ "ll",
+ "hers",
+ "which",
+ "m",
+ "you're",
+ "off",
+ "other",
+ "had",
+ "she",
+ "you'd",
+ "do",
+ "you",
+ "does",
+ "s",
+ "will",
+ "each",
+ "wouldn't",
+ "hasn't",
+ "such",
+ "more",
+ "whom",
+ "she's",
+ "my",
+ "yours",
+ "yourself",
+ "of",
+ "on",
+ "very",
+ "hadn't",
+ "with",
+ "yourselves",
+ "been",
+ "ma",
+ "them",
+ "mightn't",
+ "shan",
+ "mustn",
+ "they",
+ "what",
+ "both",
+ "that'll",
+ "how",
+ "is",
+ "he",
+ "because",
+ "down",
+ "haven't",
+ "are",
+ "no",
+ "it's",
+ "our",
+ "being",
+ "the",
+ "or",
+ "above",
+ "myself",
+ "once",
+ "don't",
+ "doesn't",
+ "as",
+ "nor",
+ "here",
+ "herself",
+ "hasn",
+ "mightn",
+ "have",
+ "its",
+ "all",
+ "were",
+ "ain",
+ "this",
+ "at",
+ "after",
+ "over",
+ "shouldn't",
+ "into",
+ "before",
+ "don",
+ "wouldn",
+ "re",
+ "couldn't",
+ "wasn",
+ "in",
+ "should",
+ "there",
+ "himself",
+ "isn't",
+ "should've",
+ "doing",
+ "ve",
+ "shouldn",
+ "a",
+ "did",
+ "and",
+ "his",
+ "between",
+ "me",
+ "up",
+ "below",
+ "人民",
+ "末##末",
+ "啊",
+ "阿",
+ "哎",
+ "哎呀",
+ "哎哟",
+ "唉",
+ "俺",
+ "俺们",
+ "按",
+ "按照",
+ "吧",
+ "吧哒",
+ "把",
+ "罢了",
+ "被",
+ "本",
+ "本着",
+ "比",
+ "比方",
+ "比如",
+ "鄙人",
+ "彼",
+ "彼此",
+ "边",
+ "别",
+ "别的",
+ "别说",
+ "并",
+ "并且",
+ "不比",
+ "不成",
+ "不单",
+ "不但",
+ "不独",
+ "不管",
+ "不光",
+ "不过",
+ "不仅",
+ "不拘",
+ "不论",
+ "不怕",
+ "不然",
+ "不如",
+ "不特",
+ "不惟",
+ "不问",
+ "不只",
+ "朝",
+ "朝着",
+ "趁",
+ "趁着",
+ "乘",
+ "冲",
+ "除",
+ "除此之外",
+ "除非",
+ "除了",
+ "此",
+ "此间",
+ "此外",
+ "从",
+ "从而",
+ "打",
+ "待",
+ "但",
+ "但是",
+ "当",
+ "当着",
+ "到",
+ "得",
+ "的",
+ "的话",
+ "等",
+ "等等",
+ "地",
+ "第",
+ "叮咚",
+ "对",
+ "对于",
+ "多",
+ "多少",
+ "而",
+ "而况",
+ "而且",
+ "而是",
+ "而外",
+ "而言",
+ "而已",
+ "尔后",
+ "反过来",
+ "反过来说",
+ "反之",
+ "非但",
+ "非徒",
+ "否则",
+ "嘎",
+ "嘎登",
+ "该",
+ "赶",
+ "个",
+ "各",
+ "各个",
+ "各位",
+ "各种",
+ "各自",
+ "给",
+ "根据",
+ "跟",
+ "故",
+ "故此",
+ "固然",
+ "关于",
+ "管",
+ "归",
+ "果然",
+ "果真",
+ "过",
+ "哈",
+ "哈哈",
+ "呵",
+ "和",
+ "何",
+ "何处",
+ "何况",
+ "何时",
+ "嘿",
+ "哼",
+ "哼唷",
+ "呼哧",
+ "乎",
+ "哗",
+ "还是",
+ "还有",
+ "换句话说",
+ "换言之",
+ "或",
+ "或是",
+ "或者",
+ "极了",
+ "及",
+ "及其",
+ "及至",
+ "即",
+ "即便",
+ "即或",
+ "即令",
+ "即若",
+ "即使",
+ "几",
+ "几时",
+ "己",
+ "既",
+ "既然",
+ "既是",
+ "继而",
+ "加之",
+ "假如",
+ "假若",
+ "假使",
+ "鉴于",
+ "将",
+ "较",
+ "较之",
+ "叫",
+ "接着",
+ "结果",
+ "借",
+ "紧接着",
+ "进而",
+ "尽",
+ "尽管",
+ "经",
+ "经过",
+ "就",
+ "就是",
+ "就是说",
+ "据",
+ "具体地说",
+ "具体说来",
+ "开始",
+ "开外",
+ "靠",
+ "咳",
+ "可",
+ "可见",
+ "可是",
+ "可以",
+ "况且",
+ "啦",
+ "来",
+ "来着",
+ "离",
+ "例如",
+ "哩",
+ "连",
+ "连同",
+ "两者",
+ "了",
+ "临",
+ "另",
+ "另外",
+ "另一方面",
+ "论",
+ "嘛",
+ "吗",
+ "慢说",
+ "漫说",
+ "冒",
+ "么",
+ "每",
+ "每当",
+ "们",
+ "莫若",
+ "某",
+ "某个",
+ "某些",
+ "拿",
+ "哪",
+ "哪边",
+ "哪儿",
+ "哪个",
+ "哪里",
+ "哪年",
+ "哪怕",
+ "哪天",
+ "哪些",
+ "哪样",
+ "那",
+ "那边",
+ "那儿",
+ "那个",
+ "那会儿",
+ "那里",
+ "那么",
+ "那么些",
+ "那么样",
+ "那时",
+ "那些",
+ "那样",
+ "乃",
+ "乃至",
+ "呢",
+ "能",
+ "你",
+ "你们",
+ "您",
+ "宁",
+ "宁可",
+ "宁肯",
+ "宁愿",
+ "哦",
+ "呕",
+ "啪达",
+ "旁人",
+ "呸",
+ "凭",
+ "凭借",
+ "其",
+ "其次",
+ "其二",
+ "其他",
+ "其它",
+ "其一",
+ "其余",
+ "其中",
+ "起",
+ "起见",
+ "岂但",
+ "恰恰相反",
+ "前后",
+ "前者",
+ "且",
+ "然而",
+ "然后",
+ "然则",
+ "让",
+ "人家",
+ "任",
+ "任何",
+ "任凭",
+ "如",
+ "如此",
+ "如果",
+ "如何",
+ "如其",
+ "如若",
+ "如上所述",
+ "若",
+ "若非",
+ "若是",
+ "啥",
+ "上下",
+ "尚且",
+ "设若",
+ "设使",
+ "甚而",
+ "甚么",
+ "甚至",
+ "省得",
+ "时候",
+ "什么",
+ "什么样",
+ "使得",
+ "是",
+ "是的",
+ "首先",
+ "谁",
+ "谁知",
+ "顺",
+ "顺着",
+ "似的",
+ "虽",
+ "虽然",
+ "虽说",
+ "虽则",
+ "随",
+ "随着",
+ "所",
+ "所以",
+ "他",
+ "他们",
+ "他人",
+ "它",
+ "它们",
+ "她",
+ "她们",
+ "倘",
+ "倘或",
+ "倘然",
+ "倘若",
+ "倘使",
+ "腾",
+ "替",
+ "通过",
+ "同",
+ "同时",
+ "哇",
+ "万一",
+ "往",
+ "望",
+ "为",
+ "为何",
+ "为了",
+ "为什么",
+ "为着",
+ "喂",
+ "嗡嗡",
+ "我",
+ "我们",
+ "呜",
+ "呜呼",
+ "乌乎",
+ "无论",
+ "无宁",
+ "毋宁",
+ "嘻",
+ "吓",
+ "相对而言",
+ "像",
+ "向",
+ "向着",
+ "嘘",
+ "呀",
+ "焉",
+ "沿",
+ "沿着",
+ "要",
+ "要不",
+ "要不然",
+ "要不是",
+ "要么",
+ "要是",
+ "也",
+ "也罢",
+ "也好",
+ "一",
+ "一般",
+ "一旦",
+ "一方面",
+ "一来",
+ "一切",
+ "一样",
+ "一则",
+ "依",
+ "依照",
+ "矣",
+ "以",
+ "以便",
+ "以及",
+ "以免",
+ "以至",
+ "以至于",
+ "以致",
+ "抑或",
+ "因",
+ "因此",
+ "因而",
+ "因为",
+ "哟",
+ "用",
+ "由",
+ "由此可见",
+ "由于",
+ "有",
+ "有的",
+ "有关",
+ "有些",
+ "又",
+ "于",
+ "于是",
+ "于是乎",
+ "与",
+ "与此同时",
+ "与否",
+ "与其",
+ "越是",
+ "云云",
+ "哉",
+ "再说",
+ "再者",
+ "在",
+ "在下",
+ "咱",
+ "咱们",
+ "则",
+ "怎",
+ "怎么",
+ "怎么办",
+ "怎么样",
+ "怎样",
+ "咋",
+ "照",
+ "照着",
+ "者",
+ "这",
+ "这边",
+ "这儿",
+ "这个",
+ "这会儿",
+ "这就是说",
+ "这里",
+ "这么",
+ "这么点儿",
+ "这么些",
+ "这么样",
+ "这时",
+ "这些",
+ "这样",
+ "正如",
+ "吱",
+ "之",
+ "之类",
+ "之所以",
+ "之一",
+ "只是",
+ "只限",
+ "只要",
+ "只有",
+ "至",
+ "至于",
+ "诸位",
+ "着",
+ "着呢",
+ "自",
+ "自从",
+ "自个儿",
+ "自各儿",
+ "自己",
+ "自家",
+ "自身",
+ "综上所述",
+ "总的来看",
+ "总的来说",
+ "总的说来",
+ "总而言之",
+ "总之",
+ "纵",
+ "纵令",
+ "纵然",
+ "纵使",
+ "遵照",
+ "作为",
+ "兮",
+ "呃",
+ "呗",
+ "咚",
+ "咦",
+ "喏",
+ "啐",
+ "喔唷",
+ "嗬",
+ "嗯",
+ "嗳",
+ "~",
+ "!",
+ ".",
+ ":",
+ '"',
+ "'",
+ "(",
+ ")",
+ "*",
+ "A",
+ "白",
+ "社会主义",
+ "--",
+ "..",
+ ">>",
+ " [",
+ " ]",
+ "",
+ "<",
+ ">",
+ "/",
+ "\\",
+ "|",
+ "-",
+ "_",
+ "+",
+ "=",
+ "&",
+ "^",
+ "%",
+ "#",
+ "@",
+ "`",
+ ";",
+ "$",
+ "(",
+ ")",
+ "——",
+ "—",
+ "¥",
+ "·",
+ "...",
+ "‘",
+ "’",
+ "〉",
+ "〈",
+ "…",
+ " ",
+ "0",
+ "1",
+ "2",
+ "3",
+ "4",
+ "5",
+ "6",
+ "7",
+ "8",
+ "9",
+ "二",
+ "三",
+ "四",
+ "五",
+ "六",
+ "七",
+ "八",
+ "九",
+ "零",
+ ">",
+ "<",
+ "@",
+ "#",
+ "$",
+ "%",
+ "︿",
+ "&",
+ "*",
+ "+",
+ "~",
+ "|",
+ "[",
+ "]",
+ "{",
+ "}",
+ "啊哈",
+ "啊呀",
+ "啊哟",
+ "挨次",
+ "挨个",
+ "挨家挨户",
+ "挨门挨户",
+ "挨门逐户",
+ "挨着",
+ "按理",
+ "按期",
+ "按时",
+ "按说",
+ "暗地里",
+ "暗中",
+ "暗自",
+ "昂然",
+ "八成",
+ "白白",
+ "半",
+ "梆",
+ "保管",
+ "保险",
+ "饱",
+ "背地里",
+ "背靠背",
+ "倍感",
+ "倍加",
+ "本人",
+ "本身",
+ "甭",
+ "比起",
+ "比如说",
+ "比照",
+ "毕竟",
+ "必",
+ "必定",
+ "必将",
+ "必须",
+ "便",
+ "别人",
+ "并非",
+ "并肩",
+ "并没",
+ "并没有",
+ "并排",
+ "并无",
+ "勃然",
+ "不",
+ "不必",
+ "不常",
+ "不大",
+ "不但...而且",
+ "不得",
+ "不得不",
+ "不得了",
+ "不得已",
+ "不迭",
+ "不定",
+ "不对",
+ "不妨",
+ "不管怎样",
+ "不会",
+ "不仅...而且",
+ "不仅仅",
+ "不仅仅是",
+ "不经意",
+ "不可开交",
+ "不可抗拒",
+ "不力",
+ "不了",
+ "不料",
+ "不满",
+ "不免",
+ "不能不",
+ "不起",
+ "不巧",
+ "不然的话",
+ "不日",
+ "不少",
+ "不胜",
+ "不时",
+ "不是",
+ "不同",
+ "不能",
+ "不要",
+ "不外",
+ "不外乎",
+ "不下",
+ "不限",
+ "不消",
+ "不已",
+ "不亦乐乎",
+ "不由得",
+ "不再",
+ "不择手段",
+ "不怎么",
+ "不曾",
+ "不知不觉",
+ "不止",
+ "不止一次",
+ "不至于",
+ "才",
+ "才能",
+ "策略地",
+ "差不多",
+ "差一点",
+ "常",
+ "常常",
+ "常言道",
+ "常言说",
+ "常言说得好",
+ "长此下去",
+ "长话短说",
+ "长期以来",
+ "长线",
+ "敞开儿",
+ "彻夜",
+ "陈年",
+ "趁便",
+ "趁机",
+ "趁热",
+ "趁势",
+ "趁早",
+ "成年",
+ "成年累月",
+ "成心",
+ "乘机",
+ "乘胜",
+ "乘势",
+ "乘隙",
+ "乘虚",
+ "诚然",
+ "迟早",
+ "充分",
+ "充其极",
+ "充其量",
+ "抽冷子",
+ "臭",
+ "初",
+ "出",
+ "出来",
+ "出去",
+ "除此",
+ "除此而外",
+ "除此以外",
+ "除开",
+ "除去",
+ "除却",
+ "除外",
+ "处处",
+ "川流不息",
+ "传",
+ "传说",
+ "传闻",
+ "串行",
+ "纯",
+ "纯粹",
+ "此后",
+ "此中",
+ "次第",
+ "匆匆",
+ "从不",
+ "从此",
+ "从此以后",
+ "从古到今",
+ "从古至今",
+ "从今以后",
+ "从宽",
+ "从来",
+ "从轻",
+ "从速",
+ "从头",
+ "从未",
+ "从无到有",
+ "从小",
+ "从新",
+ "从严",
+ "从优",
+ "从早到晚",
+ "从中",
+ "从重",
+ "凑巧",
+ "粗",
+ "存心",
+ "达旦",
+ "打从",
+ "打开天窗说亮话",
+ "大",
+ "大不了",
+ "大大",
+ "大抵",
+ "大都",
+ "大多",
+ "大凡",
+ "大概",
+ "大家",
+ "大举",
+ "大略",
+ "大面儿上",
+ "大事",
+ "大体",
+ "大体上",
+ "大约",
+ "大张旗鼓",
+ "大致",
+ "呆呆地",
+ "带",
+ "殆",
+ "待到",
+ "单",
+ "单纯",
+ "单单",
+ "但愿",
+ "弹指之间",
+ "当场",
+ "当儿",
+ "当即",
+ "当口儿",
+ "当然",
+ "当庭",
+ "当头",
+ "当下",
+ "当真",
+ "当中",
+ "倒不如",
+ "倒不如说",
+ "倒是",
+ "到处",
+ "到底",
+ "到了儿",
+ "到目前为止",
+ "到头",
+ "到头来",
+ "得起",
+ "得天独厚",
+ "的确",
+ "等到",
+ "叮当",
+ "顶多",
+ "定",
+ "动不动",
+ "动辄",
+ "陡然",
+ "都",
+ "独",
+ "独自",
+ "断然",
+ "顿时",
+ "多次",
+ "多多",
+ "多多少少",
+ "多多益善",
+ "多亏",
+ "多年来",
+ "多年前",
+ "而后",
+ "而论",
+ "而又",
+ "尔等",
+ "二话不说",
+ "二话没说",
+ "反倒",
+ "反倒是",
+ "反而",
+ "反手",
+ "反之亦然",
+ "反之则",
+ "方",
+ "方才",
+ "方能",
+ "放量",
+ "非常",
+ "非得",
+ "分期",
+ "分期分批",
+ "分头",
+ "奋勇",
+ "愤然",
+ "风雨无阻",
+ "逢",
+ "弗",
+ "甫",
+ "嘎嘎",
+ "该当",
+ "概",
+ "赶快",
+ "赶早不赶晚",
+ "敢",
+ "敢情",
+ "敢于",
+ "刚",
+ "刚才",
+ "刚好",
+ "刚巧",
+ "高低",
+ "格外",
+ "隔日",
+ "隔夜",
+ "个人",
+ "各式",
+ "更",
+ "更加",
+ "更进一步",
+ "更为",
+ "公然",
+ "共",
+ "共总",
+ "够瞧的",
+ "姑且",
+ "古来",
+ "故而",
+ "故意",
+ "固",
+ "怪",
+ "怪不得",
+ "惯常",
+ "光",
+ "光是",
+ "归根到底",
+ "归根结底",
+ "过于",
+ "毫不",
+ "毫无",
+ "毫无保留地",
+ "毫无例外",
+ "好在",
+ "何必",
+ "何尝",
+ "何妨",
+ "何苦",
+ "何乐而不为",
+ "何须",
+ "何止",
+ "很",
+ "很多",
+ "很少",
+ "轰然",
+ "后来",
+ "呼啦",
+ "忽地",
+ "忽然",
+ "互",
+ "互相",
+ "哗啦",
+ "话说",
+ "还",
+ "恍然",
+ "会",
+ "豁然",
+ "活",
+ "伙同",
+ "或多或少",
+ "或许",
+ "基本",
+ "基本上",
+ "基于",
+ "极",
+ "极大",
+ "极度",
+ "极端",
+ "极力",
+ "极其",
+ "极为",
+ "急匆匆",
+ "即将",
+ "即刻",
+ "即是说",
+ "几度",
+ "几番",
+ "几乎",
+ "几经",
+ "既...又",
+ "继之",
+ "加上",
+ "加以",
+ "间或",
+ "简而言之",
+ "简言之",
+ "简直",
+ "见",
+ "将才",
+ "将近",
+ "将要",
+ "交口",
+ "较比",
+ "较为",
+ "接连不断",
+ "接下来",
+ "皆可",
+ "截然",
+ "截至",
+ "藉以",
+ "借此",
+ "借以",
+ "届时",
+ "仅",
+ "仅仅",
+ "谨",
+ "进来",
+ "进去",
+ "近",
+ "近几年来",
+ "近来",
+ "近年来",
+ "尽管如此",
+ "尽可能",
+ "尽快",
+ "尽量",
+ "尽然",
+ "尽如人意",
+ "尽心竭力",
+ "尽心尽力",
+ "尽早",
+ "精光",
+ "经常",
+ "竟",
+ "竟然",
+ "究竟",
+ "就此",
+ "就地",
+ "就算",
+ "居然",
+ "局外",
+ "举凡",
+ "据称",
+ "据此",
+ "据实",
+ "据说",
+ "据我所知",
+ "据悉",
+ "具体来说",
+ "决不",
+ "决非",
+ "绝",
+ "绝不",
+ "绝顶",
+ "绝对",
+ "绝非",
+ "均",
+ "喀",
+ "看",
+ "看来",
+ "看起来",
+ "看上去",
+ "看样子",
+ "可好",
+ "可能",
+ "恐怕",
+ "快",
+ "快要",
+ "来不及",
+ "来得及",
+ "来讲",
+ "来看",
+ "拦腰",
+ "牢牢",
+ "老",
+ "老大",
+ "老老实实",
+ "老是",
+ "累次",
+ "累年",
+ "理当",
+ "理该",
+ "理应",
+ "历",
+ "立",
+ "立地",
+ "立刻",
+ "立马",
+ "立时",
+ "联袂",
+ "连连",
+ "连日",
+ "连日来",
+ "连声",
+ "连袂",
+ "临到",
+ "另方面",
+ "另行",
+ "另一个",
+ "路经",
+ "屡",
+ "屡次",
+ "屡次三番",
+ "屡屡",
+ "缕缕",
+ "率尔",
+ "率然",
+ "略",
+ "略加",
+ "略微",
+ "略为",
+ "论说",
+ "马上",
+ "蛮",
+ "满",
+ "没",
+ "没有",
+ "每逢",
+ "每每",
+ "每时每刻",
+ "猛然",
+ "猛然间",
+ "莫",
+ "莫不",
+ "莫非",
+ "莫如",
+ "默默地",
+ "默然",
+ "呐",
+ "那末",
+ "奈",
+ "难道",
+ "难得",
+ "难怪",
+ "难说",
+ "内",
+ "年复一年",
+ "凝神",
+ "偶而",
+ "偶尔",
+ "怕",
+ "砰",
+ "碰巧",
+ "譬如",
+ "偏偏",
+ "乒",
+ "平素",
+ "颇",
+ "迫于",
+ "扑通",
+ "其后",
+ "其实",
+ "奇",
+ "齐",
+ "起初",
+ "起来",
+ "起首",
+ "起头",
+ "起先",
+ "岂",
+ "岂非",
+ "岂止",
+ "迄",
+ "恰逢",
+ "恰好",
+ "恰恰",
+ "恰巧",
+ "恰如",
+ "恰似",
+ "千",
+ "千万",
+ "千万千万",
+ "切",
+ "切不可",
+ "切莫",
+ "切切",
+ "切勿",
+ "窃",
+ "亲口",
+ "亲身",
+ "亲手",
+ "亲眼",
+ "亲自",
+ "顷",
+ "顷刻",
+ "顷刻间",
+ "顷刻之间",
+ "请勿",
+ "穷年累月",
+ "取道",
+ "去",
+ "权时",
+ "全都",
+ "全力",
+ "全年",
+ "全然",
+ "全身心",
+ "然",
+ "人人",
+ "仍",
+ "仍旧",
+ "仍然",
+ "日复一日",
+ "日见",
+ "日渐",
+ "日益",
+ "日臻",
+ "如常",
+ "如此等等",
+ "如次",
+ "如今",
+ "如期",
+ "如前所述",
+ "如上",
+ "如下",
+ "汝",
+ "三番两次",
+ "三番五次",
+ "三天两头",
+ "瑟瑟",
+ "沙沙",
+ "上",
+ "上来",
+ "上去",
+ "一个",
+ "月",
+ "日",
+ "\n",
+ )
+)
diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py
index 203a8588d6..fcbc3ffbfa 100644
--- a/api/core/rag/datasource/retrieval_service.py
+++ b/api/core/rag/datasource/retrieval_service.py
@@ -1,13 +1,12 @@
import concurrent.futures
import logging
from concurrent.futures import ThreadPoolExecutor
-from typing import Any, NotRequired
+from typing import Any, NotRequired, TypedDict
from flask import Flask, current_app
from graphon.model_runtime.entities.model_entities import ModelType
from sqlalchemy import select
from sqlalchemy.orm import Session, load_only
-from typing_extensions import TypedDict
from configs import dify_config
from core.db.session_factory import session_factory
diff --git a/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py b/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py
index fdb5ffebfc..6e76827a42 100644
--- a/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py
+++ b/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py
@@ -10,6 +10,7 @@ from mysql.connector import Error as MySQLError
from pydantic import BaseModel, model_validator
from configs import dify_config
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -178,9 +179,7 @@ class AlibabaCloudMySQLVector(BaseVector):
cur.execute(f"SELECT meta, text FROM {self.table_name} WHERE id IN ({placeholders})", ids)
docs = []
for record in cur:
- metadata = record["meta"]
- if isinstance(metadata, str):
- metadata = json.loads(metadata)
+ metadata = parse_metadata_json(record["meta"])
docs.append(Document(page_content=record["text"], metadata=metadata))
return docs
@@ -263,15 +262,13 @@ class AlibabaCloudMySQLVector(BaseVector):
# similarity = 1 / (1 + distance)
similarity = 1.0 / (1.0 + distance)
- metadata = record["meta"]
- if isinstance(metadata, str):
- metadata = json.loads(metadata)
+ metadata = parse_metadata_json(record["meta"])
metadata["score"] = similarity
metadata["distance"] = distance
if similarity >= score_threshold:
docs.append(Document(page_content=record["text"], metadata=metadata))
- except (ValueError, json.JSONDecodeError) as e:
+ except (ValueError, TypeError) as e:
logger.warning("Error processing search result: %s", e)
continue
@@ -306,9 +303,7 @@ class AlibabaCloudMySQLVector(BaseVector):
)
docs = []
for record in cur:
- metadata = record["meta"]
- if isinstance(metadata, str):
- metadata = json.loads(metadata)
+ metadata = parse_metadata_json(record["meta"])
metadata["score"] = float(record["score"])
docs.append(Document(page_content=record["text"], metadata=metadata))
return docs
diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py
index 702200e0ac..ce626bbd7e 100644
--- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py
+++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py
@@ -8,6 +8,7 @@ _import_err_msg = (
"please run `pip install alibabacloud_gpdb20160503 alibabacloud_tea_openapi`"
)
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.models.document import Document
from extensions.ext_redis import redis_client
@@ -257,7 +258,7 @@ class AnalyticdbVectorOpenAPI:
documents = []
for match in response.body.matches.match:
if match.score >= score_threshold:
- metadata = json.loads(match.metadata.get("metadata_"))
+ metadata = parse_metadata_json(match.metadata.get("metadata_"))
metadata["score"] = match.score
doc = Document(
page_content=match.metadata.get("page_content"),
@@ -294,7 +295,7 @@ class AnalyticdbVectorOpenAPI:
documents = []
for match in response.body.matches.match:
if match.score >= score_threshold:
- metadata = json.loads(match.metadata.get("metadata_"))
+ metadata = parse_metadata_json(match.metadata.get("metadata_"))
metadata["score"] = match.score
doc = Document(
page_content=match.metadata.get("page_content"),
diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py
index 9f5842e449..2b220fc04d 100644
--- a/api/core/rag/datasource/vdb/baidu/baidu_vector.py
+++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py
@@ -29,6 +29,7 @@ from pymochow.model.table import AnnSearch, BM25SearchRequest, HNSWSearchParams,
from configs import dify_config
from core.rag.datasource.vdb.field import Field as VDBField
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -173,15 +174,9 @@ class BaiduVector(BaseVector):
score = row.get("score", 0.0)
meta = row_data.get(VDBField.METADATA_KEY, {})
- # Handle both JSON string and dict formats for backward compatibility
- if isinstance(meta, str):
- try:
- import json
-
- meta = json.loads(meta)
- except (json.JSONDecodeError, TypeError):
- meta = {}
- elif not isinstance(meta, dict):
+ try:
+ meta = parse_metadata_json(meta)
+ except (ValueError, TypeError):
meta = {}
if score >= score_threshold:
@@ -200,7 +195,11 @@ class BaiduVector(BaseVector):
raise
def _init_client(self, config) -> MochowClient:
- config = Configuration(credentials=BceCredentials(config.account, config.api_key), endpoint=config.endpoint)
+ config = Configuration(
+ credentials=BceCredentials(config.account, config.api_key),
+ endpoint=config.endpoint,
+ connection_timeout_in_mills=config.connection_timeout_in_mills,
+ )
client = MochowClient(config)
return client
diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py
index 8e8120fc10..a4dddc68f0 100644
--- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py
+++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
from clickzetta.connector.v0.connection import Connection # type: ignore
from configs import dify_config
-from core.rag.datasource.vdb.field import Field
+from core.rag.datasource.vdb.field import Field, parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.embedding.embedding_base import Embeddings
@@ -357,18 +357,19 @@ class ClickzettaVector(BaseVector):
"""
try:
if raw_metadata:
- metadata = json.loads(raw_metadata)
+ # First parse may yield a string (double-encoded JSON) so use json.loads
+ first_pass = json.loads(raw_metadata)
# Handle double-encoded JSON
- if isinstance(metadata, str):
- metadata = json.loads(metadata)
-
- # Ensure we have a dict
- if not isinstance(metadata, dict):
+ if isinstance(first_pass, str):
+ metadata = parse_metadata_json(first_pass)
+ elif isinstance(first_pass, dict):
+ metadata = first_pass
+ else:
metadata = {}
else:
metadata = {}
- except (json.JSONDecodeError, TypeError):
+ except (json.JSONDecodeError, ValueError, TypeError):
logger.exception("JSON parsing failed for metadata")
# Fallback: extract document_id with regex
doc_id_match = re.search(r'"document_id":\s*"([^"]+)"', raw_metadata or "")
@@ -930,17 +931,18 @@ class ClickzettaVector(BaseVector):
# Parse metadata from JSON string (may be double-encoded)
try:
if row[2]:
- metadata = json.loads(row[2])
+ # First parse may yield a string (double-encoded JSON)
+ first_pass = json.loads(row[2])
- # If result is a string, it's double-encoded JSON - parse again
- if isinstance(metadata, str):
- metadata = json.loads(metadata)
-
- if not isinstance(metadata, dict):
+ if isinstance(first_pass, str):
+ metadata = parse_metadata_json(first_pass)
+ elif isinstance(first_pass, dict):
+ metadata = first_pass
+ else:
metadata = {}
else:
metadata = {}
- except (json.JSONDecodeError, TypeError):
+ except (json.JSONDecodeError, ValueError, TypeError):
logger.exception("JSON parsing failed")
# Fallback: extract document_id with regex
diff --git a/api/core/rag/datasource/vdb/field.py b/api/core/rag/datasource/vdb/field.py
index 8fc94be360..5a0fabc572 100644
--- a/api/core/rag/datasource/vdb/field.py
+++ b/api/core/rag/datasource/vdb/field.py
@@ -1,4 +1,24 @@
from enum import StrEnum, auto
+from typing import Any
+
+from pydantic import TypeAdapter
+
+_metadata_adapter: TypeAdapter[dict[str, Any]] = TypeAdapter(dict[str, Any])
+
+
+def parse_metadata_json(raw: Any) -> dict[str, Any]:
+ """Parse metadata from a JSON string or pass through an existing dict.
+
+ Many VDB drivers return metadata as either a JSON string or an already-
+ decoded dict depending on the column type and driver version.
+ """
+ if raw is None or raw in ("", b""):
+ return {}
+ if isinstance(raw, dict):
+ return raw
+ if not isinstance(raw, (str, bytes, bytearray)):
+ return {}
+ return _metadata_adapter.validate_json(raw)
class Field(StrEnum):
diff --git a/api/core/rag/datasource/vdb/hologres/hologres_vector.py b/api/core/rag/datasource/vdb/hologres/hologres_vector.py
index 36b259e494..13d48b5668 100644
--- a/api/core/rag/datasource/vdb/hologres/hologres_vector.py
+++ b/api/core/rag/datasource/vdb/hologres/hologres_vector.py
@@ -9,6 +9,7 @@ from psycopg import sql as psql
from pydantic import BaseModel, model_validator
from configs import dify_config
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -217,8 +218,7 @@ class HologresVector(BaseVector):
text = row[2]
meta = row[3]
- if isinstance(meta, str):
- meta = json.loads(meta)
+ meta = parse_metadata_json(meta)
# Convert distance to similarity score (consistent with pgvector)
score = 1 - distance
@@ -265,8 +265,7 @@ class HologresVector(BaseVector):
meta = row[2]
score = row[-1] # score is the last column from return_score
- if isinstance(meta, str):
- meta = json.loads(meta)
+ meta = parse_metadata_json(meta)
meta["score"] = score
docs.append(Document(page_content=text, metadata=meta))
diff --git a/api/core/rag/datasource/vdb/iris/iris_vector.py b/api/core/rag/datasource/vdb/iris/iris_vector.py
index 50bb2429ec..aae445e6ff 100644
--- a/api/core/rag/datasource/vdb/iris/iris_vector.py
+++ b/api/core/rag/datasource/vdb/iris/iris_vector.py
@@ -15,6 +15,7 @@ from typing import TYPE_CHECKING, Any
from configs import dify_config
from configs.middleware.vdb.iris_config import IrisVectorConfig
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -269,7 +270,7 @@ class IrisVector(BaseVector):
if len(row) >= 4:
text, meta_str, score = row[1], row[2], float(row[3])
if score >= score_threshold:
- metadata = json.loads(meta_str) if meta_str else {}
+ metadata = parse_metadata_json(meta_str)
metadata["score"] = score
docs.append(Document(page_content=text, metadata=metadata))
return docs
@@ -384,7 +385,7 @@ class IrisVector(BaseVector):
meta_str = row[2]
score_value = row[3]
- metadata = json.loads(meta_str) if meta_str else {}
+ metadata = parse_metadata_json(meta_str)
# Add score to metadata for hybrid search compatibility
score = float(score_value) if score_value is not None else 0.0
metadata["score"] = score
diff --git a/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py b/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py
index 14955c8d7c..c6ebccd204 100644
--- a/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py
+++ b/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py
@@ -3,12 +3,13 @@ import logging
import uuid
from collections.abc import Callable
from functools import wraps
-from typing import Any, Concatenate, ParamSpec, TypeVar
+from typing import Any, Concatenate
from mo_vector.client import MoVectorClient # type: ignore
from pydantic import BaseModel, model_validator
from configs import dify_config
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -19,15 +20,12 @@ from models.dataset import Dataset
logger = logging.getLogger(__name__)
-P = ParamSpec("P")
-R = TypeVar("R")
-T = TypeVar("T", bound="MatrixoneVector")
-
-
-def ensure_client(func: Callable[Concatenate[T, P], R]):
+def ensure_client[T: MatrixoneVector, **P, R](
+ func: Callable[Concatenate[T, P], R],
+) -> Callable[Concatenate[T, P], R]:
@wraps(func)
- def wrapper(self: T, *args: P.args, **kwargs: P.kwargs):
+ def wrapper(self: T, *args: P.args, **kwargs: P.kwargs) -> R:
if self.client is None:
self.client = self._get_client(None, False)
return func(self, *args, **kwargs)
@@ -196,11 +194,7 @@ class MatrixoneVector(BaseVector):
docs = []
for result in results:
- metadata = result.metadata
- if isinstance(metadata, str):
- import json
-
- metadata = json.loads(metadata)
+ metadata = parse_metadata_json(result.metadata)
score = 1 - result.distance
if score >= score_threshold:
metadata["score"] = score
diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
index 17aac25b87..6c62671380 100644
--- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py
+++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
@@ -4,7 +4,7 @@ import uuid
from enum import StrEnum
from typing import Any
-from clickhouse_connect import get_client
+from clickhouse_connect import get_client # type: ignore[import-untyped]
from pydantic import BaseModel
from configs import dify_config
diff --git a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py
index 86c1e65f47..82f419871c 100644
--- a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py
+++ b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py
@@ -10,6 +10,7 @@ from sqlalchemy.dialects.mysql import LONGTEXT
from sqlalchemy.exc import SQLAlchemyError
from configs import dify_config
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -366,8 +367,8 @@ class OceanBaseVector(BaseVector):
# Parse metadata JSON
try:
- metadata = json.loads(metadata_str) if isinstance(metadata_str, str) else metadata_str
- except json.JSONDecodeError:
+ metadata = parse_metadata_json(metadata_str)
+ except (ValueError, TypeError):
logger.warning("Invalid JSON metadata: %s", metadata_str)
metadata = {}
diff --git a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py
index 4a4a458f2e..a9f946dd43 100644
--- a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py
+++ b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py
@@ -3,7 +3,7 @@ import os
import uuid
from collections.abc import Generator, Iterable, Sequence
from itertools import islice
-from typing import TYPE_CHECKING, Any, Union
+from typing import TYPE_CHECKING, Any
import qdrant_client
from flask import current_app
@@ -36,8 +36,8 @@ if TYPE_CHECKING:
from qdrant_client.conversions import common_types
from qdrant_client.http import models as rest
- DictFilter = dict[str, Union[str, int, bool, dict, list]]
- MetadataFilter = Union[DictFilter, common_types.Filter]
+ type DictFilter = dict[str, str | int | bool | dict | list]
+ type MetadataFilter = DictFilter | common_types.Filter
class PathQdrantParams(BaseModel):
diff --git a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py
index f2156afa59..4a734232ec 100644
--- a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py
+++ b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py
@@ -9,7 +9,7 @@ from pydantic import BaseModel, model_validator
from tablestore import BatchGetRowRequest, TableInBatchGetRowItem
from configs import dify_config
-from core.rag.datasource.vdb.field import Field
+from core.rag.datasource.vdb.field import Field, parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -73,7 +73,8 @@ class TableStoreVector(BaseVector):
for item in table_result:
if item.is_ok and item.row:
kv = {k: v for k, v, _ in item.row.attribute_columns}
- docs.append(Document(page_content=kv[Field.CONTENT_KEY], metadata=json.loads(kv[Field.METADATA_KEY])))
+ metadata = parse_metadata_json(kv[Field.METADATA_KEY])
+ docs.append(Document(page_content=kv[Field.CONTENT_KEY], metadata=metadata))
return docs
def get_type(self) -> str:
@@ -311,7 +312,7 @@ class TableStoreVector(BaseVector):
metadata_str = ots_column_map.get(Field.METADATA_KEY)
vector = json.loads(vector_str) if vector_str else None
- metadata = json.loads(metadata_str) if metadata_str else {}
+ metadata = parse_metadata_json(metadata_str)
metadata["score"] = search_hit.score
@@ -371,7 +372,7 @@ class TableStoreVector(BaseVector):
ots_column_map[col[0]] = col[1]
metadata_str = ots_column_map.get(Field.METADATA_KEY)
- metadata = json.loads(metadata_str) if metadata_str else {}
+ metadata = parse_metadata_json(metadata_str)
vector_str = ots_column_map.get(Field.VECTOR)
vector = json.loads(vector_str) if vector_str else None
diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py
index 291d047c04..829db9db20 100644
--- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py
+++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py
@@ -11,6 +11,7 @@ from tcvectordb.model import index as vdb_index # type: ignore
from tcvectordb.model.document import AnnSearch, Filter, KeywordSearch, WeightedRerank # type: ignore
from configs import dify_config
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -286,13 +287,10 @@ class TencentVector(BaseVector):
return docs
for result in res[0]:
- meta = result.get(self.field_metadata)
- if isinstance(meta, str):
- # Compatible with version 1.1.3 and below.
- meta = json.loads(meta)
- score = 1 - result.get("score", 0.0)
- else:
- score = result.get("score", 0.0)
+ raw_meta = result.get(self.field_metadata)
+ # Compatible with version 1.1.3 and below: str means old driver.
+ score = (1 - result.get("score", 0.0)) if isinstance(raw_meta, str) else result.get("score", 0.0)
+ meta = parse_metadata_json(raw_meta)
if score >= score_threshold:
meta["score"] = score
doc = Document(page_content=result.get(self.field_text), metadata=meta)
diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py
index 69c81d521c..499a48ac76 100644
--- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py
+++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py
@@ -3,7 +3,7 @@ import os
import uuid
from collections.abc import Generator, Iterable, Sequence
from itertools import islice
-from typing import TYPE_CHECKING, Any, Union
+from typing import TYPE_CHECKING, Any
import httpx
import qdrant_client
@@ -40,8 +40,8 @@ if TYPE_CHECKING:
from qdrant_client.conversions import common_types
from qdrant_client.http import models as rest
- DictFilter = dict[str, Union[str, int, bool, dict, list]]
- MetadataFilter = Union[DictFilter, common_types.Filter]
+ type DictFilter = dict[str, str | int | bool | dict | list]
+ type MetadataFilter = DictFilter | common_types.Filter
class TidbOnQdrantConfig(BaseModel):
diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py
index 06b17b9e62..37114be6e7 100644
--- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py
+++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py
@@ -6,11 +6,18 @@ import httpx
from httpx import DigestAuth
from configs import dify_config
+from core.helper.http_client_pooling import get_pooled_http_client
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import TidbAuthBinding
from models.enums import TidbAuthBindingStatus
+# Reuse a pooled HTTP client for all TiDB Cloud requests to minimize connection churn
+_tidb_http_client: httpx.Client = get_pooled_http_client(
+ "tidb:cloud",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
+
class TidbService:
@staticmethod
@@ -50,7 +57,9 @@ class TidbService:
"rootPassword": password,
}
- response = httpx.post(f"{api_url}/clusters", json=cluster_data, auth=DigestAuth(public_key, private_key))
+ response = _tidb_http_client.post(
+ f"{api_url}/clusters", json=cluster_data, auth=DigestAuth(public_key, private_key)
+ )
if response.status_code == 200:
response_data = response.json()
@@ -84,7 +93,9 @@ class TidbService:
:return: The response from the API.
"""
- response = httpx.delete(f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key))
+ response = _tidb_http_client.delete(
+ f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key)
+ )
if response.status_code == 200:
return response.json()
@@ -103,7 +114,7 @@ class TidbService:
:return: The response from the API.
"""
- response = httpx.get(f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key))
+ response = _tidb_http_client.get(f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key))
if response.status_code == 200:
return response.json()
@@ -128,7 +139,7 @@ class TidbService:
body = {"password": new_password, "builtinRole": "role_admin", "customRoles": []}
- response = httpx.patch(
+ response = _tidb_http_client.patch(
f"{api_url}/clusters/{cluster_id}/sqlUsers/{account}",
json=body,
auth=DigestAuth(public_key, private_key),
@@ -162,7 +173,9 @@ class TidbService:
tidb_serverless_list_map = {item.cluster_id: item for item in tidb_serverless_list}
cluster_ids = [item.cluster_id for item in tidb_serverless_list]
params = {"clusterIds": cluster_ids, "view": "BASIC"}
- response = httpx.get(f"{api_url}/clusters:batchGet", params=params, auth=DigestAuth(public_key, private_key))
+ response = _tidb_http_client.get(
+ f"{api_url}/clusters:batchGet", params=params, auth=DigestAuth(public_key, private_key)
+ )
if response.status_code == 200:
response_data = response.json()
@@ -223,7 +236,7 @@ class TidbService:
clusters.append(cluster_data)
request_body = {"requests": clusters}
- response = httpx.post(
+ response = _tidb_http_client.post(
f"{api_url}/clusters:batchCreate", json=request_body, auth=DigestAuth(public_key, private_key)
)
diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py
index 27ae038a06..c948917374 100644
--- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py
+++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py
@@ -9,7 +9,7 @@ from sqlalchemy import text as sql_text
from sqlalchemy.orm import Session, declarative_base
from configs import dify_config
-from core.rag.datasource.vdb.field import Field
+from core.rag.datasource.vdb.field import Field, parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -228,7 +228,7 @@ class TiDBVector(BaseVector):
)
results = [(row[0], row[1], row[2]) for row in res]
for meta, text, distance in results:
- metadata = json.loads(meta)
+ metadata = parse_metadata_json(meta)
metadata["score"] = 1 - distance
docs.append(Document(page_content=text, metadata=metadata))
return docs
diff --git a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py
index e5feecf2bc..83fd3626d9 100644
--- a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py
+++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py
@@ -15,6 +15,7 @@ from volcengine.viking_db import ( # type: ignore
from configs import dify_config
from core.rag.datasource.vdb.field import Field as vdb_Field
+from core.rag.datasource.vdb.field import parse_metadata_json
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@@ -163,7 +164,7 @@ class VikingDBVector(BaseVector):
for result in results:
metadata = result.fields.get(vdb_Field.METADATA_KEY)
if metadata is not None:
- metadata = json.loads(metadata)
+ metadata = parse_metadata_json(metadata)
if metadata.get(key) == value:
ids.append(result.id)
return ids
@@ -189,9 +190,7 @@ class VikingDBVector(BaseVector):
docs = []
for result in results:
- metadata = result.fields.get(vdb_Field.METADATA_KEY)
- if metadata is not None:
- metadata = json.loads(metadata)
+ metadata = parse_metadata_json(result.fields.get(vdb_Field.METADATA_KEY))
if result.score >= score_threshold:
metadata["score"] = result.score
doc = Document(page_content=result.fields.get(vdb_Field.CONTENT_KEY), metadata=metadata)
diff --git a/api/core/rag/embedding/retrieval.py b/api/core/rag/embedding/retrieval.py
index 030237559d..89eeccdf2f 100644
--- a/api/core/rag/embedding/retrieval.py
+++ b/api/core/rag/embedding/retrieval.py
@@ -1,5 +1,6 @@
+from typing import TypedDict
+
from pydantic import BaseModel
-from typing_extensions import TypedDict
from models.dataset import DocumentSegment
diff --git a/api/core/rag/extractor/blob/blob.py b/api/core/rag/extractor/blob/blob.py
index 1f91a3ece1..b2e6d782d8 100644
--- a/api/core/rag/extractor/blob/blob.py
+++ b/api/core/rag/extractor/blob/blob.py
@@ -12,11 +12,11 @@ import mimetypes
from collections.abc import Generator, Mapping
from io import BufferedReader, BytesIO
from pathlib import Path, PurePath
-from typing import Any, Union
+from typing import Any
from pydantic import BaseModel, ConfigDict, model_validator
-PathLike = Union[str, PurePath]
+type PathLike = str | PurePath
class Blob(BaseModel):
@@ -29,7 +29,7 @@ class Blob(BaseModel):
Inspired by: https://developer.mozilla.org/en-US/docs/Web/API/Blob
"""
- data: Union[bytes, str, None] = None # Raw data
+ data: bytes | str | None = None # Raw data
mimetype: str | None = None # Not to be confused with a file extension
encoding: str = "utf-8" # Use utf-8 as default encoding, if decoding to string
# Location where the original content was found
@@ -75,7 +75,7 @@ class Blob(BaseModel):
raise ValueError(f"Unable to get bytes for blob {self}")
@contextlib.contextmanager
- def as_bytes_io(self) -> Generator[Union[BytesIO, BufferedReader], None, None]:
+ def as_bytes_io(self) -> Generator[BytesIO | BufferedReader, None, None]:
"""Read data as a byte stream."""
if isinstance(self.data, bytes):
yield BytesIO(self.data)
@@ -117,7 +117,7 @@ class Blob(BaseModel):
@classmethod
def from_data(
cls,
- data: Union[str, bytes],
+ data: str | bytes,
*,
encoding: str = "utf-8",
mime_type: str | None = None,
diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py
index e1ddd2dd96..89bdd56a6c 100644
--- a/api/core/rag/extractor/firecrawl/firecrawl_app.py
+++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py
@@ -1,9 +1,8 @@
import json
import time
-from typing import Any, NotRequired, cast
+from typing import Any, NotRequired, TypedDict, cast
import httpx
-from typing_extensions import TypedDict
from extensions.ext_storage import storage
diff --git a/api/core/rag/extractor/pdf_extractor.py b/api/core/rag/extractor/pdf_extractor.py
index 9abdb31325..02f0efc908 100644
--- a/api/core/rag/extractor/pdf_extractor.py
+++ b/api/core/rag/extractor/pdf_extractor.py
@@ -35,7 +35,7 @@ class PdfExtractor(BaseExtractor):
"""
# Magic bytes for image format detection: (magic_bytes, extension, mime_type)
- IMAGE_FORMATS = [
+ IMAGE_FORMATS: tuple[tuple[bytes, str, str], ...] = (
(b"\xff\xd8\xff", "jpg", "image/jpeg"),
(b"\x89PNG\r\n\x1a\n", "png", "image/png"),
(b"\x00\x00\x00\x0c\x6a\x50\x20\x20\x0d\x0a\x87\x0a", "jp2", "image/jp2"),
@@ -45,7 +45,7 @@ class PdfExtractor(BaseExtractor):
(b"MM\x00*", "tiff", "image/tiff"),
(b"II+\x00", "tiff", "image/tiff"),
(b"MM\x00+", "tiff", "image/tiff"),
- ]
+ )
MAX_MAGIC_LEN = max(len(m) for m, _, _ in IMAGE_FORMATS)
def __init__(self, file_path: str, tenant_id: str, user_id: str, file_cache_key: str | None = None):
diff --git a/api/core/rag/extractor/watercrawl/client.py b/api/core/rag/extractor/watercrawl/client.py
index e8da866870..7b4a388df9 100644
--- a/api/core/rag/extractor/watercrawl/client.py
+++ b/api/core/rag/extractor/watercrawl/client.py
@@ -1,11 +1,10 @@
import json
from collections.abc import Generator
-from typing import Any, Union
+from typing import Any, TypedDict
from urllib.parse import urljoin
import httpx
from httpx import Response
-from typing_extensions import TypedDict
from core.rag.extractor.watercrawl.exceptions import (
WaterCrawlAuthenticationError,
@@ -142,7 +141,7 @@ class WaterCrawlAPIClient(BaseAPIClient):
def create_crawl_request(
self,
- url: Union[list, str] | None = None,
+ url: list | str | None = None,
spider_options: SpiderOptions | None = None,
page_options: PageOptions | None = None,
plugin_options: dict[str, Any] | None = None,
diff --git a/api/core/rag/extractor/watercrawl/provider.py b/api/core/rag/extractor/watercrawl/provider.py
index 81c19005db..2a9403eda0 100644
--- a/api/core/rag/extractor/watercrawl/provider.py
+++ b/api/core/rag/extractor/watercrawl/provider.py
@@ -1,8 +1,6 @@
from collections.abc import Generator
from datetime import datetime
-from typing import Any
-
-from typing_extensions import TypedDict
+from typing import Any, TypedDict
from core.rag.extractor.watercrawl.client import PageOptions, SpiderOptions, WaterCrawlAPIClient
diff --git a/api/core/rag/index_processor/index_processor.py b/api/core/rag/index_processor/index_processor.py
index a6d1db214b..825ae01226 100644
--- a/api/core/rag/index_processor/index_processor.py
+++ b/api/core/rag/index_processor/index_processor.py
@@ -35,7 +35,10 @@ class IndexProcessor:
if "parent_mode" in preview:
data.parent_mode = preview["parent_mode"]
- for item in preview["preview"]:
+ # Different index processors return different preview shapes:
+ # - paragraph/parent-child processors: {"preview": [...]}
+ # - QA processor: {"qa_preview": [...]} (no "preview" key)
+ for item in preview.get("preview", []):
if "content" in item and "child_chunks" in item:
data.preview.append(
PreviewItem(content=item["content"], child_chunks=item["child_chunks"], summary=None)
@@ -44,6 +47,10 @@ class IndexProcessor:
data.qa_preview.append(QaPreview(question=item["question"], answer=item["answer"]))
elif "content" in item:
data.preview.append(PreviewItem(content=item["content"], child_chunks=None, summary=None))
+
+ for item in preview.get("qa_preview", []):
+ if "question" in item and "answer" in item:
+ data.qa_preview.append(QaPreview(question=item["question"], answer=item["answer"]))
return data
def index_and_clean(
diff --git a/api/core/rag/index_processor/index_processor_base.py b/api/core/rag/index_processor/index_processor_base.py
index 7d504fdb35..a3b6e0dbd2 100644
--- a/api/core/rag/index_processor/index_processor_base.py
+++ b/api/core/rag/index_processor/index_processor_base.py
@@ -7,12 +7,11 @@ import os
import re
from abc import ABC, abstractmethod
from collections.abc import Mapping
-from typing import TYPE_CHECKING, Any, NotRequired, Optional
+from typing import TYPE_CHECKING, Any, NotRequired, TypedDict
from urllib.parse import unquote, urlparse
import httpx
from sqlalchemy import select
-from typing_extensions import TypedDict
from configs import dify_config
from core.entities.knowledge_entities import PreviewDetail
@@ -118,11 +117,12 @@ class BaseIndexProcessor(ABC):
max_tokens: int,
chunk_overlap: int,
separator: str,
- embedding_model_instance: Optional["ModelInstance"],
+ embedding_model_instance: "ModelInstance | None",
) -> TextSplitter:
"""
Get the NodeParser object according to the processing rule.
"""
+ character_splitter: TextSplitter
if processing_rule_mode in ["custom", "hierarchical"]:
# The user-defined segmentation rule
max_segmentation_tokens_length = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
@@ -148,7 +148,7 @@ class BaseIndexProcessor(ABC):
embedding_model_instance=embedding_model_instance,
)
- return character_splitter # type: ignore
+ return character_splitter
def _get_content_files(self, document: Document, current_user: Account | None = None) -> list[AttachmentDocument]:
"""
diff --git a/api/core/rag/splitter/fixed_text_splitter.py b/api/core/rag/splitter/fixed_text_splitter.py
index e6aec4a3af..3383c7f3bd 100644
--- a/api/core/rag/splitter/fixed_text_splitter.py
+++ b/api/core/rag/splitter/fixed_text_splitter.py
@@ -4,19 +4,13 @@ from __future__ import annotations
import codecs
import re
-from typing import Any
+from collections.abc import Collection
+from typing import Any, Literal
from graphon.model_runtime.model_providers.__base.tokenizers.gpt2_tokenizer import GPT2Tokenizer
from core.model_manager import ModelInstance
-from core.rag.splitter.text_splitter import (
- TS,
- Collection,
- Literal,
- RecursiveCharacterTextSplitter,
- Set,
- Union,
-)
+from core.rag.splitter.text_splitter import RecursiveCharacterTextSplitter
class EnhanceRecursiveCharacterTextSplitter(RecursiveCharacterTextSplitter):
@@ -25,13 +19,13 @@ class EnhanceRecursiveCharacterTextSplitter(RecursiveCharacterTextSplitter):
"""
@classmethod
- def from_encoder(
- cls: type[TS],
+ def from_encoder[T: EnhanceRecursiveCharacterTextSplitter](
+ cls: type[T],
embedding_model_instance: ModelInstance | None,
- allowed_special: Union[Literal["all"], Set[str]] = set(), # noqa: UP037
- disallowed_special: Union[Literal["all"], Collection[str]] = "all", # noqa: UP037
+ allowed_special: Literal["all"] | set[str] = set(),
+ disallowed_special: Literal["all"] | Collection[str] = "all",
**kwargs: Any,
- ):
+ ) -> T:
def _token_encoder(texts: list[str]) -> list[int]:
if not texts:
return []
diff --git a/api/core/rag/splitter/text_splitter.py b/api/core/rag/splitter/text_splitter.py
index 41e6d771e9..8977611f93 100644
--- a/api/core/rag/splitter/text_splitter.py
+++ b/api/core/rag/splitter/text_splitter.py
@@ -6,19 +6,12 @@ import re
from abc import ABC, abstractmethod
from collections.abc import Callable, Collection, Iterable, Sequence, Set
from dataclasses import dataclass
-from typing import (
- Any,
- Literal,
- TypeVar,
- Union,
-)
+from typing import Any, Literal
from core.rag.models.document import BaseDocumentTransformer, Document
logger = logging.getLogger(__name__)
-TS = TypeVar("TS", bound="TextSplitter")
-
def _split_text_with_regex(text: str, separator: str, keep_separator: bool) -> list[str]:
# Now that we have the separator, split the text
@@ -194,8 +187,8 @@ class TokenTextSplitter(TextSplitter):
self,
encoding_name: str = "gpt2",
model_name: str | None = None,
- allowed_special: Union[Literal["all"], Set[str]] = set(),
- disallowed_special: Union[Literal["all"], Collection[str]] = "all",
+ allowed_special: Literal["all"] | Set[str] = set(),
+ disallowed_special: Literal["all"] | Collection[str] = "all",
**kwargs: Any,
):
"""Create a new TextSplitter."""
diff --git a/api/core/repositories/celery_workflow_execution_repository.py b/api/core/repositories/celery_workflow_execution_repository.py
index 465f43da73..b07c63fdf0 100644
--- a/api/core/repositories/celery_workflow_execution_repository.py
+++ b/api/core/repositories/celery_workflow_execution_repository.py
@@ -6,7 +6,6 @@ providing improved performance by offloading database operations to background w
"""
import logging
-from typing import Union
from graphon.entities import WorkflowExecution
from sqlalchemy.engine import Engine
@@ -47,7 +46,7 @@ class CeleryWorkflowExecutionRepository(WorkflowExecutionRepository):
def __init__(
self,
session_factory: sessionmaker | Engine,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
app_id: str | None,
triggered_from: WorkflowRunTriggeredFrom | None,
):
diff --git a/api/core/repositories/celery_workflow_node_execution_repository.py b/api/core/repositories/celery_workflow_node_execution_repository.py
index 22ef44b3dc..cdb3af01a8 100644
--- a/api/core/repositories/celery_workflow_node_execution_repository.py
+++ b/api/core/repositories/celery_workflow_node_execution_repository.py
@@ -7,7 +7,6 @@ providing improved performance by offloading database operations to background w
import logging
from collections.abc import Sequence
-from typing import Union
from graphon.entities import WorkflowNodeExecution
from sqlalchemy.engine import Engine
@@ -54,7 +53,7 @@ class CeleryWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository):
def __init__(
self,
session_factory: sessionmaker | Engine,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
app_id: str | None,
triggered_from: WorkflowNodeExecutionTriggeredFrom | None,
):
diff --git a/api/core/repositories/factory.py b/api/core/repositories/factory.py
index ed6d44f434..ce3ad15759 100644
--- a/api/core/repositories/factory.py
+++ b/api/core/repositories/factory.py
@@ -7,7 +7,7 @@ allowing users to configure different repository backends through string paths.
from collections.abc import Sequence
from dataclasses import dataclass
-from typing import Literal, Protocol, Union
+from typing import Literal, Protocol
from graphon.entities import WorkflowExecution, WorkflowNodeExecution
from sqlalchemy.engine import Engine
@@ -61,8 +61,8 @@ class DifyCoreRepositoryFactory:
@classmethod
def create_workflow_execution_repository(
cls,
- session_factory: Union[sessionmaker, Engine],
- user: Union[Account, EndUser],
+ session_factory: sessionmaker | Engine,
+ user: Account | EndUser,
app_id: str,
triggered_from: WorkflowRunTriggeredFrom,
) -> WorkflowExecutionRepository:
@@ -97,8 +97,8 @@ class DifyCoreRepositoryFactory:
@classmethod
def create_workflow_node_execution_repository(
cls,
- session_factory: Union[sessionmaker, Engine],
- user: Union[Account, EndUser],
+ session_factory: sessionmaker | Engine,
+ user: Account | EndUser,
app_id: str,
triggered_from: WorkflowNodeExecutionTriggeredFrom,
) -> WorkflowNodeExecutionRepository:
diff --git a/api/core/repositories/sqlalchemy_workflow_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_execution_repository.py
index 85d20b675d..d74cc8f231 100644
--- a/api/core/repositories/sqlalchemy_workflow_execution_repository.py
+++ b/api/core/repositories/sqlalchemy_workflow_execution_repository.py
@@ -4,7 +4,6 @@ SQLAlchemy implementation of the WorkflowExecutionRepository.
import json
import logging
-from typing import Union
from graphon.entities import WorkflowExecution
from graphon.enums import WorkflowExecutionStatus, WorkflowType
@@ -40,7 +39,7 @@ class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository):
def __init__(
self,
session_factory: sessionmaker | Engine,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
app_id: str | None,
triggered_from: WorkflowRunTriggeredFrom | None,
):
diff --git a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
index a72bfa378b..13e885672a 100644
--- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
+++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
@@ -7,7 +7,7 @@ import json
import logging
from collections.abc import Callable, Mapping, Sequence
from concurrent.futures import ThreadPoolExecutor
-from typing import Any, TypeVar, Union
+from typing import Any
import psycopg2.errors
from graphon.entities import WorkflowNodeExecution
@@ -63,7 +63,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
def __init__(
self,
session_factory: sessionmaker | Engine,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
app_id: str | None,
triggered_from: WorkflowNodeExecutionTriggeredFrom | None,
):
@@ -551,10 +551,7 @@ def _deterministic_json_dump(value: Mapping[str, Any]) -> str:
return json.dumps(value, sort_keys=True)
-_T = TypeVar("_T")
-
-
-def _find_first(seq: Sequence[_T], pred: Callable[[_T], bool]) -> _T | None:
+def _find_first[T](seq: Sequence[T], pred: Callable[[T], bool]) -> T | None:
filtered = [i for i in seq if pred(i)]
if filtered:
return filtered[0]
diff --git a/api/core/schemas/resolver.py b/api/core/schemas/resolver.py
index 1b57f5bb94..6e26664ac2 100644
--- a/api/core/schemas/resolver.py
+++ b/api/core/schemas/resolver.py
@@ -3,15 +3,15 @@ import re
import threading
from collections import deque
from dataclasses import dataclass
-from typing import Any, Union
+from typing import Any
from core.schemas.registry import SchemaRegistry
logger = logging.getLogger(__name__)
# Type aliases for better clarity
-SchemaType = Union[dict[str, Any], list[Any], str, int, float, bool, None]
-SchemaDict = dict[str, Any]
+type SchemaType = dict[str, Any] | list[Any] | str | int | float | bool | None
+type SchemaDict = dict[str, Any]
# Pre-compiled pattern for better performance
_DIFY_SCHEMA_PATTERN = re.compile(r"^https://dify\.ai/schemas/(v\d+)/(.+)\.json$")
@@ -54,7 +54,7 @@ class QueueItem:
current: Any
parent: Any | None
- key: Union[str, int] | None
+ key: str | int | None
depth: int
ref_path: set[str]
diff --git a/api/core/trigger/constants.py b/api/core/trigger/constants.py
index 192faa2d3e..4047e9bc88 100644
--- a/api/core/trigger/constants.py
+++ b/api/core/trigger/constants.py
@@ -5,11 +5,11 @@ TRIGGER_SCHEDULE_NODE_TYPE: Final[str] = "trigger-schedule"
TRIGGER_PLUGIN_NODE_TYPE: Final[str] = "trigger-plugin"
TRIGGER_NODE_TYPES: Final[frozenset[str]] = frozenset(
- {
+ (
TRIGGER_WEBHOOK_NODE_TYPE,
TRIGGER_SCHEDULE_NODE_TYPE,
TRIGGER_PLUGIN_NODE_TYPE,
- }
+ )
)
diff --git a/api/core/trigger/debug/event_bus.py b/api/core/trigger/debug/event_bus.py
index e3fb6a13d9..eb1f6f6472 100644
--- a/api/core/trigger/debug/event_bus.py
+++ b/api/core/trigger/debug/event_bus.py
@@ -1,6 +1,5 @@
import hashlib
import logging
-from typing import TypeVar
from redis import RedisError
@@ -11,8 +10,6 @@ logger = logging.getLogger(__name__)
TRIGGER_DEBUG_EVENT_TTL = 300
-TTriggerDebugEvent = TypeVar("TTriggerDebugEvent", bound="BaseDebugEvent")
-
class TriggerDebugEventBus:
"""
@@ -81,15 +78,15 @@ class TriggerDebugEventBus:
return 0
@classmethod
- def poll(
+ def poll[T: BaseDebugEvent](
cls,
- event_type: type[TTriggerDebugEvent],
+ event_type: type[T],
pool_key: str,
tenant_id: str,
user_id: str,
app_id: str,
node_id: str,
- ) -> TTriggerDebugEvent | None:
+ ) -> T | None:
"""
Poll for an event or register to the waiting pool.
diff --git a/api/core/workflow/node_factory.py b/api/core/workflow/node_factory.py
index 8cc21d2cd9..f6c3aee4c1 100644
--- a/api/core/workflow/node_factory.py
+++ b/api/core/workflow/node_factory.py
@@ -2,7 +2,7 @@ import importlib
import pkgutil
from collections.abc import Callable, Iterator, Mapping, MutableMapping
from functools import lru_cache
-from typing import TYPE_CHECKING, Any, TypeAlias, cast, final
+from typing import TYPE_CHECKING, Any, cast, final, override
from graphon.entities.base_node_data import BaseNodeData
from graphon.entities.graph_config import NodeConfigDict, NodeConfigDictAdapter
@@ -22,7 +22,6 @@ from graphon.nodes.parameter_extractor.entities import ParameterExtractorNodeDat
from graphon.nodes.question_classifier.entities import QuestionClassifierNodeData
from sqlalchemy import select
from sqlalchemy.orm import Session
-from typing_extensions import override
from configs import dify_config
from core.app.entities.app_invoke_entities import DIFY_RUN_CONTEXT_KEY, DifyRunContext
@@ -192,7 +191,7 @@ class _LazyNodeTypeClassesMapping(MutableMapping[NodeType, Mapping[str, type[Nod
NODE_TYPE_CLASSES_MAPPING: MutableMapping[NodeType, Mapping[str, type[Node]]] = _LazyNodeTypeClassesMapping()
-LLMCompatibleNodeData: TypeAlias = LLMNodeData | QuestionClassifierNodeData | ParameterExtractorNodeData
+type LLMCompatibleNodeData = LLMNodeData | QuestionClassifierNodeData | ParameterExtractorNodeData
def fetch_memory(
diff --git a/api/core/workflow/nodes/trigger_webhook/entities.py b/api/core/workflow/nodes/trigger_webhook/entities.py
index 4d5ad72154..a30f877e4b 100644
--- a/api/core/workflow/nodes/trigger_webhook/entities.py
+++ b/api/core/workflow/nodes/trigger_webhook/entities.py
@@ -8,24 +8,20 @@ from pydantic import BaseModel, Field, field_validator
from core.trigger.constants import TRIGGER_WEBHOOK_NODE_TYPE
-_WEBHOOK_HEADER_ALLOWED_TYPES = frozenset(
- {
- SegmentType.STRING,
- }
-)
+_WEBHOOK_HEADER_ALLOWED_TYPES: frozenset[SegmentType] = frozenset((SegmentType.STRING,))
-_WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES = frozenset(
- {
+_WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES: frozenset[SegmentType] = frozenset(
+ (
SegmentType.STRING,
SegmentType.NUMBER,
SegmentType.BOOLEAN,
- }
+ )
)
_WEBHOOK_PARAMETER_ALLOWED_TYPES = _WEBHOOK_HEADER_ALLOWED_TYPES | _WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES
-_WEBHOOK_BODY_ALLOWED_TYPES = frozenset(
- {
+_WEBHOOK_BODY_ALLOWED_TYPES: frozenset[SegmentType] = frozenset(
+ (
SegmentType.STRING,
SegmentType.NUMBER,
SegmentType.BOOLEAN,
@@ -35,7 +31,7 @@ _WEBHOOK_BODY_ALLOWED_TYPES = frozenset(
SegmentType.ARRAY_BOOLEAN,
SegmentType.ARRAY_OBJECT,
SegmentType.FILE,
- }
+ )
)
diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py
index 1ddcc8f792..f68cdaadde 100644
--- a/api/events/event_handlers/update_provider_when_message_created.py
+++ b/api/events/event_handlers/update_provider_when_message_created.py
@@ -157,7 +157,7 @@ def handle(sender: Message, **kwargs):
tenant_id=tenant_id,
provider_name=ModelProviderID(model_config.provider).provider_name,
provider_type=ProviderType.SYSTEM.value,
- quota_type=provider_configuration.system_configuration.current_quota_type.value,
+ quota_type=provider_configuration.system_configuration.current_quota_type,
),
values=_ProviderUpdateValues(quota_used=Provider.quota_used + used_quota, last_used=current_time),
additional_filters=_ProviderUpdateAdditionalFilters(
diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py
index 26262484f9..5f528dbf9e 100644
--- a/api/extensions/ext_redis.py
+++ b/api/extensions/ext_redis.py
@@ -3,7 +3,7 @@ import logging
import ssl
from collections.abc import Callable
from datetime import timedelta
-from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar, Union
+from typing import TYPE_CHECKING, Any, Union
import redis
from redis import RedisError
@@ -297,12 +297,7 @@ def get_pubsub_broadcast_channel() -> BroadcastChannelProtocol:
return RedisBroadcastChannel(_pubsub_redis_client)
-P = ParamSpec("P")
-R = TypeVar("R")
-T = TypeVar("T")
-
-
-def redis_fallback(default_return: T | None = None): # type: ignore
+def redis_fallback[T](default_return: T | None = None): # type: ignore
"""
decorator to handle Redis operation exceptions and return a default value when Redis is unavailable.
@@ -310,9 +305,9 @@ def redis_fallback(default_return: T | None = None): # type: ignore
default_return: The value to return when a Redis operation fails. Defaults to None.
"""
- def decorator(func: Callable[P, R]):
+ def decorator[**P, R](func: Callable[P, R]) -> Callable[P, R | T | None]:
@functools.wraps(func)
- def wrapper(*args: P.args, **kwargs: P.kwargs):
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | T | None:
try:
return func(*args, **kwargs)
except RedisError as e:
diff --git a/api/extensions/logstore/repositories/logstore_workflow_execution_repository.py b/api/extensions/logstore/repositories/logstore_workflow_execution_repository.py
index f71b2fa1df..d0f3e2e244 100644
--- a/api/extensions/logstore/repositories/logstore_workflow_execution_repository.py
+++ b/api/extensions/logstore/repositories/logstore_workflow_execution_repository.py
@@ -2,7 +2,6 @@ import json
import logging
import os
import time
-from typing import Union
from graphon.entities import WorkflowExecution
from graphon.workflow_type_encoder import WorkflowRuntimeTypeConverter
@@ -27,7 +26,7 @@ class LogstoreWorkflowExecutionRepository(WorkflowExecutionRepository):
def __init__(
self,
session_factory: sessionmaker | Engine,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
app_id: str | None,
triggered_from: WorkflowRunTriggeredFrom | None,
):
diff --git a/api/extensions/logstore/repositories/logstore_workflow_node_execution_repository.py b/api/extensions/logstore/repositories/logstore_workflow_node_execution_repository.py
index b725436681..37952d6464 100644
--- a/api/extensions/logstore/repositories/logstore_workflow_node_execution_repository.py
+++ b/api/extensions/logstore/repositories/logstore_workflow_node_execution_repository.py
@@ -11,7 +11,7 @@ import os
import time
from collections.abc import Sequence
from datetime import datetime
-from typing import Any, Union
+from typing import Any
from graphon.entities import WorkflowNodeExecution
from graphon.enums import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
@@ -20,6 +20,7 @@ from graphon.workflow_type_encoder import WorkflowRuntimeTypeConverter
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker
+from core.ops.utils import JSON_DICT_ADAPTER
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.repositories.factory import OrderConfig, WorkflowNodeExecutionRepository
from extensions.logstore.aliyun_logstore import AliyunLogStore
@@ -48,10 +49,10 @@ def _dict_to_workflow_node_execution(data: dict[str, Any]) -> WorkflowNodeExecut
"""
logger.debug("_dict_to_workflow_node_execution: data keys=%s", list(data.keys())[:5])
# Parse JSON fields
- inputs = json.loads(data.get("inputs", "{}"))
- process_data = json.loads(data.get("process_data", "{}"))
- outputs = json.loads(data.get("outputs", "{}"))
- metadata = json.loads(data.get("execution_metadata", "{}"))
+ inputs = JSON_DICT_ADAPTER.validate_json(data.get("inputs") or "{}")
+ process_data = JSON_DICT_ADAPTER.validate_json(data.get("process_data") or "{}")
+ outputs = JSON_DICT_ADAPTER.validate_json(data.get("outputs") or "{}")
+ metadata = JSON_DICT_ADAPTER.validate_json(data.get("execution_metadata") or "{}")
# Convert metadata to domain enum keys
domain_metadata = {}
@@ -108,7 +109,7 @@ class LogstoreWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository):
def __init__(
self,
session_factory: sessionmaker | Engine,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
app_id: str | None,
triggered_from: WorkflowNodeExecutionTriggeredFrom | None,
):
diff --git a/api/extensions/otel/decorators/base.py b/api/extensions/otel/decorators/base.py
index a7bb8d051b..1dd92caeae 100644
--- a/api/extensions/otel/decorators/base.py
+++ b/api/extensions/otel/decorators/base.py
@@ -1,6 +1,6 @@
import functools
from collections.abc import Callable
-from typing import ParamSpec, TypeVar, cast
+from typing import cast
from opentelemetry.trace import get_tracer
@@ -8,9 +8,6 @@ from configs import dify_config
from extensions.otel.decorators.handler import SpanHandler
from extensions.otel.runtime import is_instrument_flag_enabled
-P = ParamSpec("P")
-R = TypeVar("R")
-
_HANDLER_INSTANCES: dict[type[SpanHandler], SpanHandler] = {SpanHandler: SpanHandler()}
@@ -21,7 +18,7 @@ def _get_handler_instance(handler_class: type[SpanHandler]) -> SpanHandler:
return _HANDLER_INSTANCES[handler_class]
-def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Callable[P, R]], Callable[P, R]]:
+def trace_span[**P, R](handler_class: type[SpanHandler] | None = None) -> Callable[[Callable[P, R]], Callable[P, R]]:
"""
Decorator that traces a function with an OpenTelemetry span.
diff --git a/api/extensions/otel/decorators/handler.py b/api/extensions/otel/decorators/handler.py
index 6915b63dce..e465a615a6 100644
--- a/api/extensions/otel/decorators/handler.py
+++ b/api/extensions/otel/decorators/handler.py
@@ -1,11 +1,9 @@
import inspect
from collections.abc import Callable, Mapping
-from typing import Any, TypeVar
+from typing import Any
from opentelemetry.trace import SpanKind, Status, StatusCode
-R = TypeVar("R")
-
class SpanHandler:
"""
@@ -31,9 +29,9 @@ class SpanHandler:
"""
return f"{wrapped.__module__}.{wrapped.__qualname__}"
- def _extract_arguments(
+ def _extract_arguments[T](
self,
- wrapped: Callable[..., R],
+ wrapped: Callable[..., T],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> dict[str, Any] | None:
@@ -61,13 +59,13 @@ class SpanHandler:
except Exception:
return None
- def wrapper(
+ def wrapper[T](
self,
tracer: Any,
- wrapped: Callable[..., R],
+ wrapped: Callable[..., T],
args: tuple[object, ...],
kwargs: Mapping[str, object],
- ) -> R:
+ ) -> T:
"""
Fully control the wrapper behavior.
diff --git a/api/extensions/otel/decorators/handlers/generate_handler.py b/api/extensions/otel/decorators/handlers/generate_handler.py
index b37aca664a..cc6c75304f 100644
--- a/api/extensions/otel/decorators/handlers/generate_handler.py
+++ b/api/extensions/otel/decorators/handlers/generate_handler.py
@@ -1,6 +1,6 @@
import logging
from collections.abc import Callable, Mapping
-from typing import Any, TypeVar
+from typing import Any
from opentelemetry.trace import SpanKind, Status, StatusCode
from opentelemetry.util.types import AttributeValue
@@ -12,19 +12,16 @@ from models.model import Account
logger = logging.getLogger(__name__)
-R = TypeVar("R")
-
-
class AppGenerateHandler(SpanHandler):
"""Span handler for ``AppGenerateService.generate``."""
- def wrapper(
+ def wrapper[T](
self,
tracer: Any,
- wrapped: Callable[..., R],
+ wrapped: Callable[..., T],
args: tuple[object, ...],
kwargs: Mapping[str, object],
- ) -> R:
+ ) -> T:
try:
arguments = self._extract_arguments(wrapped, args, kwargs)
if not arguments:
diff --git a/api/extensions/otel/instrumentation.py b/api/extensions/otel/instrumentation.py
index b73ba8df8c..0a70f6ebe9 100644
--- a/api/extensions/otel/instrumentation.py
+++ b/api/extensions/otel/instrumentation.py
@@ -1,5 +1,7 @@
import contextlib
import logging
+from collections.abc import Callable
+from typing import Protocol, cast
import flask
from opentelemetry.instrumentation.celery import CeleryInstrumentor
@@ -21,6 +23,38 @@ from extensions.otel.runtime import is_celery_worker
logger = logging.getLogger(__name__)
+class SupportsInstrument(Protocol):
+ def instrument(self, **kwargs: object) -> None: ...
+
+
+class SupportsFlaskInstrumentor(Protocol):
+ def instrument_app(
+ self, app: DifyApp, response_hook: Callable[[Span, str, list], None] | None = None, **kwargs: object
+ ) -> None: ...
+
+
+# Some OpenTelemetry instrumentor constructors are typed loosely enough that
+# pyrefly infers `NoneType`. Narrow the instances to just the methods we use
+# while leaving runtime behavior unchanged.
+def _new_celery_instrumentor() -> SupportsInstrument:
+ return cast(
+ SupportsInstrument,
+ CeleryInstrumentor(tracer_provider=get_tracer_provider(), meter_provider=get_meter_provider()),
+ )
+
+
+def _new_httpx_instrumentor() -> SupportsInstrument:
+ return cast(SupportsInstrument, HTTPXClientInstrumentor())
+
+
+def _new_redis_instrumentor() -> SupportsInstrument:
+ return cast(SupportsInstrument, RedisInstrumentor())
+
+
+def _new_sqlalchemy_instrumentor() -> SupportsInstrument:
+ return cast(SupportsInstrument, SQLAlchemyInstrumentor())
+
+
class ExceptionLoggingHandler(logging.Handler):
"""
Handler that records exceptions to the current OpenTelemetry span.
@@ -97,7 +131,7 @@ def init_flask_instrumentor(app: DifyApp) -> None:
from opentelemetry.instrumentation.flask import FlaskInstrumentor
- instrumentor = FlaskInstrumentor()
+ instrumentor = cast(SupportsFlaskInstrumentor, FlaskInstrumentor())
if dify_config.DEBUG:
logger.info("Initializing Flask instrumentor")
instrumentor.instrument_app(app, response_hook=response_hook)
@@ -106,21 +140,21 @@ def init_flask_instrumentor(app: DifyApp) -> None:
def init_sqlalchemy_instrumentor(app: DifyApp) -> None:
with app.app_context():
engines = list(app.extensions["sqlalchemy"].engines.values())
- SQLAlchemyInstrumentor().instrument(enable_commenter=True, engines=engines)
+ _new_sqlalchemy_instrumentor().instrument(enable_commenter=True, engines=engines)
def init_redis_instrumentor() -> None:
- RedisInstrumentor().instrument()
+ _new_redis_instrumentor().instrument()
def init_httpx_instrumentor() -> None:
- HTTPXClientInstrumentor().instrument()
+ _new_httpx_instrumentor().instrument()
def init_instruments(app: DifyApp) -> None:
if not is_celery_worker():
init_flask_instrumentor(app)
- CeleryInstrumentor(tracer_provider=get_tracer_provider(), meter_provider=get_meter_provider()).instrument()
+ _new_celery_instrumentor().instrument()
instrument_exception_logging()
init_sqlalchemy_instrumentor(app)
diff --git a/api/extensions/storage/clickzetta_volume/file_lifecycle.py b/api/extensions/storage/clickzetta_volume/file_lifecycle.py
index 1d9911465b..483bd6bbf6 100644
--- a/api/extensions/storage/clickzetta_volume/file_lifecycle.py
+++ b/api/extensions/storage/clickzetta_volume/file_lifecycle.py
@@ -15,8 +15,12 @@ from datetime import datetime
from enum import StrEnum, auto
from typing import Any
+from pydantic import TypeAdapter
+
logger = logging.getLogger(__name__)
+_metadata_adapter: TypeAdapter[dict[str, Any]] = TypeAdapter(dict[str, Any])
+
class FileStatus(StrEnum):
"""File status enumeration"""
@@ -455,8 +459,8 @@ class FileLifecycleManager:
try:
if self._storage.exists(self._metadata_file):
metadata_content = self._storage.load_once(self._metadata_file)
- result = json.loads(metadata_content.decode("utf-8"))
- return dict(result) if result else {}
+ result = _metadata_adapter.validate_json(metadata_content)
+ return result or {}
else:
return {}
except Exception as e:
diff --git a/api/extensions/storage/google_cloud_storage.py b/api/extensions/storage/google_cloud_storage.py
index 4ad7e2d159..00f7289aa4 100644
--- a/api/extensions/storage/google_cloud_storage.py
+++ b/api/extensions/storage/google_cloud_storage.py
@@ -1,13 +1,16 @@
import base64
import io
-import json
from collections.abc import Generator
+from typing import Any
from google.cloud import storage as google_cloud_storage # type: ignore
+from pydantic import TypeAdapter
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
+_service_account_adapter: TypeAdapter[dict[str, Any]] = TypeAdapter(dict[str, Any])
+
class GoogleCloudStorage(BaseStorage):
"""Implementation for Google Cloud storage."""
@@ -21,7 +24,7 @@ class GoogleCloudStorage(BaseStorage):
if service_account_json_str:
service_account_json = base64.b64decode(service_account_json_str).decode("utf-8")
# convert str to object
- service_account_obj = json.loads(service_account_json)
+ service_account_obj = _service_account_adapter.validate_json(service_account_json)
self.client = google_cloud_storage.Client.from_service_account_info(service_account_obj)
else:
self.client = google_cloud_storage.Client()
diff --git a/api/fields/conversation_fields.py b/api/fields/conversation_fields.py
index 30d02aeedc..b1d1b4caac 100644
--- a/api/fields/conversation_fields.py
+++ b/api/fields/conversation_fields.py
@@ -1,12 +1,12 @@
from __future__ import annotations
from datetime import datetime
-from typing import Any, TypeAlias
+from typing import Any
from graphon.file import File
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
-JSONValue: TypeAlias = Any
+type JSONValue = Any
class ResponseModel(BaseModel):
diff --git a/api/fields/message_fields.py b/api/fields/message_fields.py
index d982c31aee..a063a643b4 100644
--- a/api/fields/message_fields.py
+++ b/api/fields/message_fields.py
@@ -1,7 +1,6 @@
from __future__ import annotations
from datetime import datetime
-from typing import TypeAlias
from uuid import uuid4
from graphon.file import File
@@ -10,7 +9,7 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator
from core.entities.execution_extra_content import ExecutionExtraContentDomainModel
from fields.conversation_fields import AgentThought, JSONValue, MessageFile
-JSONValueType: TypeAlias = JSONValue
+type JSONValueType = JSONValue
class ResponseModel(BaseModel):
diff --git a/api/libs/collection_utils.py b/api/libs/collection_utils.py
index f97308ca44..7054fe401e 100644
--- a/api/libs/collection_utils.py
+++ b/api/libs/collection_utils.py
@@ -1,9 +1,12 @@
-def convert_to_lower_and_upper_set(inputs: list[str] | set[str]) -> set[str]:
+from collections.abc import Collection
+
+
+def convert_to_lower_and_upper_set(inputs: Collection[str]) -> set[str]:
"""
- Convert a list or set of strings to a set containing both lower and upper case versions of each string.
+ Convert a collection of strings to a set containing both lower and upper case versions of each string.
Args:
- inputs (list[str] | set[str]): A list or set of strings to be converted.
+ inputs (Collection[str]): A collection of strings to be converted.
Returns:
set[str]: A set containing both lower and upper case versions of each string.
diff --git a/api/libs/flask_utils.py b/api/libs/flask_utils.py
index e45c8fe319..52fc787c79 100644
--- a/api/libs/flask_utils.py
+++ b/api/libs/flask_utils.py
@@ -1,12 +1,10 @@
import contextvars
from collections.abc import Iterator
from contextlib import contextmanager
-from typing import TYPE_CHECKING, TypeVar
+from typing import TYPE_CHECKING
from flask import Flask, g
-T = TypeVar("T")
-
if TYPE_CHECKING:
from models import Account, EndUser
diff --git a/api/libs/login.py b/api/libs/login.py
index dce332b01d..68a2050747 100644
--- a/api/libs/login.py
+++ b/api/libs/login.py
@@ -42,13 +42,7 @@ def current_account_with_tenant():
return user, user.current_tenant_id
-from typing import ParamSpec, TypeVar
-
-P = ParamSpec("P")
-R = TypeVar("R")
-
-
-def login_required(func: Callable[P, R]) -> Callable[P, R | ResponseReturnValue]:
+def login_required[**P, R](func: Callable[P, R]) -> Callable[P, R | ResponseReturnValue]:
"""
If you decorate a view with this, it will ensure that the current user is
logged in and authenticated before calling the actual view. (If they are
diff --git a/api/libs/oauth.py b/api/libs/oauth.py
index 76e741301c..3daaa038e0 100644
--- a/api/libs/oauth.py
+++ b/api/libs/oauth.py
@@ -1,24 +1,26 @@
import logging
-import sys
import urllib.parse
from dataclasses import dataclass
-from typing import NotRequired
+from typing import NotRequired, TypedDict
import httpx
from pydantic import TypeAdapter, ValidationError
-if sys.version_info >= (3, 12):
- from typing import TypedDict
-else:
- from typing_extensions import TypedDict
+from core.helper.http_client_pooling import get_pooled_http_client
logger = logging.getLogger(__name__)
-JsonObject = dict[str, object]
-JsonObjectList = list[JsonObject]
+type JsonObject = dict[str, object]
+type JsonObjectList = list[JsonObject]
-JSON_OBJECT_ADAPTER = TypeAdapter(JsonObject)
-JSON_OBJECT_LIST_ADAPTER = TypeAdapter(JsonObjectList)
+JSON_OBJECT_ADAPTER: TypeAdapter[JsonObject] = TypeAdapter(JsonObject)
+JSON_OBJECT_LIST_ADAPTER: TypeAdapter[JsonObjectList] = TypeAdapter(JsonObjectList)
+
+# Reuse a pooled httpx.Client for OAuth flows (public endpoints, no SSRF proxy).
+_http_client: httpx.Client = get_pooled_http_client(
+ "oauth:default",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
class AccessTokenResponse(TypedDict, total=False):
@@ -115,7 +117,7 @@ class GitHubOAuth(OAuth):
"redirect_uri": self.redirect_uri,
}
headers = {"Accept": "application/json"}
- response = httpx.post(self._TOKEN_URL, data=data, headers=headers)
+ response = _http_client.post(self._TOKEN_URL, data=data, headers=headers)
response_json = ACCESS_TOKEN_RESPONSE_ADAPTER.validate_python(_json_object(response))
access_token = response_json.get("access_token")
@@ -127,7 +129,7 @@ class GitHubOAuth(OAuth):
def get_raw_user_info(self, token: str) -> JsonObject:
headers = {"Authorization": f"token {token}"}
- response = httpx.get(self._USER_INFO_URL, headers=headers)
+ response = _http_client.get(self._USER_INFO_URL, headers=headers)
response.raise_for_status()
user_info = GITHUB_RAW_USER_INFO_ADAPTER.validate_python(_json_object(response))
@@ -147,7 +149,7 @@ class GitHubOAuth(OAuth):
Returns an empty string when no usable email is found.
"""
try:
- email_response = httpx.get(GitHubOAuth._EMAIL_INFO_URL, headers=headers)
+ email_response = _http_client.get(GitHubOAuth._EMAIL_INFO_URL, headers=headers)
email_response.raise_for_status()
email_records = GITHUB_EMAIL_RECORDS_ADAPTER.validate_python(_json_list(email_response))
except (httpx.HTTPStatusError, ValidationError):
@@ -204,7 +206,7 @@ class GoogleOAuth(OAuth):
"redirect_uri": self.redirect_uri,
}
headers = {"Accept": "application/json"}
- response = httpx.post(self._TOKEN_URL, data=data, headers=headers)
+ response = _http_client.post(self._TOKEN_URL, data=data, headers=headers)
response_json = ACCESS_TOKEN_RESPONSE_ADAPTER.validate_python(_json_object(response))
access_token = response_json.get("access_token")
@@ -216,7 +218,7 @@ class GoogleOAuth(OAuth):
def get_raw_user_info(self, token: str) -> JsonObject:
headers = {"Authorization": f"Bearer {token}"}
- response = httpx.get(self._USER_INFO_URL, headers=headers)
+ response = _http_client.get(self._USER_INFO_URL, headers=headers)
response.raise_for_status()
return _json_object(response)
diff --git a/api/libs/oauth_data_source.py b/api/libs/oauth_data_source.py
index d5dc35ac97..9b53918f24 100644
--- a/api/libs/oauth_data_source.py
+++ b/api/libs/oauth_data_source.py
@@ -1,21 +1,16 @@
-import sys
import urllib.parse
-from typing import Any, Literal
+from typing import Any, Literal, TypedDict
import httpx
from flask_login import current_user
from pydantic import TypeAdapter
from sqlalchemy import select
+from core.helper.http_client_pooling import get_pooled_http_client
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.source import DataSourceOauthBinding
-if sys.version_info >= (3, 12):
- from typing import TypedDict
-else:
- from typing_extensions import TypedDict
-
class NotionPageSummary(TypedDict):
page_id: str
@@ -38,6 +33,13 @@ NOTION_SOURCE_INFO_ADAPTER = TypeAdapter(NotionSourceInfo)
NOTION_PAGE_SUMMARY_ADAPTER = TypeAdapter(NotionPageSummary)
+# Reuse a small pooled client for OAuth data source flows.
+_http_client: httpx.Client = get_pooled_http_client(
+ "oauth:notion",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
+
+
class OAuthDataSource:
client_id: str
client_secret: str
@@ -75,7 +77,7 @@ class NotionOAuth(OAuthDataSource):
data = {"code": code, "grant_type": "authorization_code", "redirect_uri": self.redirect_uri}
headers = {"Accept": "application/json"}
auth = (self.client_id, self.client_secret)
- response = httpx.post(self._TOKEN_URL, data=data, auth=auth, headers=headers)
+ response = _http_client.post(self._TOKEN_URL, data=data, auth=auth, headers=headers)
response_json = response.json()
access_token = response_json.get("access_token")
@@ -268,7 +270,7 @@ class NotionOAuth(OAuthDataSource):
"Notion-Version": "2022-06-28",
}
- response = httpx.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
+ response = _http_client.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
response_json = response.json()
results.extend(response_json.get("results", []))
@@ -283,7 +285,7 @@ class NotionOAuth(OAuthDataSource):
"Authorization": f"Bearer {access_token}",
"Notion-Version": "2022-06-28",
}
- response = httpx.get(url=f"{self._NOTION_BLOCK_SEARCH}/{block_id}", headers=headers)
+ response = _http_client.get(url=f"{self._NOTION_BLOCK_SEARCH}/{block_id}", headers=headers)
response_json = response.json()
if response.status_code != 200:
message = response_json.get("message", "unknown error")
@@ -299,7 +301,7 @@ class NotionOAuth(OAuthDataSource):
"Authorization": f"Bearer {access_token}",
"Notion-Version": "2022-06-28",
}
- response = httpx.get(url=self._NOTION_BOT_USER, headers=headers)
+ response = _http_client.get(url=self._NOTION_BOT_USER, headers=headers)
response_json = response.json()
if "object" in response_json and response_json["object"] == "user":
user_type = response_json["type"]
@@ -323,7 +325,7 @@ class NotionOAuth(OAuthDataSource):
"Authorization": f"Bearer {access_token}",
"Notion-Version": "2022-06-28",
}
- response = httpx.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
+ response = _http_client.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
response_json = response.json()
results.extend(response_json.get("results", []))
diff --git a/api/models/enums.py b/api/models/enums.py
index bf2e927f00..f13fa448db 100644
--- a/api/models/enums.py
+++ b/api/models/enums.py
@@ -113,6 +113,7 @@ class DataSourceType(StrEnum):
WEBSITE_CRAWL = "website_crawl"
LOCAL_FILE = "local_file"
ONLINE_DOCUMENT = "online_document"
+ ONLINE_DRIVE = "online_drive"
class ProcessRuleMode(StrEnum):
diff --git a/api/models/model.py b/api/models/model.py
index 066d2acdce..1d73aadf09 100644
--- a/api/models/model.py
+++ b/api/models/model.py
@@ -8,7 +8,7 @@ from datetime import datetime
from decimal import Decimal
from enum import StrEnum, auto
from functools import lru_cache
-from typing import TYPE_CHECKING, Any, Literal, NotRequired, cast
+from typing import TYPE_CHECKING, Any, Literal, NotRequired, TypedDict, cast
from uuid import uuid4
import sqlalchemy as sa
@@ -19,7 +19,6 @@ from graphon.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType
from graphon.file import helpers as file_helpers
from sqlalchemy import BigInteger, Float, Index, PrimaryKeyConstraint, String, exists, func, select, text
from sqlalchemy.orm import Mapped, Session, mapped_column
-from typing_extensions import TypedDict
from configs import dify_config
from constants import DEFAULT_FILE_NUMBER_LIMITS
diff --git a/api/models/provider.py b/api/models/provider.py
index afeee20b1e..8270961b31 100644
--- a/api/models/provider.py
+++ b/api/models/provider.py
@@ -6,6 +6,7 @@ from functools import cached_property
from uuid import uuid4
import sqlalchemy as sa
+from graphon.model_runtime.entities.model_entities import ModelType
from sqlalchemy import DateTime, String, func, select, text
from sqlalchemy.orm import Mapped, mapped_column
@@ -13,7 +14,7 @@ from libs.uuid_utils import uuidv7
from .base import TypeBase
from .engine import db
-from .enums import CredentialSourceType, PaymentStatus
+from .enums import CredentialSourceType, PaymentStatus, ProviderQuotaType
from .types import EnumText, LongText, StringUUID
@@ -29,24 +30,6 @@ class ProviderType(StrEnum):
raise ValueError(f"No matching enum found for value '{value}'")
-class ProviderQuotaType(StrEnum):
- PAID = auto()
- """hosted paid quota"""
-
- FREE = auto()
- """third-party free quota"""
-
- TRIAL = auto()
- """hosted trial quota"""
-
- @staticmethod
- def value_of(value: str) -> ProviderQuotaType:
- for member in ProviderQuotaType:
- if member.value == value:
- return member
- raise ValueError(f"No matching enum found for value '{value}'")
-
-
class Provider(TypeBase):
"""
Provider model representing the API providers and their configurations.
@@ -77,7 +60,9 @@ class Provider(TypeBase):
last_used: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, init=False)
credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
- quota_type: Mapped[str | None] = mapped_column(String(40), nullable=True, server_default=text("''"), default="")
+ quota_type: Mapped[ProviderQuotaType | None] = mapped_column(
+ EnumText(ProviderQuotaType, length=40), nullable=True, server_default=text("''"), default=None
+ )
quota_limit: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=None)
quota_used: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=0)
@@ -147,7 +132,7 @@ class ProviderModel(TypeBase):
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
model_name: Mapped[str] = mapped_column(String(255), nullable=False)
- model_type: Mapped[str] = mapped_column(String(40), nullable=False)
+ model_type: Mapped[ModelType] = mapped_column(EnumText(ModelType, length=40), nullable=False)
credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
is_valid: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("false"), default=False)
created_at: Mapped[datetime] = mapped_column(
@@ -189,7 +174,7 @@ class TenantDefaultModel(TypeBase):
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
model_name: Mapped[str] = mapped_column(String(255), nullable=False)
- model_type: Mapped[str] = mapped_column(String(40), nullable=False)
+ model_type: Mapped[ModelType] = mapped_column(EnumText(ModelType, length=40), nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
@@ -269,7 +254,7 @@ class ProviderModelSetting(TypeBase):
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
model_name: Mapped[str] = mapped_column(String(255), nullable=False)
- model_type: Mapped[str] = mapped_column(String(40), nullable=False)
+ model_type: Mapped[ModelType] = mapped_column(EnumText(ModelType, length=40), nullable=False)
enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("true"), default=True)
load_balancing_enabled: Mapped[bool] = mapped_column(
sa.Boolean, nullable=False, server_default=text("false"), default=False
@@ -299,7 +284,7 @@ class LoadBalancingModelConfig(TypeBase):
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
model_name: Mapped[str] = mapped_column(String(255), nullable=False)
- model_type: Mapped[str] = mapped_column(String(40), nullable=False)
+ model_type: Mapped[ModelType] = mapped_column(EnumText(ModelType, length=40), nullable=False)
name: Mapped[str] = mapped_column(String(255), nullable=False)
encrypted_config: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
@@ -364,7 +349,7 @@ class ProviderModelCredential(TypeBase):
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
model_name: Mapped[str] = mapped_column(String(255), nullable=False)
- model_type: Mapped[str] = mapped_column(String(40), nullable=False)
+ model_type: Mapped[ModelType] = mapped_column(EnumText(ModelType, length=40), nullable=False)
credential_name: Mapped[str] = mapped_column(String(255), nullable=False)
encrypted_config: Mapped[str] = mapped_column(LongText, nullable=False)
created_at: Mapped[datetime] = mapped_column(
diff --git a/api/models/types.py b/api/models/types.py
index f8369dab9e..9ab694759f 100644
--- a/api/models/types.py
+++ b/api/models/types.py
@@ -1,6 +1,6 @@
import enum
import uuid
-from typing import Any, Generic, TypeVar
+from typing import Any
import sqlalchemy as sa
from sqlalchemy import CHAR, TEXT, VARCHAR, LargeBinary, TypeDecorator
@@ -110,17 +110,14 @@ class AdjustedJSON(TypeDecorator[dict | list | None]):
return value
-_E = TypeVar("_E", bound=enum.StrEnum)
-
-
-class EnumText(TypeDecorator[_E | None], Generic[_E]):
+class EnumText[T: enum.StrEnum](TypeDecorator[T | None]):
impl = VARCHAR
cache_ok = True
_length: int
- _enum_class: type[_E]
+ _enum_class: type[T]
- def __init__(self, enum_class: type[_E], length: int | None = None):
+ def __init__(self, enum_class: type[T], length: int | None = None):
self._enum_class = enum_class
max_enum_value_len = max(len(e.value) for e in enum_class)
if length is not None:
@@ -131,25 +128,25 @@ class EnumText(TypeDecorator[_E | None], Generic[_E]):
# leave some rooms for future longer enum values.
self._length = max(max_enum_value_len, 20)
- def process_bind_param(self, value: _E | str | None, dialect: Dialect) -> str | None:
+ def process_bind_param(self, value: T | str | None, dialect: Dialect) -> str | None:
if value is None:
return value
if isinstance(value, self._enum_class):
return value.value
- # Since _E is bound to StrEnum which inherits from str, at this point value must be str
+ # Since T is bound to StrEnum which inherits from str, at this point value must be str
self._enum_class(value)
return value
def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
return dialect.type_descriptor(VARCHAR(self._length))
- def process_result_value(self, value: str | None, dialect: Dialect) -> _E | None:
- if value is None:
- return value
+ def process_result_value(self, value: str | None, dialect: Dialect) -> T | None:
+ if value is None or value == "":
+ return None
# Type annotation guarantees value is str at this point
return self._enum_class(value)
- def compare_values(self, x: _E | None, y: _E | None) -> bool:
+ def compare_values(self, x: T | None, y: T | None) -> bool:
if x is None or y is None:
return x is y
return x == y
diff --git a/api/models/workflow.py b/api/models/workflow.py
index f8868cb73c..1063016370 100644
--- a/api/models/workflow.py
+++ b/api/models/workflow.py
@@ -1386,7 +1386,7 @@ class ConversationVariable(TypeBase):
# Only `sys.query` and `sys.files` could be modified.
-_EDITABLE_SYSTEM_VARIABLE = frozenset(["query", "files"])
+_EDITABLE_SYSTEM_VARIABLE = frozenset(("query", "files"))
class WorkflowDraftVariable(Base):
diff --git a/api/pyproject.toml b/api/pyproject.toml
index a09b474bf5..863b61cad1 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -1,7 +1,7 @@
[project]
name = "dify-api"
version = "1.13.3"
-requires-python = ">=3.11,<3.13"
+requires-python = "~=3.12.0"
dependencies = [
"aliyun-log-python-sdk~=0.9.37",
@@ -232,5 +232,5 @@ vdb = [
project-includes = ["."]
project-excludes = [".venv", "migrations/"]
python-platform = "linux"
-python-version = "3.11.0"
+python-version = "3.12.0"
infer-with-first-use = false
diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json
index 48271aab61..a8b884ea81 100644
--- a/api/pyrightconfig.json
+++ b/api/pyrightconfig.json
@@ -50,6 +50,6 @@
"reportUntypedFunctionDecorator": "hint",
"reportUnnecessaryTypeIgnoreComment": "hint",
"reportAttributeAccessIssue": "hint",
- "pythonVersion": "3.11",
+ "pythonVersion": "3.12",
"pythonPlatform": "All"
-}
\ No newline at end of file
+}
diff --git a/api/services/account_service.py b/api/services/account_service.py
index cc8ef08857..29b1444730 100644
--- a/api/services/account_service.py
+++ b/api/services/account_service.py
@@ -5,12 +5,11 @@ import secrets
import uuid
from datetime import UTC, datetime, timedelta
from hashlib import sha256
-from typing import Any, cast
+from typing import Any, TypedDict, cast
from pydantic import BaseModel, TypeAdapter
from sqlalchemy import func, select
from sqlalchemy.orm import Session
-from typing_extensions import TypedDict
class InvitationData(TypedDict):
diff --git a/api/services/agent_service.py b/api/services/agent_service.py
index 2b8a3ee594..d8f4e11e75 100644
--- a/api/services/agent_service.py
+++ b/api/services/agent_service.py
@@ -2,6 +2,7 @@ import threading
from typing import Any
import pytz
+from sqlalchemy import select
import contexts
from core.app.app_config.easy_ui_based_app.agent.manager import AgentConfigManager
@@ -23,25 +24,25 @@ class AgentService:
contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock())
- conversation: Conversation | None = (
- db.session.query(Conversation)
+ conversation: Conversation | None = db.session.scalar(
+ select(Conversation)
.where(
Conversation.id == conversation_id,
Conversation.app_id == app_model.id,
)
- .first()
+ .limit(1)
)
if not conversation:
raise ValueError(f"Conversation not found: {conversation_id}")
- message: Message | None = (
- db.session.query(Message)
+ message: Message | None = db.session.scalar(
+ select(Message)
.where(
Message.id == message_id,
Message.conversation_id == conversation_id,
)
- .first()
+ .limit(1)
)
if not message:
@@ -51,16 +52,11 @@ class AgentService:
if conversation.from_end_user_id:
# only select name field
- executor = (
- db.session.query(EndUser, EndUser.name).where(EndUser.id == conversation.from_end_user_id).first()
- )
+ executor_name = db.session.scalar(select(EndUser.name).where(EndUser.id == conversation.from_end_user_id))
else:
- executor = db.session.query(Account, Account.name).where(Account.id == conversation.from_account_id).first()
+ executor_name = db.session.scalar(select(Account.name).where(Account.id == conversation.from_account_id))
- if executor:
- executor = executor.name
- else:
- executor = "Unknown"
+ executor = executor_name or "Unknown"
assert isinstance(current_user, Account)
assert current_user.timezone is not None
timezone = pytz.timezone(current_user.timezone)
diff --git a/api/services/api_based_extension_service.py b/api/services/api_based_extension_service.py
index 3a0ed41be0..fdb377694b 100644
--- a/api/services/api_based_extension_service.py
+++ b/api/services/api_based_extension_service.py
@@ -1,3 +1,5 @@
+from sqlalchemy import select
+
from core.extension.api_based_extension_requestor import APIBasedExtensionRequestor
from core.helper.encrypter import decrypt_token, encrypt_token
from extensions.ext_database import db
@@ -7,11 +9,12 @@ from models.api_based_extension import APIBasedExtension, APIBasedExtensionPoint
class APIBasedExtensionService:
@staticmethod
def get_all_by_tenant_id(tenant_id: str) -> list[APIBasedExtension]:
- extension_list = (
- db.session.query(APIBasedExtension)
- .filter_by(tenant_id=tenant_id)
- .order_by(APIBasedExtension.created_at.desc())
- .all()
+ extension_list = list(
+ db.session.scalars(
+ select(APIBasedExtension)
+ .where(APIBasedExtension.tenant_id == tenant_id)
+ .order_by(APIBasedExtension.created_at.desc())
+ ).all()
)
for extension in extension_list:
@@ -36,11 +39,10 @@ class APIBasedExtensionService:
@staticmethod
def get_with_tenant_id(tenant_id: str, api_based_extension_id: str) -> APIBasedExtension:
- extension = (
- db.session.query(APIBasedExtension)
- .filter_by(tenant_id=tenant_id)
- .filter_by(id=api_based_extension_id)
- .first()
+ extension = db.session.scalar(
+ select(APIBasedExtension)
+ .where(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id)
+ .limit(1)
)
if not extension:
@@ -58,23 +60,27 @@ class APIBasedExtensionService:
if not extension_data.id:
# case one: check new data, name must be unique
- is_name_existed = (
- db.session.query(APIBasedExtension)
- .filter_by(tenant_id=extension_data.tenant_id)
- .filter_by(name=extension_data.name)
- .first()
+ is_name_existed = db.session.scalar(
+ select(APIBasedExtension)
+ .where(
+ APIBasedExtension.tenant_id == extension_data.tenant_id,
+ APIBasedExtension.name == extension_data.name,
+ )
+ .limit(1)
)
if is_name_existed:
raise ValueError("name must be unique, it is already existed")
else:
# case two: check existing data, name must be unique
- is_name_existed = (
- db.session.query(APIBasedExtension)
- .filter_by(tenant_id=extension_data.tenant_id)
- .filter_by(name=extension_data.name)
- .where(APIBasedExtension.id != extension_data.id)
- .first()
+ is_name_existed = db.session.scalar(
+ select(APIBasedExtension)
+ .where(
+ APIBasedExtension.tenant_id == extension_data.tenant_id,
+ APIBasedExtension.name == extension_data.name,
+ APIBasedExtension.id != extension_data.id,
+ )
+ .limit(1)
)
if is_name_existed:
diff --git a/api/services/app_service.py b/api/services/app_service.py
index e9aeb6c43d..87d52a3159 100644
--- a/api/services/app_service.py
+++ b/api/services/app_service.py
@@ -6,6 +6,7 @@ import sqlalchemy as sa
from flask_sqlalchemy.pagination import Pagination
from graphon.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
from graphon.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+from sqlalchemy import select
from configs import dify_config
from constants.model_template import default_app_templates
@@ -433,9 +434,7 @@ class AppService:
meta["tool_icons"][tool_name] = url_prefix + provider_id + "/icon"
elif provider_type == "api":
try:
- provider: ApiToolProvider | None = (
- db.session.query(ApiToolProvider).where(ApiToolProvider.id == provider_id).first()
- )
+ provider: ApiToolProvider | None = db.session.get(ApiToolProvider, provider_id)
if provider is None:
raise ValueError(f"provider not found for tool {tool_name}")
meta["tool_icons"][tool_name] = json.loads(provider.icon)
@@ -451,7 +450,7 @@ class AppService:
:param app_id: app id
:return: app code
"""
- site = db.session.query(Site).where(Site.app_id == app_id).first()
+ site = db.session.scalar(select(Site).where(Site.app_id == app_id).limit(1))
if not site:
raise ValueError(f"App with id {app_id} not found")
return str(site.code)
@@ -463,7 +462,7 @@ class AppService:
:param app_code: app code
:return: app id
"""
- site = db.session.query(Site).where(Site.code == app_code).first()
+ site = db.session.scalar(select(Site).where(Site.code == app_code).limit(1))
if not site:
raise ValueError(f"App with code {app_code} not found")
return str(site.app_id)
diff --git a/api/services/audio_service.py b/api/services/audio_service.py
index 90e72d5f34..1c7027efb4 100644
--- a/api/services/audio_service.py
+++ b/api/services/audio_service.py
@@ -132,7 +132,7 @@ class AudioService:
uuid.UUID(message_id)
except ValueError:
return None
- message = db.session.query(Message).where(Message.id == message_id).first()
+ message = db.session.get(Message, message_id)
if message is None:
return None
if message.answer == "" and message.status in {MessageStatus.NORMAL, MessageStatus.PAUSED}:
diff --git a/api/services/auth/api_key_auth_base.py b/api/services/auth/api_key_auth_base.py
index 2e1b723e82..b255434333 100644
--- a/api/services/auth/api_key_auth_base.py
+++ b/api/services/auth/api_key_auth_base.py
@@ -1,7 +1,5 @@
from abc import ABC, abstractmethod
-from typing import Any
-
-from typing_extensions import TypedDict
+from typing import Any, TypedDict
class AuthCredentials(TypedDict):
diff --git a/api/services/auth/jina.py b/api/services/auth/jina.py
index e5e2319ce1..e63c9a3a4d 100644
--- a/api/services/auth/jina.py
+++ b/api/services/auth/jina.py
@@ -2,8 +2,14 @@ import json
import httpx
+from core.helper.http_client_pooling import get_pooled_http_client
from services.auth.api_key_auth_base import ApiKeyAuthBase, AuthCredentials
+_http_client: httpx.Client = get_pooled_http_client(
+ "auth:jina_standalone",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
+
class JinaAuth(ApiKeyAuthBase):
def __init__(self, credentials: AuthCredentials):
@@ -31,7 +37,7 @@ class JinaAuth(ApiKeyAuthBase):
return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"}
def _post_request(self, url, data, headers):
- return httpx.post(url, headers=headers, json=data)
+ return _http_client.post(url, headers=headers, json=data)
def _handle_error(self, response):
if response.status_code in {402, 409, 500}:
diff --git a/api/services/auth/jina/jina.py b/api/services/auth/jina/jina.py
index e5e2319ce1..8ea0b6cd69 100644
--- a/api/services/auth/jina/jina.py
+++ b/api/services/auth/jina/jina.py
@@ -2,8 +2,14 @@ import json
import httpx
+from core.helper.http_client_pooling import get_pooled_http_client
from services.auth.api_key_auth_base import ApiKeyAuthBase, AuthCredentials
+_http_client: httpx.Client = get_pooled_http_client(
+ "auth:jina",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
+
class JinaAuth(ApiKeyAuthBase):
def __init__(self, credentials: AuthCredentials):
@@ -31,7 +37,7 @@ class JinaAuth(ApiKeyAuthBase):
return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"}
def _post_request(self, url, data, headers):
- return httpx.post(url, headers=headers, json=data)
+ return _http_client.post(url, headers=headers, json=data)
def _handle_error(self, response):
if response.status_code in {402, 409, 500}:
diff --git a/api/services/billing_service.py b/api/services/billing_service.py
index 70d4ce1ee6..3c3e4aa6d2 100644
--- a/api/services/billing_service.py
+++ b/api/services/billing_service.py
@@ -2,14 +2,15 @@ import json
import logging
import os
from collections.abc import Sequence
-from typing import Literal
+from typing import Literal, TypedDict
import httpx
from pydantic import TypeAdapter
+from sqlalchemy import select
from tenacity import retry, retry_if_exception_type, stop_before_delay, wait_fixed
-from typing_extensions import TypedDict
from werkzeug.exceptions import InternalServerError
+from core.helper.http_client_pooling import get_pooled_http_client
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from extensions.ext_redis import redis_client
@@ -18,6 +19,11 @@ from models import Account, TenantAccountJoin, TenantAccountRole
logger = logging.getLogger(__name__)
+_http_client: httpx.Client = get_pooled_http_client(
+ "billing:default",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
+
class SubscriptionPlan(TypedDict):
"""Tenant subscriptionplan information."""
@@ -131,7 +137,7 @@ class BillingService:
headers = {"Content-Type": "application/json", "Billing-Api-Secret-Key": cls.secret_key}
url = f"{cls.base_url}{endpoint}"
- response = httpx.request(method, url, json=json, params=params, headers=headers, follow_redirects=True)
+ response = _http_client.request(method, url, json=json, params=params, headers=headers, follow_redirects=True)
if method == "GET" and response.status_code != httpx.codes.OK:
raise ValueError("Unable to retrieve billing information. Please try again later or contact support.")
if method == "PUT":
@@ -152,10 +158,10 @@ class BillingService:
def is_tenant_owner_or_admin(current_user: Account):
tenant_id = current_user.current_tenant_id
- join: TenantAccountJoin | None = (
- db.session.query(TenantAccountJoin)
+ join: TenantAccountJoin | None = db.session.scalar(
+ select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant_id, TenantAccountJoin.account_id == current_user.id)
- .first()
+ .limit(1)
)
if not join:
diff --git a/api/services/conversation_service.py b/api/services/conversation_service.py
index ba1e7bb826..f5085af59b 100644
--- a/api/services/conversation_service.py
+++ b/api/services/conversation_service.py
@@ -1,7 +1,7 @@
import contextlib
import logging
from collections.abc import Callable, Sequence
-from typing import Any, Union
+from typing import Any
from graphon.variables.types import SegmentType
from sqlalchemy import asc, desc, func, or_, select
@@ -37,7 +37,7 @@ class ConversationService:
*,
session: Session,
app_model: App,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
last_id: str | None,
limit: int,
invoke_from: InvokeFrom,
@@ -119,7 +119,7 @@ class ConversationService:
cls,
app_model: App,
conversation_id: str,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
name: str | None,
auto_generate: bool,
):
@@ -137,11 +137,11 @@ class ConversationService:
@classmethod
def auto_generate_name(cls, app_model: App, conversation: Conversation):
# get conversation first message
- message = (
- db.session.query(Message)
+ message = db.session.scalar(
+ select(Message)
.where(Message.app_id == app_model.id, Message.conversation_id == conversation.id)
.order_by(Message.created_at.asc())
- .first()
+ .limit(1)
)
if not message:
@@ -159,9 +159,9 @@ class ConversationService:
return conversation
@classmethod
- def get_conversation(cls, app_model: App, conversation_id: str, user: Union[Account, EndUser] | None):
- conversation = (
- db.session.query(Conversation)
+ def get_conversation(cls, app_model: App, conversation_id: str, user: Account | EndUser | None):
+ conversation = db.session.scalar(
+ select(Conversation)
.where(
Conversation.id == conversation_id,
Conversation.app_id == app_model.id,
@@ -170,7 +170,7 @@ class ConversationService:
Conversation.from_account_id == (user.id if isinstance(user, Account) else None),
Conversation.is_deleted == False,
)
- .first()
+ .limit(1)
)
if not conversation:
@@ -179,7 +179,7 @@ class ConversationService:
return conversation
@classmethod
- def delete(cls, app_model: App, conversation_id: str, user: Union[Account, EndUser] | None):
+ def delete(cls, app_model: App, conversation_id: str, user: Account | EndUser | None):
"""
Delete a conversation only if it belongs to the given user and app context.
@@ -209,7 +209,7 @@ class ConversationService:
cls,
app_model: App,
conversation_id: str,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
limit: int,
last_id: str | None,
variable_name: str | None = None,
@@ -278,7 +278,7 @@ class ConversationService:
app_model: App,
conversation_id: str,
variable_id: str,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
new_value: Any,
):
"""
diff --git a/api/services/credit_pool_service.py b/api/services/credit_pool_service.py
index 2894826935..7826695366 100644
--- a/api/services/credit_pool_service.py
+++ b/api/services/credit_pool_service.py
@@ -1,6 +1,6 @@
import logging
-from sqlalchemy import update
+from sqlalchemy import select, update
from sqlalchemy.orm import Session
from configs import dify_config
@@ -29,13 +29,13 @@ class CreditPoolService:
@classmethod
def get_pool(cls, tenant_id: str, pool_type: str = "trial") -> TenantCreditPool | None:
"""get tenant credit pool"""
- return (
- db.session.query(TenantCreditPool)
- .filter_by(
- tenant_id=tenant_id,
- pool_type=pool_type,
+ return db.session.scalar(
+ select(TenantCreditPool)
+ .where(
+ TenantCreditPool.tenant_id == tenant_id,
+ TenantCreditPool.pool_type == pool_type,
)
- .first()
+ .limit(1)
)
@classmethod
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index 83363125c3..53bc51d457 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -274,7 +274,9 @@ class DatasetService:
db.session.flush()
if provider == "external" and external_knowledge_api_id:
- external_knowledge_api = ExternalDatasetService.get_external_knowledge_api(external_knowledge_api_id)
+ external_knowledge_api = ExternalDatasetService.get_external_knowledge_api(
+ external_knowledge_api_id, tenant_id
+ )
if not external_knowledge_api:
raise ValueError("External API template not found.")
if external_knowledge_id is None:
diff --git a/api/services/enterprise/account_deletion_sync.py b/api/services/enterprise/account_deletion_sync.py
index c7ff42894d..b5107fb0f6 100644
--- a/api/services/enterprise/account_deletion_sync.py
+++ b/api/services/enterprise/account_deletion_sync.py
@@ -4,6 +4,7 @@ import uuid
from datetime import UTC, datetime
from redis import RedisError
+from sqlalchemy import select
from configs import dify_config
from extensions.ext_database import db
@@ -104,7 +105,9 @@ def sync_account_deletion(account_id: str, *, source: str) -> bool:
return True
# Fetch all workspaces the account belongs to
- workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()
+ workspace_joins = db.session.scalars(
+ select(TenantAccountJoin).where(TenantAccountJoin.account_id == account_id)
+ ).all()
# Queue sync task for each workspace
success = True
diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py
index 64852c222f..9a522ece52 100644
--- a/api/services/external_knowledge_service.py
+++ b/api/services/external_knowledge_service.py
@@ -102,9 +102,9 @@ class ExternalDatasetService:
raise ValueError(f"Forbidden: Authorization failed with api_key: {api_key}")
@staticmethod
- def get_external_knowledge_api(external_knowledge_api_id: str) -> ExternalKnowledgeApis:
+ def get_external_knowledge_api(external_knowledge_api_id: str, tenant_id: str) -> ExternalKnowledgeApis:
external_knowledge_api: ExternalKnowledgeApis | None = (
- db.session.query(ExternalKnowledgeApis).filter_by(id=external_knowledge_api_id).first()
+ db.session.query(ExternalKnowledgeApis).filter_by(id=external_knowledge_api_id, tenant_id=tenant_id).first()
)
if external_knowledge_api is None:
raise ValueError("api template not found")
diff --git a/api/services/feedback_service.py b/api/services/feedback_service.py
index e7473d371b..d6c338a830 100644
--- a/api/services/feedback_service.py
+++ b/api/services/feedback_service.py
@@ -4,7 +4,7 @@ import json
from datetime import datetime
from flask import Response
-from sqlalchemy import or_
+from sqlalchemy import or_, select
from extensions.ext_database import db
from models.enums import FeedbackRating
@@ -41,8 +41,8 @@ class FeedbackService:
raise ValueError(f"Unsupported format: {format_type}")
# Build base query
- query = (
- db.session.query(MessageFeedback, Message, Conversation, App, Account)
+ stmt = (
+ select(MessageFeedback, Message, Conversation, App, Account)
.join(Message, MessageFeedback.message_id == Message.id)
.join(Conversation, MessageFeedback.conversation_id == Conversation.id)
.join(App, MessageFeedback.app_id == App.id)
@@ -52,36 +52,36 @@ class FeedbackService:
# Apply filters
if from_source:
- query = query.filter(MessageFeedback.from_source == from_source)
+ stmt = stmt.where(MessageFeedback.from_source == from_source)
if rating:
- query = query.filter(MessageFeedback.rating == rating)
+ stmt = stmt.where(MessageFeedback.rating == rating)
if has_comment is not None:
if has_comment:
- query = query.filter(MessageFeedback.content.isnot(None), MessageFeedback.content != "")
+ stmt = stmt.where(MessageFeedback.content.isnot(None), MessageFeedback.content != "")
else:
- query = query.filter(or_(MessageFeedback.content.is_(None), MessageFeedback.content == ""))
+ stmt = stmt.where(or_(MessageFeedback.content.is_(None), MessageFeedback.content == ""))
if start_date:
try:
start_dt = datetime.strptime(start_date, "%Y-%m-%d")
- query = query.filter(MessageFeedback.created_at >= start_dt)
+ stmt = stmt.where(MessageFeedback.created_at >= start_dt)
except ValueError:
raise ValueError(f"Invalid start_date format: {start_date}. Use YYYY-MM-DD")
if end_date:
try:
end_dt = datetime.strptime(end_date, "%Y-%m-%d")
- query = query.filter(MessageFeedback.created_at <= end_dt)
+ stmt = stmt.where(MessageFeedback.created_at <= end_dt)
except ValueError:
raise ValueError(f"Invalid end_date format: {end_date}. Use YYYY-MM-DD")
# Order by creation date (newest first)
- query = query.order_by(MessageFeedback.created_at.desc())
+ stmt = stmt.order_by(MessageFeedback.created_at.desc())
# Execute query
- results = query.all()
+ results = db.session.execute(stmt).all()
# Prepare data for export
export_data = []
diff --git a/api/services/message_service.py b/api/services/message_service.py
index a04f9cbe01..5b133b0c04 100644
--- a/api/services/message_service.py
+++ b/api/services/message_service.py
@@ -1,8 +1,8 @@
from collections.abc import Sequence
-from typing import Union
from graphon.model_runtime.entities.model_entities import ModelType
from pydantic import TypeAdapter
+from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
@@ -56,7 +56,7 @@ class MessageService:
def pagination_by_first_id(
cls,
app_model: App,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
conversation_id: str,
first_id: str | None,
limit: int,
@@ -75,17 +75,15 @@ class MessageService:
fetch_limit = limit + 1
if first_id:
- first_message = (
- db.session.query(Message)
- .where(Message.conversation_id == conversation.id, Message.id == first_id)
- .first()
+ first_message = db.session.scalar(
+ select(Message).where(Message.conversation_id == conversation.id, Message.id == first_id).limit(1)
)
if not first_message:
raise FirstMessageNotExistsError()
- history_messages = (
- db.session.query(Message)
+ history_messages = db.session.scalars(
+ select(Message)
.where(
Message.conversation_id == conversation.id,
Message.created_at < first_message.created_at,
@@ -93,16 +91,14 @@ class MessageService:
)
.order_by(Message.created_at.desc())
.limit(fetch_limit)
- .all()
- )
+ ).all()
else:
- history_messages = (
- db.session.query(Message)
+ history_messages = db.session.scalars(
+ select(Message)
.where(Message.conversation_id == conversation.id)
.order_by(Message.created_at.desc())
.limit(fetch_limit)
- .all()
- )
+ ).all()
has_more = False
if len(history_messages) > limit:
@@ -120,7 +116,7 @@ class MessageService:
def pagination_by_last_id(
cls,
app_model: App,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
last_id: str | None,
limit: int,
conversation_id: str | None = None,
@@ -129,7 +125,7 @@ class MessageService:
if not user:
return InfiniteScrollPagination(data=[], limit=limit, has_more=False)
- base_query = db.session.query(Message)
+ stmt = select(Message)
fetch_limit = limit + 1
@@ -138,28 +134,27 @@ class MessageService:
app_model=app_model, user=user, conversation_id=conversation_id
)
- base_query = base_query.where(Message.conversation_id == conversation.id)
+ stmt = stmt.where(Message.conversation_id == conversation.id)
# Check if include_ids is not None and not empty to avoid WHERE false condition
if include_ids is not None:
if len(include_ids) == 0:
return InfiniteScrollPagination(data=[], limit=limit, has_more=False)
- base_query = base_query.where(Message.id.in_(include_ids))
+ stmt = stmt.where(Message.id.in_(include_ids))
if last_id:
- last_message = base_query.where(Message.id == last_id).first()
+ last_message = db.session.scalar(stmt.where(Message.id == last_id).limit(1))
if not last_message:
raise LastMessageNotExistsError()
- history_messages = (
- base_query.where(Message.created_at < last_message.created_at, Message.id != last_message.id)
+ history_messages = db.session.scalars(
+ stmt.where(Message.created_at < last_message.created_at, Message.id != last_message.id)
.order_by(Message.created_at.desc())
.limit(fetch_limit)
- .all()
- )
+ ).all()
else:
- history_messages = base_query.order_by(Message.created_at.desc()).limit(fetch_limit).all()
+ history_messages = db.session.scalars(stmt.order_by(Message.created_at.desc()).limit(fetch_limit)).all()
has_more = False
if len(history_messages) > limit:
@@ -174,7 +169,7 @@ class MessageService:
*,
app_model: App,
message_id: str,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
rating: FeedbackRating | None,
content: str | None,
):
@@ -214,21 +209,20 @@ class MessageService:
def get_all_messages_feedbacks(cls, app_model: App, page: int, limit: int):
"""Get all feedbacks of an app"""
offset = (page - 1) * limit
- feedbacks = (
- db.session.query(MessageFeedback)
+ feedbacks = db.session.scalars(
+ select(MessageFeedback)
.where(MessageFeedback.app_id == app_model.id)
.order_by(MessageFeedback.created_at.desc(), MessageFeedback.id.desc())
.limit(limit)
.offset(offset)
- .all()
- )
+ ).all()
return [record.to_dict() for record in feedbacks]
@classmethod
- def get_message(cls, app_model: App, user: Union[Account, EndUser] | None, message_id: str):
- message = (
- db.session.query(Message)
+ def get_message(cls, app_model: App, user: Account | EndUser | None, message_id: str):
+ message = db.session.scalar(
+ select(Message)
.where(
Message.id == message_id,
Message.app_id == app_model.id,
@@ -236,7 +230,7 @@ class MessageService:
Message.from_end_user_id == (user.id if isinstance(user, EndUser) else None),
Message.from_account_id == (user.id if isinstance(user, Account) else None),
)
- .first()
+ .limit(1)
)
if not message:
@@ -246,7 +240,7 @@ class MessageService:
@classmethod
def get_suggested_questions_after_answer(
- cls, app_model: App, user: Union[Account, EndUser] | None, message_id: str, invoke_from: InvokeFrom
+ cls, app_model: App, user: Account | EndUser | None, message_id: str, invoke_from: InvokeFrom
) -> list[str]:
if not user:
raise ValueError("user cannot be None")
@@ -282,10 +276,10 @@ class MessageService:
)
else:
if not conversation.override_model_configs:
- app_model_config = (
- db.session.query(AppModelConfig)
+ app_model_config = db.session.scalar(
+ select(AppModelConfig)
.where(AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id)
- .first()
+ .limit(1)
)
else:
conversation_override_model_configs = _app_model_config_adapter.validate_json(
diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py
index 2f47a647a8..12729278cc 100644
--- a/api/services/metadata_service.py
+++ b/api/services/metadata_service.py
@@ -65,7 +65,7 @@ class MetadataService:
raise ValueError("Metadata name already exists in Built-in fields.")
try:
MetadataService.knowledge_base_metadata_lock_check(dataset_id, None)
- metadata = db.session.query(DatasetMetadata).filter_by(id=metadata_id).first()
+ metadata = db.session.query(DatasetMetadata).filter_by(id=metadata_id, dataset_id=dataset_id).first()
if metadata is None:
raise ValueError("Metadata not found.")
old_name = metadata.name
@@ -101,7 +101,7 @@ class MetadataService:
lock_key = f"dataset_metadata_lock_{dataset_id}"
try:
MetadataService.knowledge_base_metadata_lock_check(dataset_id, None)
- metadata = db.session.query(DatasetMetadata).filter_by(id=metadata_id).first()
+ metadata = db.session.query(DatasetMetadata).filter_by(id=metadata_id, dataset_id=dataset_id).first()
if metadata is None:
raise ValueError("Metadata not found.")
db.session.delete(metadata)
diff --git a/api/services/model_load_balancing_service.py b/api/services/model_load_balancing_service.py
index 25de411e43..bc0bfd215c 100644
--- a/api/services/model_load_balancing_service.py
+++ b/api/services/model_load_balancing_service.py
@@ -110,20 +110,21 @@ class ModelLoadBalancingService:
credential_source_type = CredentialSourceType.CUSTOM_MODEL
# Get load balancing configurations
- load_balancing_configs = (
- db.session.query(LoadBalancingModelConfig)
- .where(
- LoadBalancingModelConfig.tenant_id == tenant_id,
- LoadBalancingModelConfig.provider_name == provider_configuration.provider.provider,
- LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(),
- LoadBalancingModelConfig.model_name == model,
- or_(
- LoadBalancingModelConfig.credential_source_type == credential_source_type,
- LoadBalancingModelConfig.credential_source_type.is_(None),
- ),
- )
- .order_by(LoadBalancingModelConfig.created_at)
- .all()
+ load_balancing_configs = list(
+ db.session.scalars(
+ select(LoadBalancingModelConfig)
+ .where(
+ LoadBalancingModelConfig.tenant_id == tenant_id,
+ LoadBalancingModelConfig.provider_name == provider_configuration.provider.provider,
+ LoadBalancingModelConfig.model_type == model_type_enum,
+ LoadBalancingModelConfig.model_name == model,
+ or_(
+ LoadBalancingModelConfig.credential_source_type == credential_source_type,
+ LoadBalancingModelConfig.credential_source_type.is_(None),
+ ),
+ )
+ .order_by(LoadBalancingModelConfig.created_at)
+ ).all()
)
if provider_configuration.custom_configuration.provider:
@@ -143,7 +144,7 @@ class ModelLoadBalancingService:
load_balancing_configs.insert(0, inherit_config)
else:
# move the inherit configuration to the first
- for i, load_balancing_config in enumerate(load_balancing_configs[:]):
+ for i, load_balancing_config in enumerate(load_balancing_configs.copy()):
if load_balancing_config.name == "__inherit__":
inherit_config = load_balancing_configs.pop(i)
load_balancing_configs.insert(0, inherit_config)
@@ -235,16 +236,16 @@ class ModelLoadBalancingService:
model_type_enum = ModelType.value_of(model_type)
# Get load balancing configurations
- load_balancing_model_config = (
- db.session.query(LoadBalancingModelConfig)
+ load_balancing_model_config = db.session.scalar(
+ select(LoadBalancingModelConfig)
.where(
LoadBalancingModelConfig.tenant_id == tenant_id,
LoadBalancingModelConfig.provider_name == provider_configuration.provider.provider,
- LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(),
+ LoadBalancingModelConfig.model_type == model_type_enum,
LoadBalancingModelConfig.model_name == model,
LoadBalancingModelConfig.id == config_id,
)
- .first()
+ .limit(1)
)
if not load_balancing_model_config:
@@ -288,7 +289,7 @@ class ModelLoadBalancingService:
inherit_config = LoadBalancingModelConfig(
tenant_id=tenant_id,
provider_name=provider,
- model_type=model_type.to_origin_model_type(),
+ model_type=model_type,
model_name=model,
name="__inherit__",
)
@@ -328,7 +329,7 @@ class ModelLoadBalancingService:
select(LoadBalancingModelConfig).where(
LoadBalancingModelConfig.tenant_id == tenant_id,
LoadBalancingModelConfig.provider_name == provider_configuration.provider.provider,
- LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(),
+ LoadBalancingModelConfig.model_type == model_type_enum,
LoadBalancingModelConfig.model_name == model,
)
).all()
@@ -351,26 +352,26 @@ class ModelLoadBalancingService:
if credential_id:
if config_from == "predefined-model":
- credential_record = (
- db.session.query(ProviderCredential)
- .filter_by(
- id=credential_id,
- tenant_id=tenant_id,
- provider_name=provider_configuration.provider.provider,
+ credential_record = db.session.scalar(
+ select(ProviderCredential)
+ .where(
+ ProviderCredential.id == credential_id,
+ ProviderCredential.tenant_id == tenant_id,
+ ProviderCredential.provider_name == provider_configuration.provider.provider,
)
- .first()
+ .limit(1)
)
else:
- credential_record = (
- db.session.query(ProviderModelCredential)
- .filter_by(
- id=credential_id,
- tenant_id=tenant_id,
- provider_name=provider_configuration.provider.provider,
- model_name=model,
- model_type=model_type_enum.to_origin_model_type(),
+ credential_record = db.session.scalar(
+ select(ProviderModelCredential)
+ .where(
+ ProviderModelCredential.id == credential_id,
+ ProviderModelCredential.tenant_id == tenant_id,
+ ProviderModelCredential.provider_name == provider_configuration.provider.provider,
+ ProviderModelCredential.model_name == model,
+ ProviderModelCredential.model_type == model_type_enum,
)
- .first()
+ .limit(1)
)
if not credential_record:
raise ValueError(f"Provider credential with id {credential_id} not found")
@@ -432,7 +433,7 @@ class ModelLoadBalancingService:
load_balancing_model_config = LoadBalancingModelConfig(
tenant_id=tenant_id,
provider_name=provider_configuration.provider.provider,
- model_type=model_type_enum.to_origin_model_type(),
+ model_type=model_type_enum,
model_name=model,
name=credential_record.credential_name,
encrypted_config=credential_record.encrypted_config,
@@ -460,7 +461,7 @@ class ModelLoadBalancingService:
load_balancing_model_config = LoadBalancingModelConfig(
tenant_id=tenant_id,
provider_name=provider_configuration.provider.provider,
- model_type=model_type_enum.to_origin_model_type(),
+ model_type=model_type_enum,
model_name=model,
name=name,
encrypted_config=json.dumps(credentials),
@@ -510,16 +511,16 @@ class ModelLoadBalancingService:
load_balancing_model_config = None
if config_id:
# Get load balancing config
- load_balancing_model_config = (
- db.session.query(LoadBalancingModelConfig)
+ load_balancing_model_config = db.session.scalar(
+ select(LoadBalancingModelConfig)
.where(
LoadBalancingModelConfig.tenant_id == tenant_id,
LoadBalancingModelConfig.provider_name == provider,
- LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(),
+ LoadBalancingModelConfig.model_type == model_type_enum,
LoadBalancingModelConfig.model_name == model,
LoadBalancingModelConfig.id == config_id,
)
- .first()
+ .limit(1)
)
if not load_balancing_model_config:
diff --git a/api/services/ops_service.py b/api/services/ops_service.py
index 50ea832085..0db3d3efec 100644
--- a/api/services/ops_service.py
+++ b/api/services/ops_service.py
@@ -1,7 +1,7 @@
-from typing import Any
+from sqlalchemy import select
from core.ops.entities.config_entity import BaseTracingConfig
-from core.ops.ops_trace_manager import OpsTraceManager, provider_config_map
+from core.ops.ops_trace_manager import OpsTraceManager, TracingProviderConfigEntry, provider_config_map
from extensions.ext_database import db
from models.model import App, TraceAppConfig
@@ -15,17 +15,17 @@ class OpsService:
:param tracing_provider: tracing provider
:return:
"""
- trace_config_data: TraceAppConfig | None = (
- db.session.query(TraceAppConfig)
+ trace_config_data: TraceAppConfig | None = db.session.scalar(
+ select(TraceAppConfig)
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
- .first()
+ .limit(1)
)
if not trace_config_data:
return None
# decrypt_token and obfuscated_token
- app = db.session.query(App).where(App.id == app_id).first()
+ app = db.session.get(App, app_id)
if not app:
return None
tenant_id = app.tenant_id
@@ -148,7 +148,7 @@ class OpsService:
except KeyError:
return {"error": f"Invalid tracing provider: {tracing_provider}"}
- provider_config: dict[str, Any] = provider_config_map[tracing_provider]
+ provider_config: TracingProviderConfigEntry = provider_config_map[tracing_provider]
config_class: type[BaseTracingConfig] = provider_config["config_class"]
other_keys: list[str] = provider_config["other_keys"]
@@ -182,17 +182,17 @@ class OpsService:
project_url = None
# check if trace config already exists
- trace_config_data: TraceAppConfig | None = (
- db.session.query(TraceAppConfig)
+ trace_config_data: TraceAppConfig | None = db.session.scalar(
+ select(TraceAppConfig)
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
- .first()
+ .limit(1)
)
if trace_config_data:
return None
# get tenant id
- app = db.session.query(App).where(App.id == app_id).first()
+ app = db.session.get(App, app_id)
if not app:
return None
tenant_id = app.tenant_id
@@ -224,17 +224,17 @@ class OpsService:
raise ValueError(f"Invalid tracing provider: {tracing_provider}")
# check if trace config already exists
- current_trace_config = (
- db.session.query(TraceAppConfig)
+ current_trace_config = db.session.scalar(
+ select(TraceAppConfig)
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
- .first()
+ .limit(1)
)
if not current_trace_config:
return None
# get tenant id
- app = db.session.query(App).where(App.id == app_id).first()
+ app = db.session.get(App, app_id)
if not app:
return None
tenant_id = app.tenant_id
@@ -261,10 +261,10 @@ class OpsService:
:param tracing_provider: tracing provider
:return:
"""
- trace_config = (
- db.session.query(TraceAppConfig)
+ trace_config = db.session.scalar(
+ select(TraceAppConfig)
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
- .first()
+ .limit(1)
)
if not trace_config:
diff --git a/api/services/plugin/plugin_migration.py b/api/services/plugin/plugin_migration.py
index 1562d4e696..442ccef1da 100644
--- a/api/services/plugin/plugin_migration.py
+++ b/api/services/plugin/plugin_migration.py
@@ -5,7 +5,7 @@ import time
from collections.abc import Mapping, Sequence
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
-from typing import Any
+from typing import Any, TypedDict
from uuid import uuid4
import click
@@ -14,7 +14,6 @@ import tqdm
from flask import Flask, current_app
from pydantic import TypeAdapter
from sqlalchemy.orm import Session
-from typing_extensions import TypedDict
from core.agent.entities import AgentToolEntity
from core.helper import marketplace
diff --git a/api/services/rag_pipeline/pipeline_generate_service.py b/api/services/rag_pipeline/pipeline_generate_service.py
index 07e1b8f20e..10e89b1dba 100644
--- a/api/services/rag_pipeline/pipeline_generate_service.py
+++ b/api/services/rag_pipeline/pipeline_generate_service.py
@@ -110,7 +110,7 @@ class PipelineGenerateService:
Update document status to waiting
:param document_id: document id
"""
- document = db.session.query(Document).where(Document.id == document_id).first()
+ document = db.session.get(Document, document_id)
if document:
document.indexing_status = IndexingStatus.WAITING
db.session.add(document)
diff --git a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py
index 4ac2e0792b..2ee871a266 100644
--- a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py
+++ b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py
@@ -1,4 +1,5 @@
import yaml
+from sqlalchemy import select
from extensions.ext_database import db
from libs.login import current_account_with_tenant
@@ -32,12 +33,11 @@ class CustomizedPipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
:param language: language
:return:
"""
- pipeline_customized_templates = (
- db.session.query(PipelineCustomizedTemplate)
+ pipeline_customized_templates = db.session.scalars(
+ select(PipelineCustomizedTemplate)
.where(PipelineCustomizedTemplate.tenant_id == tenant_id, PipelineCustomizedTemplate.language == language)
.order_by(PipelineCustomizedTemplate.position.asc(), PipelineCustomizedTemplate.created_at.desc())
- .all()
- )
+ ).all()
recommended_pipelines_results = []
for pipeline_customized_template in pipeline_customized_templates:
recommended_pipeline_result = {
@@ -59,9 +59,7 @@ class CustomizedPipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
:param template_id: Template ID
:return:
"""
- pipeline_template = (
- db.session.query(PipelineCustomizedTemplate).where(PipelineCustomizedTemplate.id == template_id).first()
- )
+ pipeline_template = db.session.get(PipelineCustomizedTemplate, template_id)
if not pipeline_template:
return None
diff --git a/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py b/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py
index 908f9a2684..43b21a7b32 100644
--- a/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py
+++ b/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py
@@ -1,4 +1,5 @@
import yaml
+from sqlalchemy import select
from extensions.ext_database import db
from models.dataset import PipelineBuiltInTemplate
@@ -30,8 +31,10 @@ class DatabasePipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
:return:
"""
- pipeline_built_in_templates: list[PipelineBuiltInTemplate] = (
- db.session.query(PipelineBuiltInTemplate).where(PipelineBuiltInTemplate.language == language).all()
+ pipeline_built_in_templates = list(
+ db.session.scalars(
+ select(PipelineBuiltInTemplate).where(PipelineBuiltInTemplate.language == language)
+ ).all()
)
recommended_pipelines_results = []
@@ -58,9 +61,7 @@ class DatabasePipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
:return:
"""
# is in public recommended list
- pipeline_template = (
- db.session.query(PipelineBuiltInTemplate).where(PipelineBuiltInTemplate.id == template_id).first()
- )
+ pipeline_template = db.session.get(PipelineBuiltInTemplate, template_id)
if not pipeline_template:
return None
diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py
index bcf5973d7b..50f34d5a8a 100644
--- a/api/services/rag_pipeline/rag_pipeline.py
+++ b/api/services/rag_pipeline/rag_pipeline.py
@@ -574,7 +574,7 @@ class RagPipelineService:
outputs=workflow_node_execution.outputs,
)
session.commit()
- if workflow_node_execution_db_model is not None:
+ if isinstance(workflow_node_execution_db_model, WorkflowNodeExecutionModel):
enqueue_draft_node_execution_trace(
execution=workflow_node_execution_db_model,
outputs=workflow_node_execution.outputs,
diff --git a/api/services/rag_pipeline/rag_pipeline_transform_service.py b/api/services/rag_pipeline/rag_pipeline_transform_service.py
index 215a8c8528..c3b00fe109 100644
--- a/api/services/rag_pipeline/rag_pipeline_transform_service.py
+++ b/api/services/rag_pipeline/rag_pipeline_transform_service.py
@@ -6,6 +6,7 @@ from uuid import uuid4
import yaml
from flask_login import current_user
+from sqlalchemy import select
from constants import DOCUMENT_EXTENSIONS
from core.plugin.impl.plugin import PluginInstaller
@@ -26,7 +27,7 @@ logger = logging.getLogger(__name__)
class RagPipelineTransformService:
def transform_dataset(self, dataset_id: str):
- dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
+ dataset = db.session.get(Dataset, dataset_id)
if not dataset:
raise ValueError("Dataset not found")
if dataset.pipeline_id and dataset.runtime_mode == DatasetRuntimeMode.RAG_PIPELINE:
@@ -306,7 +307,7 @@ class RagPipelineTransformService:
jina_node_id = "1752491761974"
firecrawl_node_id = "1752565402678"
- documents = db.session.query(Document).where(Document.dataset_id == dataset.id).all()
+ documents = db.session.scalars(select(Document).where(Document.dataset_id == dataset.id)).all()
for document in documents:
data_source_info_dict = document.data_source_info_dict
@@ -316,7 +317,7 @@ class RagPipelineTransformService:
document.data_source_type = DataSourceType.LOCAL_FILE
file_id = data_source_info_dict.get("upload_file_id")
if file_id:
- file = db.session.query(UploadFile).where(UploadFile.id == file_id).first()
+ file = db.session.get(UploadFile, file_id)
if file:
data_source_info = json.dumps(
{
diff --git a/api/services/recommend_app/database/database_retrieval.py b/api/services/recommend_app/database/database_retrieval.py
index d0c49325dc..6fb90d356d 100644
--- a/api/services/recommend_app/database/database_retrieval.py
+++ b/api/services/recommend_app/database/database_retrieval.py
@@ -77,17 +77,15 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase):
:return:
"""
# is in public recommended list
- recommended_app = (
- db.session.query(RecommendedApp)
- .where(RecommendedApp.is_listed == True, RecommendedApp.app_id == app_id)
- .first()
+ recommended_app = db.session.scalar(
+ select(RecommendedApp).where(RecommendedApp.is_listed == True, RecommendedApp.app_id == app_id).limit(1)
)
if not recommended_app:
return None
# get app detail
- app_model = db.session.query(App).where(App.id == app_id).first()
+ app_model = db.session.get(App, app_id)
if not app_model or not app_model.is_public:
return None
diff --git a/api/services/recommended_app_service.py b/api/services/recommended_app_service.py
index 6b211a5632..9819822103 100644
--- a/api/services/recommended_app_service.py
+++ b/api/services/recommended_app_service.py
@@ -1,3 +1,5 @@
+from sqlalchemy import select
+
from configs import dify_config
from extensions.ext_database import db
from models.model import AccountTrialAppRecord, TrialApp
@@ -27,7 +29,7 @@ class RecommendedAppService:
apps = result["recommended_apps"]
for app in apps:
app_id = app["app_id"]
- trial_app_model = db.session.query(TrialApp).where(TrialApp.app_id == app_id).first()
+ trial_app_model = db.session.scalar(select(TrialApp).where(TrialApp.app_id == app_id).limit(1))
if trial_app_model:
app["can_trial"] = True
else:
@@ -46,7 +48,7 @@ class RecommendedAppService:
result: dict = retrieval_instance.get_recommend_app_detail(app_id)
if FeatureService.get_system_features().enable_trial_app:
app_id = result["id"]
- trial_app_model = db.session.query(TrialApp).where(TrialApp.app_id == app_id).first()
+ trial_app_model = db.session.scalar(select(TrialApp).where(TrialApp.app_id == app_id).limit(1))
if trial_app_model:
result["can_trial"] = True
else:
@@ -60,10 +62,10 @@ class RecommendedAppService:
:param app_id: app id
:return:
"""
- account_trial_app_record = (
- db.session.query(AccountTrialAppRecord)
+ account_trial_app_record = db.session.scalar(
+ select(AccountTrialAppRecord)
.where(AccountTrialAppRecord.app_id == app_id, AccountTrialAppRecord.account_id == account_id)
- .first()
+ .limit(1)
)
if account_trial_app_record:
account_trial_app_record.count += 1
diff --git a/api/services/retention/workflow_run/restore_archived_workflow_run.py b/api/services/retention/workflow_run/restore_archived_workflow_run.py
index c8362738ee..d6ab62a84b 100644
--- a/api/services/retention/workflow_run/restore_archived_workflow_run.py
+++ b/api/services/retention/workflow_run/restore_archived_workflow_run.py
@@ -13,13 +13,12 @@ from collections.abc import Callable
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
from datetime import datetime
-from typing import Any, cast
+from typing import Any, TypedDict, cast
import click
from pydantic import TypeAdapter
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.engine import CursorResult
-from typing_extensions import TypedDict
class _TableInfo(TypedDict, total=False):
diff --git a/api/services/saved_message_service.py b/api/services/saved_message_service.py
index d0f4f27968..90f0137712 100644
--- a/api/services/saved_message_service.py
+++ b/api/services/saved_message_service.py
@@ -1,4 +1,4 @@
-from typing import Union
+from sqlalchemy import select
from extensions.ext_database import db
from libs.infinite_scroll_pagination import InfiniteScrollPagination
@@ -12,20 +12,19 @@ from services.message_service import MessageService
class SavedMessageService:
@classmethod
def pagination_by_last_id(
- cls, app_model: App, user: Union[Account, EndUser] | None, last_id: str | None, limit: int
+ cls, app_model: App, user: Account | EndUser | None, last_id: str | None, limit: int
) -> InfiniteScrollPagination:
if not user:
raise ValueError("User is required")
- saved_messages = (
- db.session.query(SavedMessage)
+ saved_messages = db.session.scalars(
+ select(SavedMessage)
.where(
SavedMessage.app_id == app_model.id,
SavedMessage.created_by_role == ("account" if isinstance(user, Account) else "end_user"),
SavedMessage.created_by == user.id,
)
.order_by(SavedMessage.created_at.desc())
- .all()
- )
+ ).all()
message_ids = [sm.message_id for sm in saved_messages]
return MessageService.pagination_by_last_id(
@@ -33,18 +32,18 @@ class SavedMessageService:
)
@classmethod
- def save(cls, app_model: App, user: Union[Account, EndUser] | None, message_id: str):
+ def save(cls, app_model: App, user: Account | EndUser | None, message_id: str):
if not user:
return
- saved_message = (
- db.session.query(SavedMessage)
+ saved_message = db.session.scalar(
+ select(SavedMessage)
.where(
SavedMessage.app_id == app_model.id,
SavedMessage.message_id == message_id,
SavedMessage.created_by_role == ("account" if isinstance(user, Account) else "end_user"),
SavedMessage.created_by == user.id,
)
- .first()
+ .limit(1)
)
if saved_message:
@@ -63,18 +62,18 @@ class SavedMessageService:
db.session.commit()
@classmethod
- def delete(cls, app_model: App, user: Union[Account, EndUser] | None, message_id: str):
+ def delete(cls, app_model: App, user: Account | EndUser | None, message_id: str):
if not user:
return
- saved_message = (
- db.session.query(SavedMessage)
+ saved_message = db.session.scalar(
+ select(SavedMessage)
.where(
SavedMessage.app_id == app_model.id,
SavedMessage.message_id == message_id,
SavedMessage.created_by_role == ("account" if isinstance(user, Account) else "end_user"),
SavedMessage.created_by == user.id,
)
- .first()
+ .limit(1)
)
if not saved_message:
diff --git a/api/services/tag_service.py b/api/services/tag_service.py
index 70bf7f16f2..194622bd86 100644
--- a/api/services/tag_service.py
+++ b/api/services/tag_service.py
@@ -14,8 +14,8 @@ from models.model import App, Tag, TagBinding
class TagService:
@staticmethod
def get_tags(tag_type: str, current_tenant_id: str, keyword: str | None = None):
- query = (
- db.session.query(Tag.id, Tag.type, Tag.name, func.count(TagBinding.id).label("binding_count"))
+ stmt = (
+ select(Tag.id, Tag.type, Tag.name, func.count(TagBinding.id).label("binding_count"))
.outerjoin(TagBinding, Tag.id == TagBinding.tag_id)
.where(Tag.type == tag_type, Tag.tenant_id == current_tenant_id)
)
@@ -23,9 +23,9 @@ class TagService:
from libs.helper import escape_like_pattern
escaped_keyword = escape_like_pattern(keyword)
- query = query.where(sa.and_(Tag.name.ilike(f"%{escaped_keyword}%", escape="\\")))
- query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
- results: list = query.order_by(Tag.created_at.desc()).all()
+ stmt = stmt.where(sa.and_(Tag.name.ilike(f"%{escaped_keyword}%", escape="\\")))
+ stmt = stmt.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
+ results: list = list(db.session.execute(stmt.order_by(Tag.created_at.desc())).all())
return results
@staticmethod
@@ -64,8 +64,8 @@ class TagService:
@staticmethod
def get_tags_by_target_id(tag_type: str, current_tenant_id: str, target_id: str):
- tags = (
- db.session.query(Tag)
+ tags = db.session.scalars(
+ select(Tag)
.join(TagBinding, Tag.id == TagBinding.tag_id)
.where(
TagBinding.target_id == target_id,
@@ -73,8 +73,7 @@ class TagService:
Tag.tenant_id == current_tenant_id,
Tag.type == tag_type,
)
- .all()
- )
+ ).all()
return tags or []
@@ -97,7 +96,7 @@ class TagService:
def update_tags(args: dict, tag_id: str) -> Tag:
if TagService.get_tag_by_tag_name(args.get("type", ""), current_user.current_tenant_id, args.get("name", "")):
raise ValueError("Tag name already exists")
- tag = db.session.query(Tag).where(Tag.id == tag_id).first()
+ tag = db.session.scalar(select(Tag).where(Tag.id == tag_id).limit(1))
if not tag:
raise NotFound("Tag not found")
tag.name = args["name"]
@@ -106,12 +105,12 @@ class TagService:
@staticmethod
def get_tag_binding_count(tag_id: str) -> int:
- count = db.session.query(TagBinding).where(TagBinding.tag_id == tag_id).count()
+ count = db.session.scalar(select(func.count(TagBinding.id)).where(TagBinding.tag_id == tag_id)) or 0
return count
@staticmethod
def delete_tag(tag_id: str):
- tag = db.session.query(Tag).where(Tag.id == tag_id).first()
+ tag = db.session.scalar(select(Tag).where(Tag.id == tag_id).limit(1))
if not tag:
raise NotFound("Tag not found")
db.session.delete(tag)
@@ -128,10 +127,10 @@ class TagService:
TagService.check_target_exists(args["type"], args["target_id"])
# save tag binding
for tag_id in args["tag_ids"]:
- tag_binding = (
- db.session.query(TagBinding)
+ tag_binding = db.session.scalar(
+ select(TagBinding)
.where(TagBinding.tag_id == tag_id, TagBinding.target_id == args["target_id"])
- .first()
+ .limit(1)
)
if tag_binding:
continue
@@ -149,10 +148,10 @@ class TagService:
# check if target exists
TagService.check_target_exists(args["type"], args["target_id"])
# delete tag binding
- tag_bindings = (
- db.session.query(TagBinding)
- .where(TagBinding.target_id == args["target_id"], TagBinding.tag_id == (args["tag_id"]))
- .first()
+ tag_bindings = db.session.scalar(
+ select(TagBinding)
+ .where(TagBinding.target_id == args["target_id"], TagBinding.tag_id == args["tag_id"])
+ .limit(1)
)
if tag_bindings:
db.session.delete(tag_bindings)
@@ -161,18 +160,16 @@ class TagService:
@staticmethod
def check_target_exists(type: str, target_id: str):
if type == "knowledge":
- dataset = (
- db.session.query(Dataset)
+ dataset = db.session.scalar(
+ select(Dataset)
.where(Dataset.tenant_id == current_user.current_tenant_id, Dataset.id == target_id)
- .first()
+ .limit(1)
)
if not dataset:
raise NotFound("Dataset not found")
elif type == "app":
- app = (
- db.session.query(App)
- .where(App.tenant_id == current_user.current_tenant_id, App.id == target_id)
- .first()
+ app = db.session.scalar(
+ select(App).where(App.tenant_id == current_user.current_tenant_id, App.id == target_id).limit(1)
)
if not app:
raise NotFound("App not found")
diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py
index 2a56bc0c71..dfc0c2c63f 100644
--- a/api/services/tools/api_tools_manage_service.py
+++ b/api/services/tools/api_tools_manage_service.py
@@ -1,11 +1,10 @@
import json
import logging
-from typing import Any, cast
+from typing import Any, TypedDict, cast
from graphon.model_runtime.utils.encoders import jsonable_encoder
from httpx import get
from sqlalchemy import select
-from typing_extensions import TypedDict
from core.entities.provider_entities import ProviderConfig
from core.tools.__base.tool_runtime import ToolRuntime
@@ -124,13 +123,13 @@ class ApiToolManageService:
provider_name = provider_name.strip()
# check if the provider exists
- provider = (
- db.session.query(ApiToolProvider)
+ provider = db.session.scalar(
+ select(ApiToolProvider)
.where(
ApiToolProvider.tenant_id == tenant_id,
ApiToolProvider.name == provider_name,
)
- .first()
+ .limit(1)
)
if provider is not None:
@@ -215,13 +214,13 @@ class ApiToolManageService:
"""
list api tool provider tools
"""
- provider: ApiToolProvider | None = (
- db.session.query(ApiToolProvider)
+ provider: ApiToolProvider | None = db.session.scalar(
+ select(ApiToolProvider)
.where(
ApiToolProvider.tenant_id == tenant_id,
ApiToolProvider.name == provider_name,
)
- .first()
+ .limit(1)
)
if provider is None:
@@ -259,13 +258,13 @@ class ApiToolManageService:
provider_name = provider_name.strip()
# check if the provider exists
- provider = (
- db.session.query(ApiToolProvider)
+ provider = db.session.scalar(
+ select(ApiToolProvider)
.where(
ApiToolProvider.tenant_id == tenant_id,
ApiToolProvider.name == original_provider,
)
- .first()
+ .limit(1)
)
if provider is None:
@@ -328,13 +327,13 @@ class ApiToolManageService:
"""
delete tool provider
"""
- provider = (
- db.session.query(ApiToolProvider)
+ provider = db.session.scalar(
+ select(ApiToolProvider)
.where(
ApiToolProvider.tenant_id == tenant_id,
ApiToolProvider.name == provider_name,
)
- .first()
+ .limit(1)
)
if provider is None:
@@ -378,13 +377,13 @@ class ApiToolManageService:
if tool_bundle is None:
raise ValueError(f"invalid tool name {tool_name}")
- db_provider = (
- db.session.query(ApiToolProvider)
+ db_provider = db.session.scalar(
+ select(ApiToolProvider)
.where(
ApiToolProvider.tenant_id == tenant_id,
ApiToolProvider.name == provider_name,
)
- .first()
+ .limit(1)
)
if not db_provider:
diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py
index 8e3c36e099..d529d2f065 100644
--- a/api/services/tools/builtin_tools_manage_service.py
+++ b/api/services/tools/builtin_tools_manage_service.py
@@ -332,12 +332,11 @@ class BuiltinToolManageService:
get builtin tool provider credentials
"""
with db.session.no_autoflush:
- providers = (
- db.session.query(BuiltinToolProvider)
- .filter_by(tenant_id=tenant_id, provider=provider_name)
+ providers = db.session.scalars(
+ select(BuiltinToolProvider)
+ .where(BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.provider == provider_name)
.order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
- .all()
- )
+ ).all()
if len(providers) == 0:
return []
@@ -412,7 +411,7 @@ class BuiltinToolManageService:
"""
with Session(db.engine) as session:
# get provider
- target_provider = session.query(BuiltinToolProvider).filter_by(id=id).first()
+ target_provider = session.query(BuiltinToolProvider).filter_by(id=id, tenant_id=tenant_id).first()
if target_provider is None:
raise ValueError("provider not found")
diff --git a/api/services/variable_truncator.py b/api/services/variable_truncator.py
index 62916cc2c9..5427b7b3a7 100644
--- a/api/services/variable_truncator.py
+++ b/api/services/variable_truncator.py
@@ -3,7 +3,7 @@ from __future__ import annotations
import dataclasses
from abc import ABC, abstractmethod
from collections.abc import Mapping
-from typing import Any, Generic, TypeAlias, TypeVar, overload
+from typing import Any, overload
from graphon.file import File
from graphon.nodes.variable_assigner.common.helpers import UpdatedVariable
@@ -43,12 +43,9 @@ class _PCKeys:
CHILD_CONTENTS = "child_contents"
-_T = TypeVar("_T")
-
-
@dataclasses.dataclass(frozen=True)
-class _PartResult(Generic[_T]):
- value: _T
+class _PartResult[T]:
+ value: T
value_size: int
truncated: bool
@@ -61,7 +58,7 @@ class UnknownTypeError(Exception):
pass
-JSONTypes: TypeAlias = int | float | str | list[object] | dict[str, object] | None | bool
+type JSONTypes = int | float | str | list[object] | dict[str, object] | None | bool
@dataclasses.dataclass(frozen=True)
diff --git a/api/services/vector_service.py b/api/services/vector_service.py
index 3f78b823a6..e7266cb8e9 100644
--- a/api/services/vector_service.py
+++ b/api/services/vector_service.py
@@ -1,6 +1,7 @@
import logging
from graphon.model_runtime.entities.model_entities import ModelType
+from sqlalchemy import delete, select
from core.model_manager import ModelInstance, ModelManager
from core.rag.datasource.keyword.keyword_factory import Keyword
@@ -29,7 +30,7 @@ class VectorService:
for segment in segments:
if doc_form == IndexStructureType.PARENT_CHILD_INDEX:
- dataset_document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first()
+ dataset_document = db.session.get(DatasetDocument, segment.document_id)
if not dataset_document:
logger.warning(
"Expected DatasetDocument record to exist, but none was found, document_id=%s, segment_id=%s",
@@ -38,11 +39,7 @@ class VectorService:
)
continue
# get the process rule
- processing_rule = (
- db.session.query(DatasetProcessRule)
- .where(DatasetProcessRule.id == dataset_document.dataset_process_rule_id)
- .first()
- )
+ processing_rule = db.session.get(DatasetProcessRule, dataset_document.dataset_process_rule_id)
if not processing_rule:
raise ValueError("No processing rule found.")
# get embedding model instance
@@ -271,8 +268,8 @@ class VectorService:
vector.delete_by_ids(old_attachment_ids)
# Delete existing segment attachment bindings in one operation
- db.session.query(SegmentAttachmentBinding).where(SegmentAttachmentBinding.segment_id == segment.id).delete(
- synchronize_session=False
+ db.session.execute(
+ delete(SegmentAttachmentBinding).where(SegmentAttachmentBinding.segment_id == segment.id)
)
if not attachment_ids:
@@ -280,7 +277,7 @@ class VectorService:
return
# Bulk fetch upload files - only fetch needed fields
- upload_file_list = db.session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).all()
+ upload_file_list = db.session.scalars(select(UploadFile).where(UploadFile.id.in_(attachment_ids))).all()
if not upload_file_list:
db.session.commit()
diff --git a/api/services/web_conversation_service.py b/api/services/web_conversation_service.py
index e028e3e5e3..2c8a3be863 100644
--- a/api/services/web_conversation_service.py
+++ b/api/services/web_conversation_service.py
@@ -1,5 +1,3 @@
-from typing import Union
-
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -20,7 +18,7 @@ class WebConversationService:
*,
session: Session,
app_model: App,
- user: Union[Account, EndUser] | None,
+ user: Account | EndUser | None,
last_id: str | None,
limit: int,
invoke_from: InvokeFrom,
@@ -61,18 +59,18 @@ class WebConversationService:
)
@classmethod
- def pin(cls, app_model: App, conversation_id: str, user: Union[Account, EndUser] | None):
+ def pin(cls, app_model: App, conversation_id: str, user: Account | EndUser | None):
if not user:
return
- pinned_conversation = (
- db.session.query(PinnedConversation)
+ pinned_conversation = db.session.scalar(
+ select(PinnedConversation)
.where(
PinnedConversation.app_id == app_model.id,
PinnedConversation.conversation_id == conversation_id,
PinnedConversation.created_by_role == ("account" if isinstance(user, Account) else "end_user"),
PinnedConversation.created_by == user.id,
)
- .first()
+ .limit(1)
)
if pinned_conversation:
@@ -93,18 +91,18 @@ class WebConversationService:
db.session.commit()
@classmethod
- def unpin(cls, app_model: App, conversation_id: str, user: Union[Account, EndUser] | None):
+ def unpin(cls, app_model: App, conversation_id: str, user: Account | EndUser | None):
if not user:
return
- pinned_conversation = (
- db.session.query(PinnedConversation)
+ pinned_conversation = db.session.scalar(
+ select(PinnedConversation)
.where(
PinnedConversation.app_id == app_model.id,
PinnedConversation.conversation_id == conversation_id,
PinnedConversation.created_by_role == ("account" if isinstance(user, Account) else "end_user"),
PinnedConversation.created_by == user.id,
)
- .first()
+ .limit(1)
)
if not pinned_conversation:
diff --git a/api/services/webapp_auth_service.py b/api/services/webapp_auth_service.py
index 5ca0b63001..eaea79af2f 100644
--- a/api/services/webapp_auth_service.py
+++ b/api/services/webapp_auth_service.py
@@ -3,6 +3,7 @@ import secrets
from datetime import UTC, datetime, timedelta
from typing import Any
+from sqlalchemy import select
from werkzeug.exceptions import NotFound, Unauthorized
from configs import dify_config
@@ -92,10 +93,10 @@ class WebAppAuthService:
@classmethod
def create_end_user(cls, app_code, email) -> EndUser:
- site = db.session.query(Site).where(Site.code == app_code).first()
+ site = db.session.scalar(select(Site).where(Site.code == app_code).limit(1))
if not site:
raise NotFound("Site not found.")
- app_model = db.session.query(App).where(App.id == site.app_id).first()
+ app_model = db.session.get(App, site.app_id)
if not app_model:
raise NotFound("App not found.")
end_user = EndUser(
diff --git a/api/services/website_service.py b/api/services/website_service.py
index b2917ba152..6a521a9cc0 100644
--- a/api/services/website_service.py
+++ b/api/services/website_service.py
@@ -9,12 +9,23 @@ import httpx
from flask_login import current_user
from core.helper import encrypter
+from core.helper.http_client_pooling import get_pooled_http_client
from core.rag.extractor.firecrawl.firecrawl_app import CrawlStatusResponse, FirecrawlApp, FirecrawlDocumentData
from core.rag.extractor.watercrawl.provider import WaterCrawlProvider
from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
from services.datasource_provider_service import DatasourceProviderService
+# Reuse pooled HTTP clients to avoid creating new connections per request and ease testing.
+_jina_http_client: httpx.Client = get_pooled_http_client(
+ "website:jinareader",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
+_adaptive_http_client: httpx.Client = get_pooled_http_client(
+ "website:adaptivecrawl",
+ lambda: httpx.Client(limits=httpx.Limits(max_keepalive_connections=50, max_connections=100)),
+)
+
@dataclass
class CrawlOptions:
@@ -225,7 +236,7 @@ class WebsiteService:
@classmethod
def _crawl_with_jinareader(cls, request: CrawlRequest, api_key: str) -> dict[str, Any]:
if not request.options.crawl_sub_pages:
- response = httpx.get(
+ response = _jina_http_client.get(
f"https://r.jina.ai/{request.url}",
headers={"Accept": "application/json", "Authorization": f"Bearer {api_key}"},
)
@@ -233,7 +244,7 @@ class WebsiteService:
raise ValueError("Failed to crawl:")
return {"status": "active", "data": response.json().get("data")}
else:
- response = httpx.post(
+ response = _adaptive_http_client.post(
"https://adaptivecrawl-kir3wx7b3a-uc.a.run.app",
json={
"url": request.url,
@@ -296,7 +307,7 @@ class WebsiteService:
@classmethod
def _get_jinareader_status(cls, job_id: str, api_key: str) -> dict[str, Any]:
- response = httpx.post(
+ response = _adaptive_http_client.post(
"https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app",
headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"},
json={"taskId": job_id},
@@ -312,7 +323,7 @@ class WebsiteService:
}
if crawl_status_data["status"] == "completed":
- response = httpx.post(
+ response = _adaptive_http_client.post(
"https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app",
headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"},
json={"taskId": job_id, "urls": list(data.get("processed", {}).keys())},
@@ -374,7 +385,7 @@ class WebsiteService:
@classmethod
def _get_jinareader_url_data(cls, job_id: str, url: str, api_key: str) -> dict[str, Any] | None:
if not job_id:
- response = httpx.get(
+ response = _jina_http_client.get(
f"https://r.jina.ai/{url}",
headers={"Accept": "application/json", "Authorization": f"Bearer {api_key}"},
)
@@ -383,7 +394,7 @@ class WebsiteService:
return dict(response.json().get("data", {}))
else:
# Get crawl status first
- status_response = httpx.post(
+ status_response = _adaptive_http_client.post(
"https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app",
headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"},
json={"taskId": job_id},
@@ -393,7 +404,7 @@ class WebsiteService:
raise ValueError("Crawl job is not completed")
# Get processed data
- data_response = httpx.post(
+ data_response = _adaptive_http_client.post(
"https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app",
headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"},
json={"taskId": job_id, "urls": list(status_data.get("processed", {}).keys())},
diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py
index 31367f72fa..c1ad3f33ad 100644
--- a/api/services/workflow/workflow_converter.py
+++ b/api/services/workflow/workflow_converter.py
@@ -1,12 +1,12 @@
import json
-from typing import Any
+from typing import Any, TypedDict
from graphon.file import FileUploadConfig
from graphon.model_runtime.entities.llm_entities import LLMMode
from graphon.model_runtime.utils.encoders import jsonable_encoder
from graphon.nodes import BuiltinNodeTypes
from graphon.variables.input_entities import VariableEntity
-from typing_extensions import TypedDict
+from sqlalchemy import select
from core.app.app_config.entities import (
DatasetEntity,
@@ -648,10 +648,10 @@ class WorkflowConverter:
:param api_based_extension_id: api based extension id
:return:
"""
- api_based_extension = (
- db.session.query(APIBasedExtension)
+ api_based_extension = db.session.scalar(
+ select(APIBasedExtension)
.where(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id)
- .first()
+ .limit(1)
)
if not api_based_extension:
diff --git a/api/services/workflow_app_service.py b/api/services/workflow_app_service.py
index bf178e8a44..b5ab176ad2 100644
--- a/api/services/workflow_app_service.py
+++ b/api/services/workflow_app_service.py
@@ -1,12 +1,11 @@
import json
import uuid
from datetime import datetime
-from typing import Any
+from typing import Any, TypedDict
from graphon.enums import WorkflowExecutionStatus
from sqlalchemy import and_, func, or_, select
from sqlalchemy.orm import Session
-from typing_extensions import TypedDict
from models import Account, App, EndUser, TenantAccountJoin, WorkflowAppLog, WorkflowArchiveLog, WorkflowRun
from models.enums import AppTriggerType, CreatorUserRole
diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py
index 98e338a2d4..9ed60bf86b 100644
--- a/api/services/workflow_draft_variable_service.py
+++ b/api/services/workflow_draft_variable_service.py
@@ -800,8 +800,8 @@ class DraftVariableSaver:
# technical variables from being exposed in the draft environment, particularly those
# that aren't meant to be directly edited or viewed by users.
_EXCLUDE_VARIABLE_NAMES_MAPPING: dict[NodeType, frozenset[str]] = {
- BuiltinNodeTypes.LLM: frozenset(["finish_reason"]),
- BuiltinNodeTypes.LOOP: frozenset(["loop_round"]),
+ BuiltinNodeTypes.LLM: frozenset(("finish_reason",)),
+ BuiltinNodeTypes.LOOP: frozenset(("loop_round",)),
}
# Database session used for persisting draft variables.
diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py
index 3b3ee6dd92..8f365c7c51 100644
--- a/api/services/workflow_service.py
+++ b/api/services/workflow_service.py
@@ -138,14 +138,14 @@ class WorkflowService:
if workflow_id:
return self.get_published_workflow_by_id(app_model, workflow_id)
# fetch draft workflow by app_model
- workflow = (
- db.session.query(Workflow)
+ workflow = db.session.scalar(
+ select(Workflow)
.where(
Workflow.tenant_id == app_model.tenant_id,
Workflow.app_id == app_model.id,
Workflow.version == Workflow.VERSION_DRAFT,
)
- .first()
+ .limit(1)
)
# return draft workflow
@@ -155,14 +155,14 @@ class WorkflowService:
"""
fetch published workflow by workflow_id
"""
- workflow = (
- db.session.query(Workflow)
+ workflow = db.session.scalar(
+ select(Workflow)
.where(
Workflow.tenant_id == app_model.tenant_id,
Workflow.app_id == app_model.id,
Workflow.id == workflow_id,
)
- .first()
+ .limit(1)
)
if not workflow:
return None
@@ -182,14 +182,14 @@ class WorkflowService:
return None
# fetch published workflow by workflow_id
- workflow = (
- db.session.query(Workflow)
+ workflow = db.session.scalar(
+ select(Workflow)
.where(
Workflow.tenant_id == app_model.tenant_id,
Workflow.app_id == app_model.id,
Workflow.id == app_model.workflow_id,
)
- .first()
+ .limit(1)
)
return workflow
@@ -544,14 +544,14 @@ class WorkflowService:
# Use the same fallback logic as runtime: get the first available credential
# ordered by is_default DESC, created_at ASC (same as tool_manager.py)
- default_provider = (
- db.session.query(BuiltinToolProvider)
+ default_provider = db.session.scalar(
+ select(BuiltinToolProvider)
.where(
BuiltinToolProvider.tenant_id == tenant_id,
BuiltinToolProvider.provider == provider,
)
.order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
- .first()
+ .limit(1)
)
if not default_provider:
diff --git a/api/services/workspace_service.py b/api/services/workspace_service.py
index 84a8b03329..eb4671cfaa 100644
--- a/api/services/workspace_service.py
+++ b/api/services/workspace_service.py
@@ -1,4 +1,5 @@
from flask_login import current_user
+from sqlalchemy import select
from configs import dify_config
from enums.cloud_plan import CloudPlan
@@ -24,10 +25,10 @@ class WorkspaceService:
}
# Get role of user
- tenant_account_join = (
- db.session.query(TenantAccountJoin)
+ tenant_account_join = db.session.scalar(
+ select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == current_user.id)
- .first()
+ .limit(1)
)
assert tenant_account_join is not None, "TenantAccountJoin not found"
tenant_info["role"] = tenant_account_join.role
diff --git a/api/tasks/app_generate/workflow_execute_task.py b/api/tasks/app_generate/workflow_execute_task.py
index 489467651d..8f2f5f261e 100644
--- a/api/tasks/app_generate/workflow_execute_task.py
+++ b/api/tasks/app_generate/workflow_execute_task.py
@@ -3,7 +3,7 @@ import logging
import uuid
from collections.abc import Generator, Mapping
from enum import StrEnum
-from typing import Annotated, Any, TypeAlias, Union
+from typing import Annotated, Any
from celery import shared_task
from flask import current_app, json
@@ -68,7 +68,7 @@ def _get_user_type_descriminator(value: Any):
return None
-User: TypeAlias = Annotated[
+type User = Annotated[
(Annotated[_Account, Tag(_UserType.ACCOUNT)] | Annotated[_EndUser, Tag(_UserType.END_USER)]),
Discriminator(_get_user_type_descriminator),
]
@@ -93,7 +93,7 @@ class AppExecutionParams(BaseModel):
cls,
app_model: App,
workflow: Workflow,
- user: Union[Account, EndUser],
+ user: Account | EndUser,
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = True,
diff --git a/api/tests/integration_tests/plugin/__mock/http.py b/api/tests/integration_tests/plugin/__mock/http.py
index d5cf47e2c2..b39e4a8e76 100644
--- a/api/tests/integration_tests/plugin/__mock/http.py
+++ b/api/tests/integration_tests/plugin/__mock/http.py
@@ -4,23 +4,28 @@ from typing import Literal
import httpx
import pytest
-from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse
+from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse, PluginToolProviderEntity
from core.tools.entities.common_entities import I18nObject
-from core.tools.entities.tool_entities import ToolProviderEntity, ToolProviderIdentity
+from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin, ToolProviderIdentity
class MockedHttp:
@classmethod
- def list_tools(cls) -> list[ToolProviderEntity]:
+ def list_tools(cls) -> list[PluginToolProviderEntity]:
return [
- ToolProviderEntity(
- identity=ToolProviderIdentity(
- author="Yeuoly",
- name="Yeuoly",
- description=I18nObject(en_US="Yeuoly"),
- icon="ssss.svg",
- label=I18nObject(en_US="Yeuoly"),
- )
+ PluginToolProviderEntity(
+ provider="Yeuoly",
+ plugin_unique_identifier="langgenius/yeuoly:0.0.1@mock",
+ plugin_id="mock-plugin",
+ declaration=ToolProviderEntityWithPlugin(
+ identity=ToolProviderIdentity(
+ author="Yeuoly",
+ name="Yeuoly",
+ description=I18nObject(en_US="Yeuoly"),
+ icon="ssss.svg",
+ label=I18nObject(en_US="Yeuoly"),
+ )
+ ),
)
]
@@ -33,7 +38,7 @@ class MockedHttp:
"""
request = httpx.Request(method, url)
if url.endswith("/tools"):
- content = PluginDaemonBasicResponse[list[ToolProviderEntity]](
+ content = PluginDaemonBasicResponse[list[PluginToolProviderEntity]](
code=0, message="success", data=cls.list_tools()
).model_dump_json()
else:
diff --git a/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py b/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py
index b6d583e338..9a4450a454 100644
--- a/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py
+++ b/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py
@@ -1,5 +1,6 @@
from core.plugin.impl.tool import PluginToolManager
-from tests.integration_tests.plugin.__mock.http import setup_http_mock
+
+pytest_plugins = ("tests.integration_tests.plugin.__mock.http",)
def test_fetch_all_plugin_tools(setup_http_mock):
diff --git a/api/tests/integration_tests/services/plugin/__init__.py b/api/tests/integration_tests/services/plugin/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/integration_tests/services/plugin/test_plugin_lifecycle.py b/api/tests/integration_tests/services/plugin/test_plugin_lifecycle.py
new file mode 100644
index 0000000000..951a5ab4b4
--- /dev/null
+++ b/api/tests/integration_tests/services/plugin/test_plugin_lifecycle.py
@@ -0,0 +1,182 @@
+import pytest
+from sqlalchemy import delete
+
+from core.db.session_factory import session_factory
+from models import Tenant
+from models.account import TenantPluginAutoUpgradeStrategy, TenantPluginPermission
+from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+from services.plugin.plugin_permission_service import PluginPermissionService
+
+
+@pytest.fixture
+def tenant(flask_req_ctx):
+ with session_factory.create_session() as session:
+ t = Tenant(name="plugin_it_tenant")
+ session.add(t)
+ session.commit()
+ tenant_id = t.id
+
+ yield tenant_id
+
+ with session_factory.create_session() as session:
+ session.execute(delete(TenantPluginPermission).where(TenantPluginPermission.tenant_id == tenant_id))
+ session.execute(
+ delete(TenantPluginAutoUpgradeStrategy).where(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id)
+ )
+ session.execute(delete(Tenant).where(Tenant.id == tenant_id))
+ session.commit()
+
+
+class TestPluginPermissionLifecycle:
+ def test_get_returns_none_for_new_tenant(self, tenant):
+ assert PluginPermissionService.get_permission(tenant) is None
+
+ def test_change_creates_row(self, tenant):
+ result = PluginPermissionService.change_permission(
+ tenant,
+ TenantPluginPermission.InstallPermission.ADMINS,
+ TenantPluginPermission.DebugPermission.EVERYONE,
+ )
+ assert result is True
+
+ perm = PluginPermissionService.get_permission(tenant)
+ assert perm is not None
+ assert perm.install_permission == TenantPluginPermission.InstallPermission.ADMINS
+ assert perm.debug_permission == TenantPluginPermission.DebugPermission.EVERYONE
+
+ def test_change_updates_existing_row(self, tenant):
+ PluginPermissionService.change_permission(
+ tenant,
+ TenantPluginPermission.InstallPermission.ADMINS,
+ TenantPluginPermission.DebugPermission.NOBODY,
+ )
+ PluginPermissionService.change_permission(
+ tenant,
+ TenantPluginPermission.InstallPermission.EVERYONE,
+ TenantPluginPermission.DebugPermission.ADMINS,
+ )
+ perm = PluginPermissionService.get_permission(tenant)
+ assert perm is not None
+ assert perm.install_permission == TenantPluginPermission.InstallPermission.EVERYONE
+ assert perm.debug_permission == TenantPluginPermission.DebugPermission.ADMINS
+
+ with session_factory.create_session() as session:
+ count = session.query(TenantPluginPermission).where(TenantPluginPermission.tenant_id == tenant).count()
+ assert count == 1
+
+
+class TestPluginAutoUpgradeLifecycle:
+ def test_get_returns_none_for_new_tenant(self, tenant):
+ assert PluginAutoUpgradeService.get_strategy(tenant) is None
+
+ def test_change_creates_row(self, tenant):
+ result = PluginAutoUpgradeService.change_strategy(
+ tenant,
+ strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
+ upgrade_time_of_day=3,
+ upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
+ exclude_plugins=[],
+ include_plugins=[],
+ )
+ assert result is True
+
+ strategy = PluginAutoUpgradeService.get_strategy(tenant)
+ assert strategy is not None
+ assert strategy.strategy_setting == TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST
+ assert strategy.upgrade_time_of_day == 3
+
+ def test_change_updates_existing_row(self, tenant):
+ PluginAutoUpgradeService.change_strategy(
+ tenant,
+ strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
+ upgrade_time_of_day=0,
+ upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
+ exclude_plugins=[],
+ include_plugins=[],
+ )
+ PluginAutoUpgradeService.change_strategy(
+ tenant,
+ strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
+ upgrade_time_of_day=12,
+ upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL,
+ exclude_plugins=[],
+ include_plugins=["plugin-a"],
+ )
+
+ strategy = PluginAutoUpgradeService.get_strategy(tenant)
+ assert strategy is not None
+ assert strategy.strategy_setting == TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST
+ assert strategy.upgrade_time_of_day == 12
+ assert strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL
+ assert strategy.include_plugins == ["plugin-a"]
+
+ def test_exclude_plugin_creates_strategy_when_none_exists(self, tenant):
+ PluginAutoUpgradeService.exclude_plugin(tenant, "my-plugin")
+
+ strategy = PluginAutoUpgradeService.get_strategy(tenant)
+ assert strategy is not None
+ assert strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE
+ assert "my-plugin" in strategy.exclude_plugins
+
+ def test_exclude_plugin_appends_in_exclude_mode(self, tenant):
+ PluginAutoUpgradeService.change_strategy(
+ tenant,
+ strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
+ upgrade_time_of_day=0,
+ upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE,
+ exclude_plugins=["existing"],
+ include_plugins=[],
+ )
+ PluginAutoUpgradeService.exclude_plugin(tenant, "new-plugin")
+
+ strategy = PluginAutoUpgradeService.get_strategy(tenant)
+ assert strategy is not None
+ assert "existing" in strategy.exclude_plugins
+ assert "new-plugin" in strategy.exclude_plugins
+
+ def test_exclude_plugin_dedup_in_exclude_mode(self, tenant):
+ PluginAutoUpgradeService.change_strategy(
+ tenant,
+ strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
+ upgrade_time_of_day=0,
+ upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE,
+ exclude_plugins=["same-plugin"],
+ include_plugins=[],
+ )
+ PluginAutoUpgradeService.exclude_plugin(tenant, "same-plugin")
+
+ strategy = PluginAutoUpgradeService.get_strategy(tenant)
+ assert strategy is not None
+ assert strategy.exclude_plugins.count("same-plugin") == 1
+
+ def test_exclude_from_partial_mode_removes_from_include(self, tenant):
+ PluginAutoUpgradeService.change_strategy(
+ tenant,
+ strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
+ upgrade_time_of_day=0,
+ upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL,
+ exclude_plugins=[],
+ include_plugins=["p1", "p2"],
+ )
+ PluginAutoUpgradeService.exclude_plugin(tenant, "p1")
+
+ strategy = PluginAutoUpgradeService.get_strategy(tenant)
+ assert strategy is not None
+ assert "p1" not in strategy.include_plugins
+ assert "p2" in strategy.include_plugins
+
+ def test_exclude_from_all_mode_switches_to_exclude(self, tenant):
+ PluginAutoUpgradeService.change_strategy(
+ tenant,
+ strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
+ upgrade_time_of_day=0,
+ upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
+ exclude_plugins=[],
+ include_plugins=[],
+ )
+ PluginAutoUpgradeService.exclude_plugin(tenant, "excluded-plugin")
+
+ strategy = PluginAutoUpgradeService.get_strategy(tenant)
+ assert strategy is not None
+ assert strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE
+ assert "excluded-plugin" in strategy.exclude_plugins
diff --git a/api/tests/integration_tests/services/retention/__init__.py b/api/tests/integration_tests/services/retention/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/integration_tests/services/retention/test_messages_clean_service.py b/api/tests/integration_tests/services/retention/test_messages_clean_service.py
new file mode 100644
index 0000000000..348bb0af4a
--- /dev/null
+++ b/api/tests/integration_tests/services/retention/test_messages_clean_service.py
@@ -0,0 +1,348 @@
+import datetime
+import math
+import uuid
+
+import pytest
+from sqlalchemy import delete
+
+from core.db.session_factory import session_factory
+from models import Tenant
+from models.enums import FeedbackFromSource, FeedbackRating
+from models.model import (
+ App,
+ Conversation,
+ Message,
+ MessageAnnotation,
+ MessageFeedback,
+)
+from services.retention.conversation.messages_clean_policy import BillingDisabledPolicy
+from services.retention.conversation.messages_clean_service import MessagesCleanService
+
+_NOW = datetime.datetime(2026, 1, 15, 12, 0, 0, tzinfo=datetime.UTC)
+_OLD = _NOW - datetime.timedelta(days=60)
+_VERY_OLD = _NOW - datetime.timedelta(days=90)
+_RECENT = _NOW - datetime.timedelta(days=5)
+
+_WINDOW_START = _VERY_OLD - datetime.timedelta(hours=1)
+_WINDOW_END = _RECENT + datetime.timedelta(hours=1)
+
+_DEFAULT_BATCH_SIZE = 100
+_PAGINATION_MESSAGE_COUNT = 25
+_PAGINATION_BATCH_SIZE = 8
+
+
+@pytest.fixture
+def tenant_and_app(flask_req_ctx):
+ """Creates a Tenant, App and Conversation for the test and cleans up after."""
+ with session_factory.create_session() as session:
+ tenant = Tenant(name="retention_it_tenant")
+ session.add(tenant)
+ session.flush()
+
+ app = App(
+ tenant_id=tenant.id,
+ name="Retention IT App",
+ mode="chat",
+ enable_site=True,
+ enable_api=True,
+ )
+ session.add(app)
+ session.flush()
+
+ conv = Conversation(
+ app_id=app.id,
+ mode="chat",
+ name="test_conv",
+ status="normal",
+ from_source="console",
+ _inputs={},
+ )
+ session.add(conv)
+ session.commit()
+
+ tenant_id = tenant.id
+ app_id = app.id
+ conv_id = conv.id
+
+ yield {"tenant_id": tenant_id, "app_id": app_id, "conversation_id": conv_id}
+
+ with session_factory.create_session() as session:
+ session.execute(delete(Conversation).where(Conversation.id == conv_id))
+ session.execute(delete(App).where(App.id == app_id))
+ session.execute(delete(Tenant).where(Tenant.id == tenant_id))
+ session.commit()
+
+
+def _make_message(app_id: str, conversation_id: str, created_at: datetime.datetime) -> Message:
+ return Message(
+ app_id=app_id,
+ conversation_id=conversation_id,
+ query="test",
+ message=[{"text": "hello"}],
+ answer="world",
+ message_tokens=1,
+ message_unit_price=0,
+ answer_tokens=1,
+ answer_unit_price=0,
+ from_source="console",
+ currency="USD",
+ _inputs={},
+ created_at=created_at,
+ )
+
+
+class TestMessagesCleanServiceIntegration:
+ @pytest.fixture
+ def seed_messages(self, tenant_and_app):
+ """Seeds one message at each of _VERY_OLD, _OLD, and _RECENT.
+ Yields a semantic mapping keyed by age label.
+ """
+ data = tenant_and_app
+ app_id = data["app_id"]
+ conv_id = data["conversation_id"]
+ # Ordered tuple of (label, timestamp) for deterministic seeding
+ timestamps = [
+ ("very_old", _VERY_OLD),
+ ("old", _OLD),
+ ("recent", _RECENT),
+ ]
+ msg_ids: dict[str, str] = {}
+
+ with session_factory.create_session() as session:
+ for label, ts in timestamps:
+ msg = _make_message(app_id, conv_id, ts)
+ session.add(msg)
+ session.flush()
+ msg_ids[label] = msg.id
+ session.commit()
+
+ yield {"msg_ids": msg_ids, **data}
+
+ with session_factory.create_session() as session:
+ session.execute(
+ delete(Message)
+ .where(Message.id.in_(list(msg_ids.values())))
+ .execution_options(synchronize_session=False)
+ )
+ session.commit()
+
+ @pytest.fixture
+ def paginated_seed_messages(self, tenant_and_app):
+ """Seeds multiple messages separated by 1-second increments starting at _OLD."""
+ data = tenant_and_app
+ app_id = data["app_id"]
+ conv_id = data["conversation_id"]
+ msg_ids: list[str] = []
+
+ with session_factory.create_session() as session:
+ for i in range(_PAGINATION_MESSAGE_COUNT):
+ ts = _OLD + datetime.timedelta(seconds=i)
+ msg = _make_message(app_id, conv_id, ts)
+ session.add(msg)
+ session.flush()
+ msg_ids.append(msg.id)
+ session.commit()
+
+ yield {"msg_ids": msg_ids, **data}
+
+ with session_factory.create_session() as session:
+ session.execute(delete(Message).where(Message.id.in_(msg_ids)).execution_options(synchronize_session=False))
+ session.commit()
+
+ @pytest.fixture
+ def cascade_test_data(self, tenant_and_app):
+ """Seeds one Message with an associated Feedback and Annotation."""
+ data = tenant_and_app
+ app_id = data["app_id"]
+ conv_id = data["conversation_id"]
+
+ with session_factory.create_session() as session:
+ msg = _make_message(app_id, conv_id, _OLD)
+ session.add(msg)
+ session.flush()
+
+ feedback = MessageFeedback(
+ app_id=app_id,
+ conversation_id=conv_id,
+ message_id=msg.id,
+ rating=FeedbackRating.LIKE,
+ from_source=FeedbackFromSource.USER,
+ )
+ annotation = MessageAnnotation(
+ app_id=app_id,
+ conversation_id=conv_id,
+ message_id=msg.id,
+ question="q",
+ content="a",
+ account_id=str(uuid.uuid4()),
+ )
+ session.add_all([feedback, annotation])
+ session.commit()
+
+ msg_id = msg.id
+ fb_id = feedback.id
+ ann_id = annotation.id
+
+ yield {"msg_id": msg_id, "fb_id": fb_id, "ann_id": ann_id, **data}
+
+ with session_factory.create_session() as session:
+ session.execute(delete(MessageAnnotation).where(MessageAnnotation.id == ann_id))
+ session.execute(delete(MessageFeedback).where(MessageFeedback.id == fb_id))
+ session.execute(delete(Message).where(Message.id == msg_id))
+ session.commit()
+
+ def test_dry_run_does_not_delete(self, seed_messages):
+ """Dry-run must count eligible rows without deleting any of them."""
+ data = seed_messages
+ msg_ids = data["msg_ids"]
+ all_ids = list(msg_ids.values())
+
+ svc = MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=_WINDOW_START,
+ end_before=_WINDOW_END,
+ batch_size=_DEFAULT_BATCH_SIZE,
+ dry_run=True,
+ )
+ stats = svc.run()
+
+ assert stats["filtered_messages"] == len(all_ids)
+ assert stats["total_deleted"] == 0
+
+ with session_factory.create_session() as session:
+ remaining = session.query(Message).where(Message.id.in_(all_ids)).count()
+ assert remaining == len(all_ids)
+
+ def test_billing_disabled_deletes_all_in_range(self, seed_messages):
+ """All 3 seeded messages fall within the window and must be deleted."""
+ data = seed_messages
+ msg_ids = data["msg_ids"]
+ all_ids = list(msg_ids.values())
+
+ svc = MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=_WINDOW_START,
+ end_before=_WINDOW_END,
+ batch_size=_DEFAULT_BATCH_SIZE,
+ dry_run=False,
+ )
+ stats = svc.run()
+
+ assert stats["total_deleted"] == len(all_ids)
+
+ with session_factory.create_session() as session:
+ remaining = session.query(Message).where(Message.id.in_(all_ids)).count()
+ assert remaining == 0
+
+ def test_start_from_filters_correctly(self, seed_messages):
+ """Only the message at _OLD falls within the narrow ±1 h window."""
+ data = seed_messages
+ msg_ids = data["msg_ids"]
+
+ start = _OLD - datetime.timedelta(hours=1)
+ end = _OLD + datetime.timedelta(hours=1)
+
+ svc = MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=start,
+ end_before=end,
+ batch_size=_DEFAULT_BATCH_SIZE,
+ )
+ stats = svc.run()
+
+ assert stats["total_deleted"] == 1
+
+ with session_factory.create_session() as session:
+ all_ids = list(msg_ids.values())
+ remaining_ids = {r[0] for r in session.query(Message.id).where(Message.id.in_(all_ids)).all()}
+
+ assert msg_ids["old"] not in remaining_ids
+ assert msg_ids["very_old"] in remaining_ids
+ assert msg_ids["recent"] in remaining_ids
+
+ def test_cursor_pagination_across_batches(self, paginated_seed_messages):
+ """Messages must be deleted across multiple batches."""
+ data = paginated_seed_messages
+ msg_ids = data["msg_ids"]
+
+ # _OLD is the earliest; the last one is _OLD + (_PAGINATION_MESSAGE_COUNT - 1) s.
+ pagination_window_start = _OLD - datetime.timedelta(seconds=1)
+ pagination_window_end = _OLD + datetime.timedelta(seconds=_PAGINATION_MESSAGE_COUNT)
+
+ svc = MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=pagination_window_start,
+ end_before=pagination_window_end,
+ batch_size=_PAGINATION_BATCH_SIZE,
+ )
+ stats = svc.run()
+
+ assert stats["total_deleted"] == _PAGINATION_MESSAGE_COUNT
+ expected_batches = math.ceil(_PAGINATION_MESSAGE_COUNT / _PAGINATION_BATCH_SIZE)
+ assert stats["batches"] >= expected_batches
+
+ with session_factory.create_session() as session:
+ remaining = session.query(Message).where(Message.id.in_(msg_ids)).count()
+ assert remaining == 0
+
+ def test_no_messages_in_range_returns_empty_stats(self, seed_messages):
+ """A window entirely in the future must yield zero matches."""
+ far_future = _NOW + datetime.timedelta(days=365)
+ even_further = far_future + datetime.timedelta(days=1)
+
+ svc = MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=far_future,
+ end_before=even_further,
+ batch_size=_DEFAULT_BATCH_SIZE,
+ )
+ stats = svc.run()
+
+ assert stats["total_messages"] == 0
+ assert stats["total_deleted"] == 0
+
+ def test_relation_cascade_deletes(self, cascade_test_data):
+ """Deleting a Message must cascade to its Feedback and Annotation rows."""
+ data = cascade_test_data
+ msg_id = data["msg_id"]
+ fb_id = data["fb_id"]
+ ann_id = data["ann_id"]
+
+ svc = MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=_OLD - datetime.timedelta(hours=1),
+ end_before=_OLD + datetime.timedelta(hours=1),
+ batch_size=_DEFAULT_BATCH_SIZE,
+ )
+ stats = svc.run()
+
+ assert stats["total_deleted"] == 1
+
+ with session_factory.create_session() as session:
+ assert session.query(Message).where(Message.id == msg_id).count() == 0
+ assert session.query(MessageFeedback).where(MessageFeedback.id == fb_id).count() == 0
+ assert session.query(MessageAnnotation).where(MessageAnnotation.id == ann_id).count() == 0
+
+ def test_factory_from_time_range_validation(self):
+ with pytest.raises(ValueError, match="start_from"):
+ MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=_NOW,
+ end_before=_OLD,
+ )
+
+ def test_factory_from_days_validation(self):
+ with pytest.raises(ValueError, match="days"):
+ MessagesCleanService.from_days(
+ policy=BillingDisabledPolicy(),
+ days=-1,
+ )
+
+ def test_factory_batch_size_validation(self):
+ with pytest.raises(ValueError, match="batch_size"):
+ MessagesCleanService.from_time_range(
+ policy=BillingDisabledPolicy(),
+ start_from=_OLD,
+ end_before=_NOW,
+ batch_size=0,
+ )
diff --git a/api/tests/integration_tests/services/retention/test_workflow_run_archiver.py b/api/tests/integration_tests/services/retention/test_workflow_run_archiver.py
new file mode 100644
index 0000000000..5728eacdfb
--- /dev/null
+++ b/api/tests/integration_tests/services/retention/test_workflow_run_archiver.py
@@ -0,0 +1,177 @@
+import datetime
+import io
+import json
+import uuid
+import zipfile
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from services.retention.workflow_run.archive_paid_plan_workflow_run import (
+ ArchiveSummary,
+ WorkflowRunArchiver,
+)
+from services.retention.workflow_run.constants import ARCHIVE_SCHEMA_VERSION
+
+
+class TestWorkflowRunArchiverInit:
+ def test_start_from_without_end_before_raises(self):
+ with pytest.raises(ValueError, match="start_from and end_before must be provided together"):
+ WorkflowRunArchiver(start_from=datetime.datetime(2025, 1, 1))
+
+ def test_end_before_without_start_from_raises(self):
+ with pytest.raises(ValueError, match="start_from and end_before must be provided together"):
+ WorkflowRunArchiver(end_before=datetime.datetime(2025, 1, 1))
+
+ def test_start_equals_end_raises(self):
+ ts = datetime.datetime(2025, 1, 1)
+ with pytest.raises(ValueError, match="start_from must be earlier than end_before"):
+ WorkflowRunArchiver(start_from=ts, end_before=ts)
+
+ def test_start_after_end_raises(self):
+ with pytest.raises(ValueError, match="start_from must be earlier than end_before"):
+ WorkflowRunArchiver(
+ start_from=datetime.datetime(2025, 6, 1),
+ end_before=datetime.datetime(2025, 1, 1),
+ )
+
+ def test_workers_zero_raises(self):
+ with pytest.raises(ValueError, match="workers must be at least 1"):
+ WorkflowRunArchiver(workers=0)
+
+ def test_valid_init_defaults(self):
+ archiver = WorkflowRunArchiver(days=30, batch_size=50)
+ assert archiver.days == 30
+ assert archiver.batch_size == 50
+ assert archiver.dry_run is False
+ assert archiver.delete_after_archive is False
+ assert archiver.start_from is None
+
+ def test_valid_init_with_time_range(self):
+ start = datetime.datetime(2025, 1, 1)
+ end = datetime.datetime(2025, 6, 1)
+ archiver = WorkflowRunArchiver(start_from=start, end_before=end, workers=2)
+ assert archiver.start_from is not None
+ assert archiver.end_before is not None
+ assert archiver.workers == 2
+
+
+class TestBuildArchiveBundle:
+ def test_bundle_contains_manifest_and_all_tables(self):
+ archiver = WorkflowRunArchiver(days=90)
+
+ manifest_data = json.dumps({"schema_version": ARCHIVE_SCHEMA_VERSION}).encode("utf-8")
+ table_payloads = dict.fromkeys(archiver.ARCHIVED_TABLES, b"")
+
+ bundle_bytes = archiver._build_archive_bundle(manifest_data, table_payloads)
+
+ with zipfile.ZipFile(io.BytesIO(bundle_bytes), "r") as zf:
+ names = set(zf.namelist())
+ assert "manifest.json" in names
+ for table in archiver.ARCHIVED_TABLES:
+ assert f"{table}.jsonl" in names, f"Missing {table}.jsonl in bundle"
+
+ def test_bundle_missing_table_payload_raises(self):
+ archiver = WorkflowRunArchiver(days=90)
+ manifest_data = b"{}"
+ incomplete_payloads = {archiver.ARCHIVED_TABLES[0]: b"data"}
+
+ with pytest.raises(ValueError, match="Missing archive payload"):
+ archiver._build_archive_bundle(manifest_data, incomplete_payloads)
+
+
+class TestGenerateManifest:
+ def test_manifest_structure(self):
+ archiver = WorkflowRunArchiver(days=90)
+ from services.retention.workflow_run.archive_paid_plan_workflow_run import TableStats
+
+ run = MagicMock()
+ run.id = str(uuid.uuid4())
+ run.tenant_id = str(uuid.uuid4())
+ run.app_id = str(uuid.uuid4())
+ run.workflow_id = str(uuid.uuid4())
+ run.created_at = datetime.datetime(2025, 3, 15, 10, 0, 0)
+
+ stats = [
+ TableStats(table_name="workflow_runs", row_count=1, checksum="abc123", size_bytes=512),
+ TableStats(table_name="workflow_app_logs", row_count=2, checksum="def456", size_bytes=1024),
+ ]
+
+ manifest = archiver._generate_manifest(run, stats)
+
+ assert manifest["schema_version"] == ARCHIVE_SCHEMA_VERSION
+ assert manifest["workflow_run_id"] == run.id
+ assert manifest["tenant_id"] == run.tenant_id
+ assert manifest["app_id"] == run.app_id
+ assert "tables" in manifest
+ assert manifest["tables"]["workflow_runs"]["row_count"] == 1
+ assert manifest["tables"]["workflow_runs"]["checksum"] == "abc123"
+ assert manifest["tables"]["workflow_app_logs"]["row_count"] == 2
+
+
+class TestFilterPaidTenants:
+ def test_all_tenants_paid_when_billing_disabled(self):
+ archiver = WorkflowRunArchiver(days=90)
+ tenant_ids = {"t1", "t2", "t3"}
+
+ with patch("services.retention.workflow_run.archive_paid_plan_workflow_run.dify_config") as cfg:
+ cfg.BILLING_ENABLED = False
+ result = archiver._filter_paid_tenants(tenant_ids)
+
+ assert result == tenant_ids
+
+ def test_empty_tenants_returns_empty(self):
+ archiver = WorkflowRunArchiver(days=90)
+
+ with patch("services.retention.workflow_run.archive_paid_plan_workflow_run.dify_config") as cfg:
+ cfg.BILLING_ENABLED = True
+ result = archiver._filter_paid_tenants(set())
+
+ assert result == set()
+
+ def test_only_paid_plans_returned(self):
+ archiver = WorkflowRunArchiver(days=90)
+
+ mock_bulk = {
+ "t1": {"plan": "professional"},
+ "t2": {"plan": "sandbox"},
+ "t3": {"plan": "team"},
+ }
+
+ with (
+ patch("services.retention.workflow_run.archive_paid_plan_workflow_run.dify_config") as cfg,
+ patch("services.retention.workflow_run.archive_paid_plan_workflow_run.BillingService") as billing,
+ ):
+ cfg.BILLING_ENABLED = True
+ billing.get_plan_bulk_with_cache.return_value = mock_bulk
+ result = archiver._filter_paid_tenants({"t1", "t2", "t3"})
+
+ assert "t1" in result
+ assert "t3" in result
+ assert "t2" not in result
+
+ def test_billing_api_failure_returns_empty(self):
+ archiver = WorkflowRunArchiver(days=90)
+
+ with (
+ patch("services.retention.workflow_run.archive_paid_plan_workflow_run.dify_config") as cfg,
+ patch("services.retention.workflow_run.archive_paid_plan_workflow_run.BillingService") as billing,
+ ):
+ cfg.BILLING_ENABLED = True
+ billing.get_plan_bulk_with_cache.side_effect = RuntimeError("API down")
+ result = archiver._filter_paid_tenants({"t1"})
+
+ assert result == set()
+
+
+class TestDryRunArchive:
+ @patch("services.retention.workflow_run.archive_paid_plan_workflow_run.get_archive_storage")
+ def test_dry_run_does_not_call_storage(self, mock_get_storage, flask_req_ctx):
+ archiver = WorkflowRunArchiver(days=90, dry_run=True)
+
+ with patch.object(archiver, "_get_runs_batch", return_value=[]):
+ summary = archiver.run()
+
+ mock_get_storage.assert_not_called()
+ assert isinstance(summary, ArchiveSummary)
+ assert summary.runs_failed == 0
diff --git a/api/tests/integration_tests/tools/api_tool/test_api_tool.py b/api/tests/integration_tests/tools/api_tool/test_api_tool.py
index e637530265..9079aa7d6d 100644
--- a/api/tests/integration_tests/tools/api_tool/test_api_tool.py
+++ b/api/tests/integration_tests/tools/api_tool/test_api_tool.py
@@ -3,7 +3,8 @@ from core.tools.custom_tool.tool import ApiTool
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_bundle import ApiToolBundle
from core.tools.entities.tool_entities import ToolEntity, ToolIdentity
-from tests.integration_tests.tools.__mock.http import setup_http_mock
+
+pytest_plugins = ("tests.integration_tests.tools.__mock.http",)
tool_bundle = {
"server_url": "http://www.example.com/{path_param}",
diff --git a/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py b/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py
index 5dd4754e8e..0981523809 100644
--- a/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py
+++ b/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py
@@ -1,7 +1,9 @@
from core.rag.datasource.vdb.analyticdb.analyticdb_vector import AnalyticdbVector
from core.rag.datasource.vdb.analyticdb.analyticdb_vector_openapi import AnalyticdbVectorOpenAPIConfig
from core.rag.datasource.vdb.analyticdb.analyticdb_vector_sql import AnalyticdbVectorBySqlConfig
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest
+
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
class AnalyticdbVectorTest(AbstractVectorTest):
diff --git a/api/tests/integration_tests/vdb/baidu/test_baidu.py b/api/tests/integration_tests/vdb/baidu/test_baidu.py
index 25989958d9..716f88af67 100644
--- a/api/tests/integration_tests/vdb/baidu/test_baidu.py
+++ b/api/tests/integration_tests/vdb/baidu/test_baidu.py
@@ -1,6 +1,10 @@
from core.rag.datasource.vdb.baidu.baidu_vector import BaiduConfig, BaiduVector
-from tests.integration_tests.vdb.__mock.baiduvectordb import setup_baiduvectordb_mock
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text
+
+pytest_plugins = (
+ "tests.integration_tests.vdb.test_vector_store",
+ "tests.integration_tests.vdb.__mock.baiduvectordb",
+)
class BaiduVectorTest(AbstractVectorTest):
diff --git a/api/tests/integration_tests/vdb/chroma/test_chroma.py b/api/tests/integration_tests/vdb/chroma/test_chroma.py
index ac7b5cbda4..52beba9979 100644
--- a/api/tests/integration_tests/vdb/chroma/test_chroma.py
+++ b/api/tests/integration_tests/vdb/chroma/test_chroma.py
@@ -4,9 +4,10 @@ from core.rag.datasource.vdb.chroma.chroma_vector import ChromaConfig, ChromaVec
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class ChromaVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/couchbase/test_couchbase.py b/api/tests/integration_tests/vdb/couchbase/test_couchbase.py
index eef1ee4e75..0371f04233 100644
--- a/api/tests/integration_tests/vdb/couchbase/test_couchbase.py
+++ b/api/tests/integration_tests/vdb/couchbase/test_couchbase.py
@@ -4,9 +4,10 @@ import time
from core.rag.datasource.vdb.couchbase.couchbase_vector import CouchbaseConfig, CouchbaseVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
def wait_for_healthy_container(service_name="couchbase-server", timeout=300):
start_time = time.time()
diff --git a/api/tests/integration_tests/vdb/elasticsearch/test_elasticsearch.py b/api/tests/integration_tests/vdb/elasticsearch/test_elasticsearch.py
index a5ff5b9e82..970d2cce1a 100644
--- a/api/tests/integration_tests/vdb/elasticsearch/test_elasticsearch.py
+++ b/api/tests/integration_tests/vdb/elasticsearch/test_elasticsearch.py
@@ -1,9 +1,10 @@
from core.rag.datasource.vdb.elasticsearch.elasticsearch_vector import ElasticSearchConfig, ElasticSearchVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class ElasticSearchVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/hologres/test_hologres.py b/api/tests/integration_tests/vdb/hologres/test_hologres.py
index ff2be88ef1..d81e18841e 100644
--- a/api/tests/integration_tests/vdb/hologres/test_hologres.py
+++ b/api/tests/integration_tests/vdb/hologres/test_hologres.py
@@ -6,8 +6,12 @@ from holo_search_sdk.types import BaseQuantizationType, DistanceType, TokenizerT
from core.rag.datasource.vdb.hologres.hologres_vector import HologresVector, HologresVectorConfig
from core.rag.models.document import Document
-from tests.integration_tests.vdb.__mock.hologres import setup_hologres_mock
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text
+
+pytest_plugins = (
+ "tests.integration_tests.vdb.test_vector_store",
+ "tests.integration_tests.vdb.__mock.hologres",
+)
MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true"
diff --git a/api/tests/integration_tests/vdb/huawei/test_huawei_cloud.py b/api/tests/integration_tests/vdb/huawei/test_huawei_cloud.py
index 943b2bc877..01f511358a 100644
--- a/api/tests/integration_tests/vdb/huawei/test_huawei_cloud.py
+++ b/api/tests/integration_tests/vdb/huawei/test_huawei_cloud.py
@@ -1,6 +1,10 @@
from core.rag.datasource.vdb.huawei.huawei_cloud_vector import HuaweiCloudVector, HuaweiCloudVectorConfig
-from tests.integration_tests.vdb.__mock.huaweicloudvectordb import setup_client_mock
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text
+
+pytest_plugins = (
+ "tests.integration_tests.vdb.test_vector_store",
+ "tests.integration_tests.vdb.__mock.huaweicloudvectordb",
+)
class HuaweiCloudVectorTest(AbstractVectorTest):
diff --git a/api/tests/integration_tests/vdb/iris/test_iris.py b/api/tests/integration_tests/vdb/iris/test_iris.py
index 49f6857743..4b2da8387b 100644
--- a/api/tests/integration_tests/vdb/iris/test_iris.py
+++ b/api/tests/integration_tests/vdb/iris/test_iris.py
@@ -3,9 +3,10 @@
from core.rag.datasource.vdb.iris.iris_vector import IrisVector, IrisVectorConfig
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class IrisVectorTest(AbstractVectorTest):
"""Test suite for IRIS vector store implementation."""
diff --git a/api/tests/integration_tests/vdb/lindorm/test_lindorm.py b/api/tests/integration_tests/vdb/lindorm/test_lindorm.py
index 6708ab8095..b24498fdfd 100644
--- a/api/tests/integration_tests/vdb/lindorm/test_lindorm.py
+++ b/api/tests/integration_tests/vdb/lindorm/test_lindorm.py
@@ -1,7 +1,9 @@
import os
from core.rag.datasource.vdb.lindorm.lindorm_vector import LindormVectorStore, LindormVectorStoreConfig
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest
+
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
class Config:
diff --git a/api/tests/integration_tests/vdb/matrixone/test_matrixone.py b/api/tests/integration_tests/vdb/matrixone/test_matrixone.py
index c4056db63e..fe592f6699 100644
--- a/api/tests/integration_tests/vdb/matrixone/test_matrixone.py
+++ b/api/tests/integration_tests/vdb/matrixone/test_matrixone.py
@@ -1,9 +1,10 @@
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class MatrixoneVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/milvus/test_milvus.py b/api/tests/integration_tests/vdb/milvus/test_milvus.py
index 0e13f9369e..b5fc4b4d10 100644
--- a/api/tests/integration_tests/vdb/milvus/test_milvus.py
+++ b/api/tests/integration_tests/vdb/milvus/test_milvus.py
@@ -2,9 +2,10 @@ from core.rag.datasource.vdb.milvus.milvus_vector import MilvusConfig, MilvusVec
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class MilvusVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/myscale/test_myscale.py b/api/tests/integration_tests/vdb/myscale/test_myscale.py
index 55b2fde427..74cefad2af 100644
--- a/api/tests/integration_tests/vdb/myscale/test_myscale.py
+++ b/api/tests/integration_tests/vdb/myscale/test_myscale.py
@@ -1,9 +1,10 @@
from core.rag.datasource.vdb.myscale.myscale_vector import MyScaleConfig, MyScaleVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class MyScaleVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/oceanbase/test_oceanbase.py b/api/tests/integration_tests/vdb/oceanbase/test_oceanbase.py
index 2db6732354..410de2c5ad 100644
--- a/api/tests/integration_tests/vdb/oceanbase/test_oceanbase.py
+++ b/api/tests/integration_tests/vdb/oceanbase/test_oceanbase.py
@@ -6,9 +6,10 @@ from core.rag.datasource.vdb.oceanbase.oceanbase_vector import (
)
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
@pytest.fixture
def oceanbase_vector():
diff --git a/api/tests/integration_tests/vdb/opengauss/test_opengauss.py b/api/tests/integration_tests/vdb/opengauss/test_opengauss.py
index 338077bbff..78436a19ee 100644
--- a/api/tests/integration_tests/vdb/opengauss/test_opengauss.py
+++ b/api/tests/integration_tests/vdb/opengauss/test_opengauss.py
@@ -5,9 +5,10 @@ import psycopg2
from core.rag.datasource.vdb.opengauss.opengauss import OpenGauss, OpenGaussConfig
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class OpenGaussTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/oracle/test_oraclevector.py b/api/tests/integration_tests/vdb/oracle/test_oraclevector.py
index 76e8b7bccd..8920dc97eb 100644
--- a/api/tests/integration_tests/vdb/oracle/test_oraclevector.py
+++ b/api/tests/integration_tests/vdb/oracle/test_oraclevector.py
@@ -3,9 +3,10 @@ from core.rag.models.document import Document
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class OracleVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/pgvecto_rs/test_pgvecto_rs.py b/api/tests/integration_tests/vdb/pgvecto_rs/test_pgvecto_rs.py
index 6497f47deb..6210613d42 100644
--- a/api/tests/integration_tests/vdb/pgvecto_rs/test_pgvecto_rs.py
+++ b/api/tests/integration_tests/vdb/pgvecto_rs/test_pgvecto_rs.py
@@ -2,9 +2,10 @@ from core.rag.datasource.vdb.pgvecto_rs.pgvecto_rs import PGVectoRS, PgvectoRSCo
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class PGVectoRSVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/pgvector/test_pgvector.py b/api/tests/integration_tests/vdb/pgvector/test_pgvector.py
index 3d2cfde5d1..4fdeca5a3a 100644
--- a/api/tests/integration_tests/vdb/pgvector/test_pgvector.py
+++ b/api/tests/integration_tests/vdb/pgvector/test_pgvector.py
@@ -1,10 +1,10 @@
from core.rag.datasource.vdb.pgvector.pgvector import PGVector, PGVectorConfig
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- get_example_text,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class PGVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/pyvastbase/test_vastbase_vector.py b/api/tests/integration_tests/vdb/pyvastbase/test_vastbase_vector.py
index 02931fef5a..a47f13625c 100644
--- a/api/tests/integration_tests/vdb/pyvastbase/test_vastbase_vector.py
+++ b/api/tests/integration_tests/vdb/pyvastbase/test_vastbase_vector.py
@@ -1,9 +1,10 @@
from core.rag.datasource.vdb.pyvastbase.vastbase_vector import VastbaseVector, VastbaseVectorConfig
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class VastbaseVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/qdrant/test_qdrant.py b/api/tests/integration_tests/vdb/qdrant/test_qdrant.py
index a2bf10001a..709cc2e14e 100644
--- a/api/tests/integration_tests/vdb/qdrant/test_qdrant.py
+++ b/api/tests/integration_tests/vdb/qdrant/test_qdrant.py
@@ -4,9 +4,10 @@ from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig, QdrantVec
from core.rag.models.document import Document
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class QdrantVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/tablestore/test_tablestore.py b/api/tests/integration_tests/vdb/tablestore/test_tablestore.py
index aebf3fbda1..b60e26a881 100644
--- a/api/tests/integration_tests/vdb/tablestore/test_tablestore.py
+++ b/api/tests/integration_tests/vdb/tablestore/test_tablestore.py
@@ -12,9 +12,10 @@ from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_document,
get_example_text,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class TableStoreVectorTest(AbstractVectorTest):
def __init__(self, normalize_full_text_score: bool = False):
diff --git a/api/tests/integration_tests/vdb/tcvectordb/test_tencent.py b/api/tests/integration_tests/vdb/tcvectordb/test_tencent.py
index 9227bbdcd6..3d6deff2a0 100644
--- a/api/tests/integration_tests/vdb/tcvectordb/test_tencent.py
+++ b/api/tests/integration_tests/vdb/tcvectordb/test_tencent.py
@@ -1,8 +1,12 @@
from unittest.mock import MagicMock
from core.rag.datasource.vdb.tencent.tencent_vector import TencentConfig, TencentVector
-from tests.integration_tests.vdb.__mock.tcvectordb import setup_tcvectordb_mock
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text
+
+pytest_plugins = (
+ "tests.integration_tests.vdb.test_vector_store",
+ "tests.integration_tests.vdb.__mock.tcvectordb",
+)
mock_client = MagicMock()
mock_client.list_databases.return_value = [{"name": "test"}]
diff --git a/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py b/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py
index dec63c6476..14c6d1c67c 100644
--- a/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py
+++ b/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py
@@ -2,7 +2,9 @@ import pytest
from core.rag.datasource.vdb.tidb_vector.tidb_vector import TiDBVector, TiDBVectorConfig
from models.dataset import Document
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text
+
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
@pytest.fixture
diff --git a/api/tests/integration_tests/vdb/upstash/test_upstash_vector.py b/api/tests/integration_tests/vdb/upstash/test_upstash_vector.py
index 23470474ff..8cea0a05eb 100644
--- a/api/tests/integration_tests/vdb/upstash/test_upstash_vector.py
+++ b/api/tests/integration_tests/vdb/upstash/test_upstash_vector.py
@@ -1,8 +1,9 @@
from core.rag.datasource.vdb.upstash.upstash_vector import UpstashVector, UpstashVectorConfig
from core.rag.models.document import Document
-from tests.integration_tests.vdb.__mock.upstashvectordb import setup_upstashvector_mock
from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text
+pytest_plugins = ("tests.integration_tests.vdb.__mock.upstashvectordb",)
+
class UpstashVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py b/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py
index 2572012ea0..56311acd25 100644
--- a/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py
+++ b/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py
@@ -1,6 +1,10 @@
from core.rag.datasource.vdb.vikingdb.vikingdb_vector import VikingDBConfig, VikingDBVector
-from tests.integration_tests.vdb.__mock.vikingdb import setup_vikingdb_mock
-from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
+from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text
+
+pytest_plugins = (
+ "tests.integration_tests.vdb.test_vector_store",
+ "tests.integration_tests.vdb.__mock.vikingdb",
+)
class VikingDBVectorTest(AbstractVectorTest):
diff --git a/api/tests/integration_tests/vdb/weaviate/test_weaviate.py b/api/tests/integration_tests/vdb/weaviate/test_weaviate.py
index a6f55420d3..a1d9850979 100644
--- a/api/tests/integration_tests/vdb/weaviate/test_weaviate.py
+++ b/api/tests/integration_tests/vdb/weaviate/test_weaviate.py
@@ -1,9 +1,10 @@
from core.rag.datasource.vdb.weaviate.weaviate_vector import WeaviateConfig, WeaviateVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
- setup_mock_redis,
)
+pytest_plugins = ("tests.integration_tests.vdb.test_vector_store",)
+
class WeaviateVectorTest(AbstractVectorTest):
def __init__(self):
diff --git a/api/tests/integration_tests/workflow/nodes/test_code.py b/api/tests/integration_tests/workflow/nodes/test_code.py
index ce0c8bf8ca..4f41396c22 100644
--- a/api/tests/integration_tests/workflow/nodes/test_code.py
+++ b/api/tests/integration_tests/workflow/nodes/test_code.py
@@ -13,9 +13,10 @@ from configs import dify_config
from core.app.entities.app_invoke_entities import InvokeFrom, UserFrom
from core.workflow.node_factory import DifyNodeFactory
from core.workflow.system_variables import build_system_variables
-from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock
from tests.workflow_test_utils import build_test_graph_init_params
+pytest_plugins = ("tests.integration_tests.workflow.nodes.__mock.code_executor",)
+
CODE_MAX_STRING_LENGTH = dify_config.CODE_MAX_STRING_LENGTH
diff --git a/api/tests/integration_tests/workflow/nodes/test_http.py b/api/tests/integration_tests/workflow/nodes/test_http.py
index ce18486faf..b1f937e738 100644
--- a/api/tests/integration_tests/workflow/nodes/test_http.py
+++ b/api/tests/integration_tests/workflow/nodes/test_http.py
@@ -16,9 +16,10 @@ from core.tools.tool_file_manager import ToolFileManager
from core.workflow.node_factory import DifyNodeFactory
from core.workflow.node_runtime import DifyFileReferenceFactory
from core.workflow.system_variables import build_system_variables
-from tests.integration_tests.workflow.nodes.__mock.http import setup_http_mock
from tests.workflow_test_utils import build_test_graph_init_params
+pytest_plugins = ("tests.integration_tests.workflow.nodes.__mock.http",)
+
HTTP_REQUEST_CONFIG = HttpRequestNodeConfig(
max_connect_timeout=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
max_read_timeout=dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT,
diff --git a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py
index 3bf44df349..fe512c2585 100644
--- a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py
+++ b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py
@@ -17,8 +17,7 @@ from extensions.ext_database import db
from tests.integration_tests.workflow.nodes.__mock.model import get_mocked_fetch_model_instance
from tests.workflow_test_utils import build_test_graph_init_params
-"""FOR MOCK FIXTURES, DO NOT REMOVE"""
-from tests.integration_tests.model_runtime.__mock.plugin_daemon import setup_model_mock
+pytest_plugins = ("tests.integration_tests.model_runtime.__mock.plugin_daemon",)
def get_mocked_fetch_memory(memory_text: str):
diff --git a/api/tests/test_containers_integration_tests/conftest.py b/api/tests/test_containers_integration_tests/conftest.py
index be8a1c6aab..ef74893f07 100644
--- a/api/tests/test_containers_integration_tests/conftest.py
+++ b/api/tests/test_containers_integration_tests/conftest.py
@@ -12,7 +12,7 @@ import os
from collections.abc import Generator
from contextlib import contextmanager
from pathlib import Path
-from typing import Protocol, TypeVar
+from typing import Protocol
import psycopg2
import pytest
@@ -48,11 +48,8 @@ class _CloserProtocol(Protocol):
pass
-_Closer = TypeVar("_Closer", bound=_CloserProtocol)
-
-
@contextmanager
-def _auto_close(closer: _Closer) -> Generator[_Closer, None, None]:
+def _auto_close[T: _CloserProtocol](closer: T) -> Generator[T, None, None]:
yield closer
closer.close()
diff --git a/api/tests/unit_tests/controllers/console/app/test_app_apis.py b/api/tests/test_containers_integration_tests/controllers/console/app/test_app_apis.py
similarity index 88%
rename from api/tests/unit_tests/controllers/console/app/test_app_apis.py
rename to api/tests/test_containers_integration_tests/controllers/console/app/test_app_apis.py
index 1d1e119fd6..0841217fcf 100644
--- a/api/tests/unit_tests/controllers/console/app/test_app_apis.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/app/test_app_apis.py
@@ -1,7 +1,4 @@
-"""
-Additional tests to improve coverage for low-coverage modules in controllers/console/app.
-Target: increase coverage for files with <75% coverage.
-"""
+"""Testcontainers integration tests for controllers/console/app endpoints."""
from __future__ import annotations
@@ -70,26 +67,12 @@ def _unwrap(func):
return func
-class _ConnContext:
- def __init__(self, rows):
- self._rows = rows
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc, tb):
- return False
-
- def execute(self, _query, _args):
- return self._rows
-
-
-# ========== Completion Tests ==========
class TestCompletionEndpoints:
- """Tests for completion API endpoints."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_completion_create_payload(self):
- """Test completion creation payload."""
payload = CompletionMessagePayload(inputs={"prompt": "test"}, model_config={})
assert payload.inputs == {"prompt": "test"}
@@ -209,7 +192,9 @@ class TestCompletionEndpoints:
class TestAppEndpoints:
- """Tests for app endpoints."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_app_put_should_preserve_icon_type_when_payload_omits_it(self, app, monkeypatch):
api = app_module.AppApi()
@@ -250,12 +235,12 @@ class TestAppEndpoints:
)
-# ========== OpsTrace Tests ==========
class TestOpsTraceEndpoints:
- """Tests for ops_trace endpoint."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_ops_trace_query_basic(self):
- """Test ops_trace query."""
query = TraceProviderQuery(tracing_provider="langfuse")
assert query.tracing_provider == "langfuse"
@@ -310,12 +295,12 @@ class TestOpsTraceEndpoints:
method(app_id="app-1")
-# ========== Site Tests ==========
class TestSiteEndpoints:
- """Tests for site endpoint."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_site_response_structure(self):
- """Test site response structure."""
payload = AppSiteUpdatePayload(title="My Site", description="Test site")
assert payload.title == "My Site"
@@ -369,27 +354,22 @@ class TestSiteEndpoints:
assert result is site
-# ========== Workflow Tests ==========
class TestWorkflowEndpoints:
- """Tests for workflow endpoints."""
-
def test_workflow_copy_payload(self):
- """Test workflow copy payload."""
payload = SyncDraftWorkflowPayload(graph={}, features={})
assert payload.graph == {}
def test_workflow_mode_query(self):
- """Test workflow mode query."""
payload = AdvancedChatWorkflowRunPayload(inputs={}, query="hi")
assert payload.query == "hi"
-# ========== Workflow App Log Tests ==========
class TestWorkflowAppLogEndpoints:
- """Tests for workflow app log endpoints."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_workflow_app_log_query(self):
- """Test workflow app log query."""
query = WorkflowAppLogQuery(keyword="test", page=1, limit=20)
assert query.keyword == "test"
@@ -403,14 +383,21 @@ class TestWorkflowAppLogEndpoints:
monkeypatch.setattr(workflow_app_log_module, "db", SimpleNamespace(engine=MagicMock()))
- class DummySession:
+ class DummySessionCtx:
def __enter__(self):
return "session"
def __exit__(self, exc_type, exc, tb):
return False
- monkeypatch.setattr(workflow_app_log_module, "Session", lambda *args, **kwargs: DummySession())
+ class DummySessionMaker:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def begin(self):
+ return DummySessionCtx()
+
+ monkeypatch.setattr(workflow_app_log_module, "sessionmaker", DummySessionMaker)
def fake_get_paginate(self, **_kwargs):
return {"items": [], "total": 0}
@@ -427,12 +414,12 @@ class TestWorkflowAppLogEndpoints:
assert result == {"items": [], "total": 0}
-# ========== Workflow Draft Variable Tests ==========
class TestWorkflowDraftVariableEndpoints:
- """Tests for workflow draft variable endpoints."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_workflow_variable_creation(self):
- """Test workflow variable creation."""
payload = WorkflowDraftVariableUpdatePayload(name="var1", value="test")
assert payload.name == "var1"
@@ -443,13 +430,20 @@ class TestWorkflowDraftVariableEndpoints:
monkeypatch.setattr(workflow_draft_variable_module, "db", SimpleNamespace(engine=MagicMock()))
monkeypatch.setattr(workflow_draft_variable_module, "current_user", SimpleNamespace(id="user-1"))
- class DummySession:
+ class DummySessionCtx:
def __enter__(self):
return "session"
def __exit__(self, exc_type, exc, tb):
return False
+ class DummySessionMaker:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def begin(self):
+ return DummySessionCtx()
+
class DummyDraftService:
def __init__(self, session):
self.session = session
@@ -457,7 +451,7 @@ class TestWorkflowDraftVariableEndpoints:
def list_variables_without_values(self, **_kwargs):
return {"items": [], "total": 0}
- monkeypatch.setattr(workflow_draft_variable_module, "Session", lambda *args, **kwargs: DummySession())
+ monkeypatch.setattr(workflow_draft_variable_module, "sessionmaker", DummySessionMaker)
class DummyWorkflowService:
def is_workflow_exist(self, *args, **kwargs):
@@ -472,12 +466,12 @@ class TestWorkflowDraftVariableEndpoints:
assert result == {"items": [], "total": 0}
-# ========== Workflow Statistic Tests ==========
class TestWorkflowStatisticEndpoints:
- """Tests for workflow statistic endpoints."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_workflow_statistic_time_range(self):
- """Test workflow statistic time range query."""
query = WorkflowStatisticQuery(start="2024-01-01", end="2024-12-31")
assert query.start == "2024-01-01"
@@ -541,12 +535,12 @@ class TestWorkflowStatisticEndpoints:
assert response.get_json() == {"data": [{"date": "2024-01-02"}]}
-# ========== Workflow Trigger Tests ==========
class TestWorkflowTriggerEndpoints:
- """Tests for workflow trigger endpoints."""
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
def test_webhook_trigger_payload(self):
- """Test webhook trigger payload."""
payload = Parser(node_id="node-1")
assert payload.node_id == "node-1"
@@ -563,14 +557,21 @@ class TestWorkflowTriggerEndpoints:
session = MagicMock()
session.query.return_value.where.return_value.first.return_value = trigger
- class DummySession:
+ class DummySessionCtx:
def __enter__(self):
return session
def __exit__(self, exc_type, exc, tb):
return False
- monkeypatch.setattr(workflow_trigger_module, "Session", lambda *_args, **_kwargs: DummySession())
+ class DummySessionMaker:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def begin(self):
+ return DummySessionCtx()
+
+ monkeypatch.setattr(workflow_trigger_module, "sessionmaker", DummySessionMaker)
with app.test_request_context("/?node_id=node-1"):
result = method(app_model=SimpleNamespace(id="app-1"))
@@ -578,22 +579,13 @@ class TestWorkflowTriggerEndpoints:
assert result is trigger
-# ========== Wraps Tests ==========
class TestWrapsEndpoints:
- """Tests for wraps utility functions."""
-
def test_get_app_model_context(self):
- """Test get_app_model wrapper context."""
- # These are decorator functions, so we test their availability
assert hasattr(wraps_module, "get_app_model")
-# ========== MCP Server Tests ==========
class TestMCPServerEndpoints:
- """Tests for MCP server endpoints."""
-
def test_mcp_server_connection(self):
- """Test MCP server connection."""
payload = MCPServerCreatePayload(parameters={"url": "http://localhost:3000"})
assert payload.parameters["url"] == "http://localhost:3000"
@@ -602,22 +594,14 @@ class TestMCPServerEndpoints:
assert payload.status == "active"
-# ========== Error Handling Tests ==========
class TestErrorHandling:
- """Tests for error handling in various endpoints."""
-
def test_annotation_list_query_validation(self):
- """Test annotation list query validation."""
with pytest.raises(ValueError):
annotation_module.AnnotationListQuery(page=0)
-# ========== Integration-like Tests ==========
class TestPayloadIntegration:
- """Integration tests for payload handling."""
-
def test_multiple_payload_types(self):
- """Test handling of multiple payload types."""
payloads = [
annotation_module.AnnotationReplyPayload(
score_threshold=0.5, embedding_provider_name="openai", embedding_model_name="text-embedding-3-small"
diff --git a/api/tests/test_containers_integration_tests/controllers/console/app/test_app_import_api.py b/api/tests/test_containers_integration_tests/controllers/console/app/test_app_import_api.py
new file mode 100644
index 0000000000..d8c6821f8d
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/controllers/console/app/test_app_import_api.py
@@ -0,0 +1,142 @@
+"""Testcontainers integration tests for controllers.console.app.app_import endpoints."""
+
+from __future__ import annotations
+
+from types import SimpleNamespace
+from unittest.mock import MagicMock
+
+import pytest
+
+from controllers.console.app import app_import as app_import_module
+from services.app_dsl_service import ImportStatus
+
+
+def _unwrap(func):
+ bound_self = getattr(func, "__self__", None)
+ while hasattr(func, "__wrapped__"):
+ func = func.__wrapped__
+ if bound_self is not None:
+ return func.__get__(bound_self, bound_self.__class__)
+ return func
+
+
+class _Result:
+ def __init__(self, status: ImportStatus, app_id: str | None = "app-1"):
+ self.status = status
+ self.app_id = app_id
+
+ def model_dump(self, mode: str = "json"):
+ return {"status": self.status, "app_id": self.app_id}
+
+
+def _install_features(monkeypatch: pytest.MonkeyPatch, enabled: bool) -> None:
+ features = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=enabled))
+ monkeypatch.setattr(app_import_module.FeatureService, "get_system_features", lambda: features)
+
+
+class TestAppImportApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_import_post_returns_failed_status(self, app, monkeypatch: pytest.MonkeyPatch) -> None:
+ api = app_import_module.AppImportApi()
+ method = _unwrap(api.post)
+
+ _install_features(monkeypatch, enabled=False)
+ monkeypatch.setattr(
+ app_import_module.AppDslService,
+ "import_app",
+ lambda *_args, **_kwargs: _Result(ImportStatus.FAILED, app_id=None),
+ )
+ monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
+
+ with app.test_request_context("/console/api/apps/imports", method="POST", json={"mode": "yaml-content"}):
+ response, status = method()
+
+ assert status == 400
+ assert response["status"] == ImportStatus.FAILED
+
+ def test_import_post_returns_pending_status(self, app, monkeypatch: pytest.MonkeyPatch) -> None:
+ api = app_import_module.AppImportApi()
+ method = _unwrap(api.post)
+
+ _install_features(monkeypatch, enabled=False)
+ monkeypatch.setattr(
+ app_import_module.AppDslService,
+ "import_app",
+ lambda *_args, **_kwargs: _Result(ImportStatus.PENDING),
+ )
+ monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
+
+ with app.test_request_context("/console/api/apps/imports", method="POST", json={"mode": "yaml-content"}):
+ response, status = method()
+
+ assert status == 202
+ assert response["status"] == ImportStatus.PENDING
+
+ def test_import_post_updates_webapp_auth_when_enabled(self, app, monkeypatch: pytest.MonkeyPatch) -> None:
+ api = app_import_module.AppImportApi()
+ method = _unwrap(api.post)
+
+ _install_features(monkeypatch, enabled=True)
+ monkeypatch.setattr(
+ app_import_module.AppDslService,
+ "import_app",
+ lambda *_args, **_kwargs: _Result(ImportStatus.COMPLETED, app_id="app-123"),
+ )
+ update_access = MagicMock()
+ monkeypatch.setattr(app_import_module.EnterpriseService.WebAppAuth, "update_app_access_mode", update_access)
+ monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
+
+ with app.test_request_context("/console/api/apps/imports", method="POST", json={"mode": "yaml-content"}):
+ response, status = method()
+
+ update_access.assert_called_once_with("app-123", "private")
+ assert status == 200
+ assert response["status"] == ImportStatus.COMPLETED
+
+
+class TestAppImportConfirmApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_import_confirm_returns_failed_status(self, app, monkeypatch: pytest.MonkeyPatch) -> None:
+ api = app_import_module.AppImportConfirmApi()
+ method = _unwrap(api.post)
+
+ monkeypatch.setattr(
+ app_import_module.AppDslService,
+ "confirm_import",
+ lambda *_args, **_kwargs: _Result(ImportStatus.FAILED),
+ )
+ monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
+
+ with app.test_request_context("/console/api/apps/imports/import-1/confirm", method="POST"):
+ response, status = method(import_id="import-1")
+
+ assert status == 400
+ assert response["status"] == ImportStatus.FAILED
+
+
+class TestAppImportCheckDependenciesApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_import_check_dependencies_returns_result(self, app, monkeypatch: pytest.MonkeyPatch) -> None:
+ api = app_import_module.AppImportCheckDependenciesApi()
+ method = _unwrap(api.get)
+
+ monkeypatch.setattr(
+ app_import_module.AppDslService,
+ "check_dependencies",
+ lambda *_args, **_kwargs: SimpleNamespace(model_dump=lambda mode="json": {"leaked_dependencies": []}),
+ )
+
+ with app.test_request_context("/console/api/apps/imports/app-1/check-dependencies", method="GET"):
+ response, status = method(app_model=SimpleNamespace(id="app-1"))
+
+ assert status == 200
+ assert response["leaked_dependencies"] == []
diff --git a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline.py b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline.py
similarity index 77%
rename from api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline.py
rename to api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline.py
index ebbb34e069..d5ae95dfb7 100644
--- a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline.py
@@ -1,6 +1,12 @@
+"""Testcontainers integration tests for rag_pipeline controller endpoints."""
+
+from __future__ import annotations
+
from unittest.mock import MagicMock, patch
+from uuid import uuid4
import pytest
+from sqlalchemy.orm import Session
from controllers.console import console_ns
from controllers.console.datasets.rag_pipeline.rag_pipeline import (
@@ -9,6 +15,7 @@ from controllers.console.datasets.rag_pipeline.rag_pipeline import (
PipelineTemplateListApi,
PublishCustomizedPipelineTemplateApi,
)
+from models.dataset import PipelineCustomizedTemplate
def unwrap(func):
@@ -18,6 +25,10 @@ def unwrap(func):
class TestPipelineTemplateListApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_success(self, app):
api = PipelineTemplateListApi()
method = unwrap(api.get)
@@ -38,6 +49,10 @@ class TestPipelineTemplateListApi:
class TestPipelineTemplateDetailApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_success(self, app):
api = PipelineTemplateDetailApi()
method = unwrap(api.get)
@@ -99,6 +114,10 @@ class TestPipelineTemplateDetailApi:
class TestCustomizedPipelineTemplateApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_patch_success(self, app):
api = CustomizedPipelineTemplateApi()
method = unwrap(api.patch)
@@ -136,35 +155,29 @@ class TestCustomizedPipelineTemplateApi:
delete_mock.assert_called_once_with("tpl-1")
assert response == 200
- def test_post_success(self, app):
+ def test_post_success(self, app, db_session_with_containers: Session):
api = CustomizedPipelineTemplateApi()
method = unwrap(api.post)
- template = MagicMock()
- template.yaml_content = "yaml-data"
+ tenant_id = str(uuid4())
+ template = PipelineCustomizedTemplate(
+ tenant_id=tenant_id,
+ name="Test Template",
+ description="Test",
+ chunk_structure="hierarchical",
+ icon={"icon": "📘"},
+ position=0,
+ yaml_content="yaml-data",
+ install_count=0,
+ language="en-US",
+ created_by=str(uuid4()),
+ )
+ db_session_with_containers.add(template)
+ db_session_with_containers.commit()
+ db_session_with_containers.expire_all()
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session = MagicMock()
- session.query.return_value.where.return_value.first.return_value = template
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = session
- session_ctx.__exit__.return_value = None
-
- with (
- app.test_request_context("/"),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline.Session",
- return_value=session_ctx,
- ),
- ):
- response, status = method(api, "tpl-1")
+ with app.test_request_context("/"):
+ response, status = method(api, template.id)
assert status == 200
assert response == {"data": "yaml-data"}
@@ -173,32 +186,16 @@ class TestCustomizedPipelineTemplateApi:
api = CustomizedPipelineTemplateApi()
method = unwrap(api.post)
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session = MagicMock()
- session.query.return_value.where.return_value.first.return_value = None
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = session
- session_ctx.__exit__.return_value = None
-
- with (
- app.test_request_context("/"),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline.Session",
- return_value=session_ctx,
- ),
- ):
+ with app.test_request_context("/"):
with pytest.raises(ValueError):
- method(api, "tpl-1")
+ method(api, str(uuid4()))
class TestPublishCustomizedPipelineTemplateApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_post_success(self, app):
api = PublishCustomizedPipelineTemplateApi()
method = unwrap(api.post)
diff --git a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_datasets.py b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_datasets.py
similarity index 83%
rename from api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_datasets.py
rename to api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_datasets.py
index fd38fcbb5e..64e3de2ca3 100644
--- a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_datasets.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_datasets.py
@@ -1,3 +1,7 @@
+"""Testcontainers integration tests for rag_pipeline_datasets controller endpoints."""
+
+from __future__ import annotations
+
from unittest.mock import MagicMock, patch
import pytest
@@ -19,6 +23,10 @@ def unwrap(func):
class TestCreateRagPipelineDatasetApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def _valid_payload(self):
return {"yaml_content": "name: test"}
@@ -33,13 +41,6 @@ class TestCreateRagPipelineDatasetApi:
mock_service = MagicMock()
mock_service.create_rag_pipeline_dataset.return_value = import_info
- mock_session_ctx = MagicMock()
- mock_session_ctx.__enter__.return_value = MagicMock()
- mock_session_ctx.__exit__.return_value = None
-
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
with (
app.test_request_context("/", json=payload),
patch.object(type(console_ns), "payload", payload),
@@ -47,14 +48,6 @@ class TestCreateRagPipelineDatasetApi:
"controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.current_account_with_tenant",
return_value=(user, "tenant-1"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.Session",
- return_value=mock_session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.RagPipelineDslService",
return_value=mock_service,
@@ -93,13 +86,6 @@ class TestCreateRagPipelineDatasetApi:
mock_service = MagicMock()
mock_service.create_rag_pipeline_dataset.side_effect = services.errors.dataset.DatasetNameDuplicateError()
- mock_session_ctx = MagicMock()
- mock_session_ctx.__enter__.return_value = MagicMock()
- mock_session_ctx.__exit__.return_value = None
-
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
with (
app.test_request_context("/", json=payload),
patch.object(type(console_ns), "payload", payload),
@@ -107,14 +93,6 @@ class TestCreateRagPipelineDatasetApi:
"controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.current_account_with_tenant",
return_value=(user, "tenant-1"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.Session",
- return_value=mock_session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_datasets.RagPipelineDslService",
return_value=mock_service,
@@ -143,6 +121,10 @@ class TestCreateRagPipelineDatasetApi:
class TestCreateEmptyRagPipelineDatasetApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_post_success(self, app):
api = CreateEmptyRagPipelineDatasetApi()
method = unwrap(api.post)
diff --git a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_import.py b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_import.py
similarity index 66%
rename from api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_import.py
rename to api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_import.py
index a72ad45110..cb67892878 100644
--- a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_import.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_import.py
@@ -1,5 +1,11 @@
+"""Testcontainers integration tests for rag_pipeline_import controller endpoints."""
+
+from __future__ import annotations
+
from unittest.mock import MagicMock, patch
+import pytest
+
from controllers.console import console_ns
from controllers.console.datasets.rag_pipeline.rag_pipeline_import import (
RagPipelineExportApi,
@@ -18,6 +24,10 @@ def unwrap(func):
class TestRagPipelineImportApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def _payload(self, mode="create"):
return {
"mode": mode,
@@ -30,7 +40,6 @@ class TestRagPipelineImportApi:
method = unwrap(api.post)
payload = self._payload()
-
user = MagicMock()
result = MagicMock()
result.status = "completed"
@@ -39,13 +48,6 @@ class TestRagPipelineImportApi:
service = MagicMock()
service.import_rag_pipeline.return_value = result
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = MagicMock()
- session_ctx.__exit__.return_value = None
-
with (
app.test_request_context("/", json=payload),
patch.object(type(console_ns), "payload", payload),
@@ -53,14 +55,6 @@ class TestRagPipelineImportApi:
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.current_account_with_tenant",
return_value=(user, "tenant"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.RagPipelineDslService",
return_value=service,
@@ -76,7 +70,6 @@ class TestRagPipelineImportApi:
method = unwrap(api.post)
payload = self._payload()
-
user = MagicMock()
result = MagicMock()
result.status = ImportStatus.FAILED
@@ -85,13 +78,6 @@ class TestRagPipelineImportApi:
service = MagicMock()
service.import_rag_pipeline.return_value = result
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = MagicMock()
- session_ctx.__exit__.return_value = None
-
with (
app.test_request_context("/", json=payload),
patch.object(type(console_ns), "payload", payload),
@@ -99,14 +85,6 @@ class TestRagPipelineImportApi:
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.current_account_with_tenant",
return_value=(user, "tenant"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.RagPipelineDslService",
return_value=service,
@@ -122,7 +100,6 @@ class TestRagPipelineImportApi:
method = unwrap(api.post)
payload = self._payload()
-
user = MagicMock()
result = MagicMock()
result.status = ImportStatus.PENDING
@@ -131,13 +108,6 @@ class TestRagPipelineImportApi:
service = MagicMock()
service.import_rag_pipeline.return_value = result
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = MagicMock()
- session_ctx.__exit__.return_value = None
-
with (
app.test_request_context("/", json=payload),
patch.object(type(console_ns), "payload", payload),
@@ -145,14 +115,6 @@ class TestRagPipelineImportApi:
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.current_account_with_tenant",
return_value=(user, "tenant"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.RagPipelineDslService",
return_value=service,
@@ -165,6 +127,10 @@ class TestRagPipelineImportApi:
class TestRagPipelineImportConfirmApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_confirm_success(self, app):
api = RagPipelineImportConfirmApi()
method = unwrap(api.post)
@@ -177,27 +143,12 @@ class TestRagPipelineImportConfirmApi:
service = MagicMock()
service.confirm_import.return_value = result
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = MagicMock()
- session_ctx.__exit__.return_value = None
-
with (
app.test_request_context("/"),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.current_account_with_tenant",
return_value=(user, "tenant"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.RagPipelineDslService",
return_value=service,
@@ -220,27 +171,12 @@ class TestRagPipelineImportConfirmApi:
service = MagicMock()
service.confirm_import.return_value = result
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = MagicMock()
- session_ctx.__exit__.return_value = None
-
with (
app.test_request_context("/"),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.current_account_with_tenant",
return_value=(user, "tenant"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.RagPipelineDslService",
return_value=service,
@@ -253,6 +189,10 @@ class TestRagPipelineImportConfirmApi:
class TestRagPipelineImportCheckDependenciesApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_success(self, app):
api = RagPipelineImportCheckDependenciesApi()
method = unwrap(api.get)
@@ -264,23 +204,8 @@ class TestRagPipelineImportCheckDependenciesApi:
service = MagicMock()
service.check_dependencies.return_value = result
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = MagicMock()
- session_ctx.__exit__.return_value = None
-
with (
app.test_request_context("/"),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.RagPipelineDslService",
return_value=service,
@@ -293,6 +218,10 @@ class TestRagPipelineImportCheckDependenciesApi:
class TestRagPipelineExportApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_with_include_secret(self, app):
api = RagPipelineExportApi()
method = unwrap(api.get)
@@ -301,23 +230,8 @@ class TestRagPipelineExportApi:
service = MagicMock()
service.export_rag_pipeline_dsl.return_value = {"yaml": "data"}
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = MagicMock()
- session_ctx.__exit__.return_value = None
-
with (
app.test_request_context("/?include_secret=true"),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_import.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_import.RagPipelineDslService",
return_value=service,
diff --git a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_workflow.py b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_workflow.py
similarity index 91%
rename from api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_workflow.py
rename to api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_workflow.py
index a3c0592d76..c1f3122c2b 100644
--- a/api/tests/unit_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_workflow.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/datasets/rag_pipeline/test_rag_pipeline_workflow.py
@@ -1,7 +1,13 @@
+"""Testcontainers integration tests for rag_pipeline_workflow controller endpoints."""
+
+from __future__ import annotations
+
from datetime import datetime
from unittest.mock import MagicMock, patch
+from uuid import uuid4
import pytest
+from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, Forbidden, HTTPException, NotFound
import services
@@ -38,6 +44,10 @@ def unwrap(func):
class TestDraftWorkflowApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_draft_success(self, app):
api = DraftRagPipelineApi()
method = unwrap(api.get)
@@ -200,6 +210,10 @@ class TestDraftWorkflowApi:
class TestDraftRunNodes:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_iteration_node_success(self, app):
api = RagPipelineDraftRunIterationNodeApi()
method = unwrap(api.post)
@@ -275,6 +289,10 @@ class TestDraftRunNodes:
class TestPipelineRunApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_draft_run_success(self, app):
api = DraftRagPipelineRunApi()
method = unwrap(api.post)
@@ -337,6 +355,10 @@ class TestPipelineRunApis:
class TestDraftNodeRun:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_execution_not_found(self, app):
api = RagPipelineDraftNodeRunApi()
method = unwrap(api.post)
@@ -364,45 +386,43 @@ class TestDraftNodeRun:
class TestPublishedPipelineApis:
- def test_publish_success(self, app):
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_publish_success(self, app, db_session_with_containers: Session):
+ from models.dataset import Pipeline
+
api = PublishedRagPipelineApi()
method = unwrap(api.post)
- pipeline = MagicMock()
+ tenant_id = str(uuid4())
+ pipeline = Pipeline(
+ tenant_id=tenant_id,
+ name="test-pipeline",
+ description="test",
+ created_by=str(uuid4()),
+ )
+ db_session_with_containers.add(pipeline)
+ db_session_with_containers.commit()
+ db_session_with_containers.expire_all()
+
user = MagicMock(id="u1")
workflow = MagicMock(
- id="w1",
+ id=str(uuid4()),
created_at=naive_utc_now(),
)
- session = MagicMock()
- session.merge.return_value = pipeline
-
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = session
- session_ctx.__exit__.return_value = None
-
service = MagicMock()
service.publish_workflow.return_value = workflow
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
with (
app.test_request_context("/"),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.current_account_with_tenant",
return_value=(user, "t"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.RagPipelineService",
return_value=service,
@@ -415,6 +435,10 @@ class TestPublishedPipelineApis:
class TestMiscApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_task_stop(self, app):
api = RagPipelineTaskStopApi()
method = unwrap(api.post)
@@ -471,6 +495,10 @@ class TestMiscApis:
class TestPublishedRagPipelineRunApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_published_run_success(self, app):
api = PublishedRagPipelineRunApi()
method = unwrap(api.post)
@@ -536,6 +564,10 @@ class TestPublishedRagPipelineRunApi:
class TestDefaultBlockConfigApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_block_config_success(self, app):
api = DefaultRagPipelineBlockConfigApi()
method = unwrap(api.get)
@@ -567,6 +599,10 @@ class TestDefaultBlockConfigApi:
class TestPublishedAllRagPipelineApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_published_workflows_success(self, app):
api = PublishedAllRagPipelineApi()
method = unwrap(api.get)
@@ -577,28 +613,12 @@ class TestPublishedAllRagPipelineApi:
service = MagicMock()
service.get_all_published_workflow.return_value = ([{"id": "w1"}], False)
- session = MagicMock()
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = session
- session_ctx.__exit__.return_value = None
-
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
with (
app.test_request_context("/"),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.current_account_with_tenant",
return_value=(user, "t"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.RagPipelineService",
return_value=service,
@@ -628,6 +648,10 @@ class TestPublishedAllRagPipelineApi:
class TestRagPipelineByIdApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_patch_success(self, app):
api = RagPipelineByIdApi()
method = unwrap(api.patch)
@@ -640,14 +664,6 @@ class TestRagPipelineByIdApi:
service = MagicMock()
service.update_workflow.return_value = workflow
- session = MagicMock()
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = session
- session_ctx.__exit__.return_value = None
-
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
payload = {"marked_name": "test"}
with (
@@ -657,14 +673,6 @@ class TestRagPipelineByIdApi:
"controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.current_account_with_tenant",
return_value=(user, "t"),
),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.RagPipelineService",
return_value=service,
@@ -700,24 +708,8 @@ class TestRagPipelineByIdApi:
workflow_service = MagicMock()
- session = MagicMock()
- session_ctx = MagicMock()
- session_ctx.__enter__.return_value = session
- session_ctx.__exit__.return_value = None
-
- fake_db = MagicMock()
- fake_db.engine = MagicMock()
-
with (
app.test_request_context("/", method="DELETE"),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.db",
- fake_db,
- ),
- patch(
- "controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.Session",
- return_value=session_ctx,
- ),
patch(
"controllers.console.datasets.rag_pipeline.rag_pipeline_workflow.WorkflowService",
return_value=workflow_service,
@@ -725,12 +717,7 @@ class TestRagPipelineByIdApi:
):
result = method(api, pipeline, "old-workflow")
- workflow_service.delete_workflow.assert_called_once_with(
- session=session,
- workflow_id="old-workflow",
- tenant_id="t1",
- )
- session.commit.assert_called_once()
+ workflow_service.delete_workflow.assert_called_once()
assert result == (None, 204)
def test_delete_active_workflow_rejected(self, app):
@@ -745,6 +732,10 @@ class TestRagPipelineByIdApi:
class TestRagPipelineWorkflowLastRunApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_last_run_success(self, app):
api = RagPipelineWorkflowLastRunApi()
method = unwrap(api.get)
@@ -788,6 +779,10 @@ class TestRagPipelineWorkflowLastRunApi:
class TestRagPipelineDatasourceVariableApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_set_datasource_variables_success(self, app):
api = RagPipelineDatasourceVariableApi()
method = unwrap(api.post)
diff --git a/api/tests/unit_tests/controllers/console/datasets/test_data_source.py b/api/tests/test_containers_integration_tests/controllers/console/datasets/test_data_source.py
similarity index 87%
rename from api/tests/unit_tests/controllers/console/datasets/test_data_source.py
rename to api/tests/test_containers_integration_tests/controllers/console/datasets/test_data_source.py
index d841f67f9b..1c4c6a899f 100644
--- a/api/tests/unit_tests/controllers/console/datasets/test_data_source.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/datasets/test_data_source.py
@@ -1,3 +1,7 @@
+"""Testcontainers integration tests for controllers.console.datasets.data_source endpoints."""
+
+from __future__ import annotations
+
from unittest.mock import MagicMock, PropertyMock, patch
import pytest
@@ -46,6 +50,10 @@ def mock_engine():
class TestDataSourceApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_success(self, app, patch_tenant):
api = DataSourceApi()
method = unwrap(api.get)
@@ -94,12 +102,12 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
- patch("controllers.console.datasets.data_source.Session") as mock_session_class,
+ patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.db.session.add"),
patch("controllers.console.datasets.data_source.db.session.commit"),
):
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
response, status = method(api, "b1", "enable")
@@ -115,12 +123,12 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
- patch("controllers.console.datasets.data_source.Session") as mock_session_class,
+ patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.db.session.add"),
patch("controllers.console.datasets.data_source.db.session.commit"),
):
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
response, status = method(api, "b1", "disable")
@@ -134,10 +142,10 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
- patch("controllers.console.datasets.data_source.Session") as mock_session_class,
+ patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
):
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = None
with pytest.raises(NotFound):
@@ -151,10 +159,10 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
- patch("controllers.console.datasets.data_source.Session") as mock_session_class,
+ patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
):
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
with pytest.raises(ValueError):
@@ -168,10 +176,10 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
- patch("controllers.console.datasets.data_source.Session") as mock_session_class,
+ patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
):
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
with pytest.raises(ValueError):
@@ -179,6 +187,10 @@ class TestDataSourceApi:
class TestDataSourceNotionListApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_credential_not_found(self, app, patch_tenant):
api = DataSourceNotionListApi()
method = unwrap(api.get)
@@ -270,7 +282,7 @@ class TestDataSourceNotionListApi:
"controllers.console.datasets.data_source.DatasetService.get_dataset",
return_value=dataset,
),
- patch("controllers.console.datasets.data_source.Session") as mock_session_class,
+ patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch(
"core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime",
return_value=MagicMock(
@@ -280,7 +292,7 @@ class TestDataSourceNotionListApi:
),
):
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.scalars.return_value.all.return_value = [document]
response, status = method(api)
@@ -303,13 +315,17 @@ class TestDataSourceNotionListApi:
"controllers.console.datasets.data_source.DatasetService.get_dataset",
return_value=dataset,
),
- patch("controllers.console.datasets.data_source.Session"),
+ patch("controllers.console.datasets.data_source.sessionmaker"),
):
with pytest.raises(ValueError):
method(api)
class TestDataSourceNotionApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_preview_success(self, app, patch_tenant):
api = DataSourceNotionApi()
method = unwrap(api.get)
@@ -364,6 +380,10 @@ class TestDataSourceNotionApi:
class TestDataSourceNotionDatasetSyncApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_success(self, app, patch_tenant):
api = DataSourceNotionDatasetSyncApi()
method = unwrap(api.get)
@@ -403,6 +423,10 @@ class TestDataSourceNotionDatasetSyncApi:
class TestDataSourceNotionDocumentSyncApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_success(self, app, patch_tenant):
api = DataSourceNotionDocumentSyncApi()
method = unwrap(api.get)
diff --git a/api/tests/unit_tests/controllers/console/explore/test_conversation.py b/api/tests/test_containers_integration_tests/controllers/console/explore/test_conversation.py
similarity index 82%
rename from api/tests/unit_tests/controllers/console/explore/test_conversation.py
rename to api/tests/test_containers_integration_tests/controllers/console/explore/test_conversation.py
index 65cc209725..83492048ef 100644
--- a/api/tests/unit_tests/controllers/console/explore/test_conversation.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/explore/test_conversation.py
@@ -1,7 +1,10 @@
+"""Testcontainers integration tests for controllers.console.explore.conversation endpoints."""
+
+from __future__ import annotations
+
from unittest.mock import MagicMock, patch
import pytest
-from flask import Flask
from werkzeug.exceptions import NotFound
import controllers.console.explore.conversation as conversation_module
@@ -48,24 +51,12 @@ def user():
return user
-@pytest.fixture(autouse=True)
-def mock_db_and_session():
- with (
- patch.object(
- conversation_module,
- "db",
- MagicMock(session=MagicMock(), engine=MagicMock()),
- ),
- patch(
- "controllers.console.explore.conversation.Session",
- MagicMock(),
- ),
- ):
- yield
-
-
class TestConversationListApi:
- def test_get_success(self, app: Flask, chat_app, user):
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_get_success(self, app, chat_app, user):
api = conversation_module.ConversationListApi()
method = unwrap(api.get)
@@ -90,7 +81,7 @@ class TestConversationListApi:
assert result["has_more"] is False
assert len(result["data"]) == 2
- def test_last_conversation_not_exists(self, app: Flask, chat_app, user):
+ def test_last_conversation_not_exists(self, app, chat_app, user):
api = conversation_module.ConversationListApi()
method = unwrap(api.get)
@@ -106,7 +97,7 @@ class TestConversationListApi:
with pytest.raises(NotFound):
method(chat_app)
- def test_wrong_app_mode(self, app: Flask, non_chat_app):
+ def test_wrong_app_mode(self, app, non_chat_app):
api = conversation_module.ConversationListApi()
method = unwrap(api.get)
@@ -116,7 +107,11 @@ class TestConversationListApi:
class TestConversationApi:
- def test_delete_success(self, app: Flask, chat_app, user):
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_delete_success(self, app, chat_app, user):
api = conversation_module.ConversationApi()
method = unwrap(api.delete)
@@ -134,7 +129,7 @@ class TestConversationApi:
assert status == 204
assert body["result"] == "success"
- def test_delete_not_found(self, app: Flask, chat_app, user):
+ def test_delete_not_found(self, app, chat_app, user):
api = conversation_module.ConversationApi()
method = unwrap(api.delete)
@@ -150,7 +145,7 @@ class TestConversationApi:
with pytest.raises(NotFound):
method(chat_app, "cid")
- def test_delete_wrong_app_mode(self, app: Flask, non_chat_app):
+ def test_delete_wrong_app_mode(self, app, non_chat_app):
api = conversation_module.ConversationApi()
method = unwrap(api.delete)
@@ -160,7 +155,11 @@ class TestConversationApi:
class TestConversationRenameApi:
- def test_rename_success(self, app: Flask, chat_app, user):
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_rename_success(self, app, chat_app, user):
api = conversation_module.ConversationRenameApi()
method = unwrap(api.post)
@@ -179,7 +178,7 @@ class TestConversationRenameApi:
assert result["id"] == "cid"
- def test_rename_not_found(self, app: Flask, chat_app, user):
+ def test_rename_not_found(self, app, chat_app, user):
api = conversation_module.ConversationRenameApi()
method = unwrap(api.post)
@@ -197,7 +196,11 @@ class TestConversationRenameApi:
class TestConversationPinApi:
- def test_pin_success(self, app: Flask, chat_app, user):
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_pin_success(self, app, chat_app, user):
api = conversation_module.ConversationPinApi()
method = unwrap(api.patch)
@@ -215,7 +218,11 @@ class TestConversationPinApi:
class TestConversationUnPinApi:
- def test_unpin_success(self, app: Flask, chat_app, user):
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_unpin_success(self, app, chat_app, user):
api = conversation_module.ConversationUnPinApi()
method = unwrap(api.patch)
diff --git a/api/tests/test_containers_integration_tests/controllers/console/test_apikey.py b/api/tests/test_containers_integration_tests/controllers/console/test_apikey.py
new file mode 100644
index 0000000000..7df63aae1a
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/controllers/console/test_apikey.py
@@ -0,0 +1,153 @@
+"""Integration tests for console API key endpoints using testcontainers."""
+
+from __future__ import annotations
+
+from unittest.mock import MagicMock, patch
+
+import pytest
+from flask.testing import FlaskClient
+from sqlalchemy import delete
+from sqlalchemy.orm import Session
+
+from models.enums import ApiTokenType
+from models.model import ApiToken, App, AppMode
+from tests.test_containers_integration_tests.controllers.console.helpers import (
+ authenticate_console_client,
+ create_console_account_and_tenant,
+ create_console_app,
+)
+
+
+@pytest.fixture
+def setup_app(
+ db_session_with_containers: Session,
+ test_client_with_containers: FlaskClient,
+) -> tuple[FlaskClient, dict[str, str], App]:
+ """Create an authenticated client with an app for API key tests."""
+ account, tenant = create_console_account_and_tenant(db_session_with_containers)
+ app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
+ headers = authenticate_console_client(test_client_with_containers, account)
+ return test_client_with_containers, headers, app
+
+
+@pytest.fixture(autouse=True)
+def cleanup_api_tokens(db_session_with_containers: Session):
+ """Remove API tokens created during each test."""
+ yield
+ db_session_with_containers.execute(delete(ApiToken))
+ db_session_with_containers.commit()
+
+
+class TestAppApiKeyListResource:
+    """Tests for GET/POST /apps/<app_id>/api-keys."""
+
+ def test_get_empty_keys(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
+ client, headers, app = setup_app
+ resp = client.get(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+ assert resp.status_code == 200
+ assert resp.json is not None
+ assert resp.json["data"] == []
+
+ def test_create_api_key(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
+ client, headers, app = setup_app
+ resp = client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+ assert resp.status_code == 201
+ data = resp.json
+ assert data is not None
+ assert data["token"].startswith("app-")
+ assert data["id"] is not None
+
+ def test_get_keys_after_create(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
+ client, headers, app = setup_app
+ client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+ client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+
+ resp = client.get(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+ assert resp.status_code == 200
+ assert resp.json is not None
+ assert len(resp.json["data"]) == 2
+
+ def test_create_key_max_limit(
+ self,
+ setup_app: tuple[FlaskClient, dict[str, str], App],
+ db_session_with_containers: Session,
+ ) -> None:
+ client, headers, app = setup_app
+ # Create 10 keys (the max)
+ for _ in range(10):
+ client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+
+ # 11th should fail
+ resp = client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+ assert resp.status_code == 400
+
+ def test_get_keys_for_nonexistent_app(
+ self,
+ setup_app: tuple[FlaskClient, dict[str, str], App],
+ ) -> None:
+ client, headers, _ = setup_app
+ resp = client.get(
+ "/console/api/apps/00000000-0000-0000-0000-000000000000/api-keys",
+ headers=headers,
+ )
+ assert resp.status_code == 404
+
+
+class TestAppApiKeyResource:
+    """Tests for DELETE /apps/<app_id>/api-keys/<key_id>."""
+
+ def test_delete_key_success(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
+ client, headers, app = setup_app
+ create_resp = client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
+ assert create_resp.json is not None
+ key_id = create_resp.json["id"]
+
+ resp = client.delete(f"/console/api/apps/{app.id}/api-keys/{key_id}", headers=headers)
+ assert resp.status_code == 204
+
+ def test_delete_nonexistent_key(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
+ client, headers, app = setup_app
+ resp = client.delete(
+ f"/console/api/apps/{app.id}/api-keys/00000000-0000-0000-0000-000000000000",
+ headers=headers,
+ )
+ assert resp.status_code == 404
+
+ def test_delete_key_nonexistent_app(
+ self,
+ setup_app: tuple[FlaskClient, dict[str, str], App],
+ ) -> None:
+ client, headers, _ = setup_app
+ resp = client.delete(
+ "/console/api/apps/00000000-0000-0000-0000-000000000000/api-keys/00000000-0000-0000-0000-000000000000",
+ headers=headers,
+ )
+ assert resp.status_code == 404
+
+ def test_delete_forbidden_for_non_admin(
+ self,
+ flask_app_with_containers,
+ ) -> None:
+ """A non-admin member cannot delete API keys via the controller permission check."""
+ from werkzeug.exceptions import Forbidden
+
+ from controllers.console.apikey import BaseApiKeyResource
+
+ resource = BaseApiKeyResource()
+ resource.resource_type = ApiTokenType.APP
+ resource.resource_model = MagicMock()
+ resource.resource_id_field = "app_id"
+
+ non_admin = MagicMock()
+ non_admin.is_admin_or_owner = False
+
+ with (
+ flask_app_with_containers.test_request_context("/"),
+ patch(
+ "controllers.console.apikey.current_account_with_tenant",
+ return_value=(non_admin, "tenant-id"),
+ ),
+ patch("controllers.console.apikey._get_resource"),
+ ):
+ with pytest.raises(Forbidden):
+ BaseApiKeyResource.delete(resource, "rid", "kid")
diff --git a/api/tests/unit_tests/controllers/console/workspace/test_tool_provider.py b/api/tests/test_containers_integration_tests/controllers/console/workspace/test_tool_provider.py
similarity index 94%
rename from api/tests/unit_tests/controllers/console/workspace/test_tool_provider.py
rename to api/tests/test_containers_integration_tests/controllers/console/workspace/test_tool_provider.py
index 16ea1bf509..f2e7104b18 100644
--- a/api/tests/unit_tests/controllers/console/workspace/test_tool_provider.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/workspace/test_tool_provider.py
@@ -1,9 +1,11 @@
+"""Testcontainers integration tests for controllers.console.workspace.tool_providers endpoints."""
+
+from __future__ import annotations
+
import json
from unittest.mock import MagicMock, patch
import pytest
-from flask import Flask
-from flask_restx import Api
from werkzeug.exceptions import Forbidden
from controllers.console.workspace.tool_providers import (
@@ -31,7 +33,6 @@ from controllers.console.workspace.tool_providers import (
ToolOAuthCustomClient,
ToolPluginOAuthApi,
ToolProviderListApi,
- ToolProviderMCPApi,
ToolWorkflowListApi,
ToolWorkflowProviderCreateApi,
ToolWorkflowProviderDeleteApi,
@@ -39,8 +40,6 @@ from controllers.console.workspace.tool_providers import (
ToolWorkflowProviderUpdateApi,
is_valid_url,
)
-from core.db.session_factory import configure_session_factory
-from extensions.ext_database import db
from services.tools.mcp_tools_manage_service import ReconnectResult
@@ -61,17 +60,8 @@ def _mock_user_tenant():
@pytest.fixture
-def client():
- app = Flask(__name__)
- app.config["TESTING"] = True
- app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
- api = Api(app)
- api.add_resource(ToolProviderMCPApi, "/console/api/workspaces/current/tool-provider/mcp")
- db.init_app(app)
- # Configure session factory used by controller code
- with app.app_context():
- configure_session_factory(db.engine)
- return app.test_client()
+def client(flask_app_with_containers):
+ return flask_app_with_containers.test_client()
@patch(
@@ -79,7 +69,7 @@ def client():
return_value=(MagicMock(id="u1"), "t1"),
autospec=True,
)
-@patch("controllers.console.workspace.tool_providers.Session", autospec=True)
+@patch("controllers.console.workspace.tool_providers.sessionmaker", autospec=True)
@patch("controllers.console.workspace.tool_providers.MCPToolManageService._reconnect_with_url", autospec=True)
@pytest.mark.usefixtures("_mock_cache", "_mock_user_tenant")
def test_create_mcp_provider_populates_tools(mock_reconnect, mock_session, mock_current_account_with_tenant, client):
@@ -98,7 +88,7 @@ def test_create_mcp_provider_populates_tools(mock_reconnect, mock_session, mock_
create_result.id = "provider-1"
svc.create_provider.return_value = create_result
svc.get_provider.return_value = MagicMock(id="provider-1", tenant_id="t1") # used by reload path
- mock_session.return_value.__enter__.return_value = MagicMock()
+ mock_session.return_value.begin.return_value.__enter__.return_value = MagicMock()
# Patch MCPToolManageService constructed inside controller
with patch("controllers.console.workspace.tool_providers.MCPToolManageService", return_value=svc, autospec=True):
payload = {
@@ -152,10 +142,14 @@ class TestUtils:
assert not is_valid_url("")
assert not is_valid_url("ftp://example.com")
assert not is_valid_url("not-a-url")
- assert not is_valid_url(None)
+ assert not is_valid_url(None) # type: ignore[arg-type]
class TestToolProviderListApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_success(self, app):
api = ToolProviderListApi()
method = unwrap(api.get)
@@ -175,6 +169,10 @@ class TestToolProviderListApi:
class TestBuiltinProviderApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_list_tools(self, app):
api = ToolBuiltinProviderListToolsApi()
method = unwrap(api.get)
@@ -379,6 +377,10 @@ class TestBuiltinProviderApis:
class TestApiProviderApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_add(self, app):
api = ToolApiProviderAddApi()
method = unwrap(api.post)
@@ -502,6 +504,10 @@ class TestApiProviderApis:
class TestWorkflowApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_create(self, app):
api = ToolWorkflowProviderCreateApi()
method = unwrap(api.post)
@@ -587,6 +593,10 @@ class TestWorkflowApis:
class TestLists:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_builtin_list(self, app):
api = ToolBuiltinListApi()
method = unwrap(api.get)
@@ -649,6 +659,10 @@ class TestLists:
class TestLabels:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_labels(self, app):
api = ToolLabelsApi()
method = unwrap(api.get)
@@ -664,6 +678,10 @@ class TestLabels:
class TestOAuth:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_oauth_no_client(self, app):
api = ToolPluginOAuthApi()
method = unwrap(api.get)
@@ -692,6 +710,10 @@ class TestOAuth:
class TestOAuthCustomClient:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_save_custom_client(self, app):
api = ToolOAuthCustomClient()
method = unwrap(api.post)
diff --git a/api/tests/unit_tests/controllers/console/workspace/test_trigger_providers.py b/api/tests/test_containers_integration_tests/controllers/console/workspace/test_trigger_providers.py
similarity index 94%
rename from api/tests/unit_tests/controllers/console/workspace/test_trigger_providers.py
rename to api/tests/test_containers_integration_tests/controllers/console/workspace/test_trigger_providers.py
index 4776bc7af0..ca8195af53 100644
--- a/api/tests/unit_tests/controllers/console/workspace/test_trigger_providers.py
+++ b/api/tests/test_containers_integration_tests/controllers/console/workspace/test_trigger_providers.py
@@ -1,3 +1,7 @@
+"""Testcontainers integration tests for controllers.console.workspace.trigger_providers endpoints."""
+
+from __future__ import annotations
+
from unittest.mock import MagicMock, patch
import pytest
@@ -40,6 +44,10 @@ def mock_user():
class TestTriggerProviderApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_icon_success(self, app):
api = TriggerProviderIconApi()
method = unwrap(api.get)
@@ -84,6 +92,10 @@ class TestTriggerProviderApis:
class TestTriggerSubscriptionListApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_list_success(self, app):
api = TriggerSubscriptionListApi()
method = unwrap(api.get)
@@ -115,6 +127,10 @@ class TestTriggerSubscriptionListApi:
class TestTriggerSubscriptionBuilderApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_create_builder(self, app):
api = TriggerSubscriptionBuilderCreateApi()
method = unwrap(api.post)
@@ -219,6 +235,10 @@ class TestTriggerSubscriptionBuilderApis:
class TestTriggerSubscriptionCrud:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_update_rename_only(self, app):
api = TriggerSubscriptionUpdateApi()
method = unwrap(api.post)
@@ -286,14 +306,14 @@ class TestTriggerSubscriptionCrud:
app.test_request_context("/"),
patch("controllers.console.workspace.trigger_providers.current_user", mock_user()),
patch("controllers.console.workspace.trigger_providers.db") as mock_db,
- patch("controllers.console.workspace.trigger_providers.Session") as mock_session_cls,
+ patch("controllers.console.workspace.trigger_providers.sessionmaker") as mock_session_cls,
patch("controllers.console.workspace.trigger_providers.TriggerProviderService.delete_trigger_provider"),
patch(
"controllers.console.workspace.trigger_providers.TriggerSubscriptionOperatorService.delete_plugin_trigger_by_subscription"
),
):
mock_db.engine = MagicMock()
- mock_session_cls.return_value.__enter__.return_value = mock_session
+ mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
result = method(api, "sub1")
@@ -307,20 +327,24 @@ class TestTriggerSubscriptionCrud:
app.test_request_context("/"),
patch("controllers.console.workspace.trigger_providers.current_user", mock_user()),
patch("controllers.console.workspace.trigger_providers.db") as mock_db,
- patch("controllers.console.workspace.trigger_providers.Session") as session_cls,
+ patch("controllers.console.workspace.trigger_providers.sessionmaker") as session_cls,
patch(
"controllers.console.workspace.trigger_providers.TriggerProviderService.delete_trigger_provider",
side_effect=ValueError("bad"),
),
):
mock_db.engine = MagicMock()
- session_cls.return_value.__enter__.return_value = MagicMock()
+ session_cls.return_value.begin.return_value.__enter__.return_value = MagicMock()
with pytest.raises(BadRequest):
method(api, "sub1")
class TestTriggerOAuthApis:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_oauth_authorize_success(self, app):
api = TriggerOAuthAuthorizeApi()
method = unwrap(api.get)
@@ -455,6 +479,10 @@ class TestTriggerOAuthApis:
class TestTriggerOAuthClientManageApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_get_client(self, app):
api = TriggerOAuthClientManageApi()
method = unwrap(api.get)
@@ -527,6 +555,10 @@ class TestTriggerOAuthClientManageApi:
class TestTriggerSubscriptionVerifyApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
def test_verify_success(self, app):
api = TriggerSubscriptionVerifyApi()
method = unwrap(api.post)
diff --git a/api/tests/test_containers_integration_tests/controllers/console/workspace/test_workspace_wraps.py b/api/tests/test_containers_integration_tests/controllers/console/workspace/test_workspace_wraps.py
new file mode 100644
index 0000000000..99cabb6cea
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/controllers/console/workspace/test_workspace_wraps.py
@@ -0,0 +1,185 @@
+"""Testcontainers integration tests for plugin_permission_required decorator."""
+
+from __future__ import annotations
+
+from types import SimpleNamespace
+from unittest.mock import patch
+
+import pytest
+from sqlalchemy.orm import Session
+from werkzeug.exceptions import Forbidden
+
+from controllers.console.workspace import plugin_permission_required
+from models.account import Tenant, TenantPluginPermission, TenantStatus
+
+
+def _create_tenant(db_session: Session) -> Tenant:
+ tenant = Tenant(name="test-tenant", status=TenantStatus.NORMAL, plan="basic")
+ db_session.add(tenant)
+ db_session.commit()
+ db_session.expire_all()
+ return tenant
+
+
+def _create_permission(
+ db_session: Session,
+ tenant_id: str,
+ install: TenantPluginPermission.InstallPermission = TenantPluginPermission.InstallPermission.EVERYONE,
+ debug: TenantPluginPermission.DebugPermission = TenantPluginPermission.DebugPermission.EVERYONE,
+) -> TenantPluginPermission:
+ perm = TenantPluginPermission(
+ tenant_id=tenant_id,
+ install_permission=install,
+ debug_permission=debug,
+ )
+ db_session.add(perm)
+ db_session.commit()
+ db_session.expire_all()
+ return perm
+
+
+class TestPluginPermissionRequired:
+ def test_allows_without_permission(self, db_session_with_containers: Session):
+ tenant = _create_tenant(db_session_with_containers)
+ user = SimpleNamespace(is_admin_or_owner=False)
+
+ with patch(
+ "controllers.console.workspace.current_account_with_tenant",
+ return_value=(user, tenant.id),
+ ):
+
+ @plugin_permission_required()
+ def handler():
+ return "ok"
+
+ assert handler() == "ok"
+
+ def test_install_nobody_forbidden(self, db_session_with_containers: Session):
+ tenant = _create_tenant(db_session_with_containers)
+ _create_permission(
+ db_session_with_containers,
+ tenant.id,
+ install=TenantPluginPermission.InstallPermission.NOBODY,
+ debug=TenantPluginPermission.DebugPermission.EVERYONE,
+ )
+ user = SimpleNamespace(is_admin_or_owner=True)
+
+ with patch(
+ "controllers.console.workspace.current_account_with_tenant",
+ return_value=(user, tenant.id),
+ ):
+
+ @plugin_permission_required(install_required=True)
+ def handler():
+ return "ok"
+
+ with pytest.raises(Forbidden):
+ handler()
+
+ def test_install_admin_requires_admin(self, db_session_with_containers: Session):
+ tenant = _create_tenant(db_session_with_containers)
+ _create_permission(
+ db_session_with_containers,
+ tenant.id,
+ install=TenantPluginPermission.InstallPermission.ADMINS,
+ debug=TenantPluginPermission.DebugPermission.EVERYONE,
+ )
+ user = SimpleNamespace(is_admin_or_owner=False)
+
+ with patch(
+ "controllers.console.workspace.current_account_with_tenant",
+ return_value=(user, tenant.id),
+ ):
+
+ @plugin_permission_required(install_required=True)
+ def handler():
+ return "ok"
+
+ with pytest.raises(Forbidden):
+ handler()
+
+ def test_install_admin_allows_admin(self, db_session_with_containers: Session):
+ tenant = _create_tenant(db_session_with_containers)
+ _create_permission(
+ db_session_with_containers,
+ tenant.id,
+ install=TenantPluginPermission.InstallPermission.ADMINS,
+ debug=TenantPluginPermission.DebugPermission.EVERYONE,
+ )
+ user = SimpleNamespace(is_admin_or_owner=True)
+
+ with patch(
+ "controllers.console.workspace.current_account_with_tenant",
+ return_value=(user, tenant.id),
+ ):
+
+ @plugin_permission_required(install_required=True)
+ def handler():
+ return "ok"
+
+ assert handler() == "ok"
+
+ def test_debug_nobody_forbidden(self, db_session_with_containers: Session):
+ tenant = _create_tenant(db_session_with_containers)
+ _create_permission(
+ db_session_with_containers,
+ tenant.id,
+ install=TenantPluginPermission.InstallPermission.EVERYONE,
+ debug=TenantPluginPermission.DebugPermission.NOBODY,
+ )
+ user = SimpleNamespace(is_admin_or_owner=True)
+
+ with patch(
+ "controllers.console.workspace.current_account_with_tenant",
+ return_value=(user, tenant.id),
+ ):
+
+ @plugin_permission_required(debug_required=True)
+ def handler():
+ return "ok"
+
+ with pytest.raises(Forbidden):
+ handler()
+
+ def test_debug_admin_requires_admin(self, db_session_with_containers: Session):
+ tenant = _create_tenant(db_session_with_containers)
+ _create_permission(
+ db_session_with_containers,
+ tenant.id,
+ install=TenantPluginPermission.InstallPermission.EVERYONE,
+ debug=TenantPluginPermission.DebugPermission.ADMINS,
+ )
+ user = SimpleNamespace(is_admin_or_owner=False)
+
+ with patch(
+ "controllers.console.workspace.current_account_with_tenant",
+ return_value=(user, tenant.id),
+ ):
+
+ @plugin_permission_required(debug_required=True)
+ def handler():
+ return "ok"
+
+ with pytest.raises(Forbidden):
+ handler()
+
+ def test_debug_admin_allows_admin(self, db_session_with_containers: Session):
+ tenant = _create_tenant(db_session_with_containers)
+ _create_permission(
+ db_session_with_containers,
+ tenant.id,
+ install=TenantPluginPermission.InstallPermission.EVERYONE,
+ debug=TenantPluginPermission.DebugPermission.ADMINS,
+ )
+ user = SimpleNamespace(is_admin_or_owner=True)
+
+ with patch(
+ "controllers.console.workspace.current_account_with_tenant",
+ return_value=(user, tenant.id),
+ ):
+
+ @plugin_permission_required(debug_required=True)
+ def handler():
+ return "ok"
+
+ assert handler() == "ok"
diff --git a/api/tests/unit_tests/controllers/mcp/test_mcp.py b/api/tests/test_containers_integration_tests/controllers/mcp/test_mcp.py
similarity index 96%
rename from api/tests/unit_tests/controllers/mcp/test_mcp.py
rename to api/tests/test_containers_integration_tests/controllers/mcp/test_mcp.py
index b93770e9c2..90670a9db5 100644
--- a/api/tests/unit_tests/controllers/mcp/test_mcp.py
+++ b/api/tests/test_containers_integration_tests/controllers/mcp/test_mcp.py
@@ -1,5 +1,10 @@
+"""Testcontainers integration tests for controllers.mcp.mcp endpoints."""
+
+from __future__ import annotations
+
import types
from unittest.mock import MagicMock, patch
+from uuid import uuid4
import pytest
from flask import Response
@@ -14,24 +19,6 @@ def unwrap(func):
return func
-@pytest.fixture(autouse=True)
-def mock_db():
- module.db = types.SimpleNamespace(engine=object())
-
-
-@pytest.fixture
-def fake_session():
- session = MagicMock()
- session.__enter__.return_value = session
- session.__exit__.return_value = False
- return session
-
-
-@pytest.fixture(autouse=True)
-def mock_session(fake_session):
- module.Session = MagicMock(return_value=fake_session)
-
-
@pytest.fixture(autouse=True)
def mock_mcp_ns():
fake_ns = types.SimpleNamespace()
@@ -44,8 +31,13 @@ def fake_payload(data):
module.mcp_ns.payload = data
+_TENANT_ID = str(uuid4())
+_APP_ID = str(uuid4())
+_SERVER_ID = str(uuid4())
+
+
class DummyServer:
- def __init__(self, status, app_id="app-1", tenant_id="tenant-1", server_id="srv-1"):
+ def __init__(self, status, app_id=_APP_ID, tenant_id=_TENANT_ID, server_id=_SERVER_ID):
self.status = status
self.app_id = app_id
self.tenant_id = tenant_id
@@ -54,8 +46,8 @@ class DummyServer:
class DummyApp:
def __init__(self, mode, workflow=None, app_model_config=None):
- self.id = "app-1"
- self.tenant_id = "tenant-1"
+ self.id = _APP_ID
+ self.tenant_id = _TENANT_ID
self.mode = mode
self.workflow = workflow
self.app_model_config = app_model_config
@@ -76,6 +68,7 @@ class DummyResult:
return {"jsonrpc": "2.0", "result": "ok", "id": 1}
+@pytest.mark.usefixtures("flask_req_ctx_with_containers")
class TestMCPAppApi:
@patch.object(module, "handle_mcp_request", return_value=DummyResult(), autospec=True)
def test_success_request(self, mock_handle):
diff --git a/api/tests/unit_tests/controllers/web/test_conversation.py b/api/tests/test_containers_integration_tests/controllers/web/test_conversation.py
similarity index 72%
rename from api/tests/unit_tests/controllers/web/test_conversation.py
rename to api/tests/test_containers_integration_tests/controllers/web/test_conversation.py
index e5adbbbf66..e1e6741014 100644
--- a/api/tests/unit_tests/controllers/web/test_conversation.py
+++ b/api/tests/test_containers_integration_tests/controllers/web/test_conversation.py
@@ -1,4 +1,4 @@
-"""Unit tests for controllers.web.conversation endpoints."""
+"""Testcontainers integration tests for controllers.web.conversation endpoints."""
from __future__ import annotations
@@ -7,7 +7,6 @@ from unittest.mock import MagicMock, patch
from uuid import uuid4
import pytest
-from flask import Flask
from werkzeug.exceptions import NotFound
from controllers.web.conversation import (
@@ -33,18 +32,18 @@ def _end_user() -> SimpleNamespace:
return SimpleNamespace(id="eu-1")
-# ---------------------------------------------------------------------------
-# ConversationListApi
-# ---------------------------------------------------------------------------
class TestConversationListApi:
- def test_non_chat_mode_raises(self, app: Flask) -> None:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_non_chat_mode_raises(self, app) -> None:
with app.test_request_context("/conversations"):
with pytest.raises(NotChatAppError):
ConversationListApi().get(_completion_app(), _end_user())
@patch("controllers.web.conversation.WebConversationService.pagination_by_last_id")
- @patch("controllers.web.conversation.db")
- def test_happy_path(self, mock_db: MagicMock, mock_paginate: MagicMock, app: Flask) -> None:
+ def test_happy_path(self, mock_paginate: MagicMock, app) -> None:
conv_id = str(uuid4())
conv = SimpleNamespace(
id=conv_id,
@@ -56,34 +55,26 @@ class TestConversationListApi:
updated_at=1700000000,
)
mock_paginate.return_value = SimpleNamespace(limit=20, has_more=False, data=[conv])
- mock_db.engine = "engine"
- session_mock = MagicMock()
- session_ctx = MagicMock()
- session_ctx.__enter__ = MagicMock(return_value=session_mock)
- session_ctx.__exit__ = MagicMock(return_value=False)
-
- with (
- app.test_request_context("/conversations?limit=20"),
- patch("controllers.web.conversation.Session", return_value=session_ctx),
- ):
+ with app.test_request_context("/conversations?limit=20"):
result = ConversationListApi().get(_chat_app(), _end_user())
assert result["limit"] == 20
assert result["has_more"] is False
-# ---------------------------------------------------------------------------
-# ConversationApi (delete)
-# ---------------------------------------------------------------------------
class TestConversationApi:
- def test_non_chat_mode_raises(self, app: Flask) -> None:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_non_chat_mode_raises(self, app) -> None:
with app.test_request_context(f"/conversations/{uuid4()}"):
with pytest.raises(NotChatAppError):
ConversationApi().delete(_completion_app(), _end_user(), uuid4())
@patch("controllers.web.conversation.ConversationService.delete")
- def test_delete_success(self, mock_delete: MagicMock, app: Flask) -> None:
+ def test_delete_success(self, mock_delete: MagicMock, app) -> None:
c_id = uuid4()
with app.test_request_context(f"/conversations/{c_id}"):
result, status = ConversationApi().delete(_chat_app(), _end_user(), c_id)
@@ -92,25 +83,26 @@ class TestConversationApi:
assert result["result"] == "success"
@patch("controllers.web.conversation.ConversationService.delete", side_effect=ConversationNotExistsError())
- def test_delete_not_found(self, mock_delete: MagicMock, app: Flask) -> None:
+ def test_delete_not_found(self, mock_delete: MagicMock, app) -> None:
c_id = uuid4()
with app.test_request_context(f"/conversations/{c_id}"):
with pytest.raises(NotFound, match="Conversation Not Exists"):
ConversationApi().delete(_chat_app(), _end_user(), c_id)
-# ---------------------------------------------------------------------------
-# ConversationRenameApi
-# ---------------------------------------------------------------------------
class TestConversationRenameApi:
- def test_non_chat_mode_raises(self, app: Flask) -> None:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_non_chat_mode_raises(self, app) -> None:
with app.test_request_context(f"/conversations/{uuid4()}/name", method="POST", json={"name": "x"}):
with pytest.raises(NotChatAppError):
ConversationRenameApi().post(_completion_app(), _end_user(), uuid4())
@patch("controllers.web.conversation.ConversationService.rename")
@patch("controllers.web.conversation.web_ns")
- def test_rename_success(self, mock_ns: MagicMock, mock_rename: MagicMock, app: Flask) -> None:
+ def test_rename_success(self, mock_ns: MagicMock, mock_rename: MagicMock, app) -> None:
c_id = uuid4()
mock_ns.payload = {"name": "New Name", "auto_generate": False}
conv = SimpleNamespace(
@@ -134,7 +126,7 @@ class TestConversationRenameApi:
side_effect=ConversationNotExistsError(),
)
@patch("controllers.web.conversation.web_ns")
- def test_rename_not_found(self, mock_ns: MagicMock, mock_rename: MagicMock, app: Flask) -> None:
+ def test_rename_not_found(self, mock_ns: MagicMock, mock_rename: MagicMock, app) -> None:
c_id = uuid4()
mock_ns.payload = {"name": "X", "auto_generate": False}
@@ -143,17 +135,18 @@ class TestConversationRenameApi:
ConversationRenameApi().post(_chat_app(), _end_user(), c_id)
-# ---------------------------------------------------------------------------
-# ConversationPinApi / ConversationUnPinApi
-# ---------------------------------------------------------------------------
class TestConversationPinApi:
- def test_non_chat_mode_raises(self, app: Flask) -> None:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_non_chat_mode_raises(self, app) -> None:
with app.test_request_context(f"/conversations/{uuid4()}/pin", method="PATCH"):
with pytest.raises(NotChatAppError):
ConversationPinApi().patch(_completion_app(), _end_user(), uuid4())
@patch("controllers.web.conversation.WebConversationService.pin")
- def test_pin_success(self, mock_pin: MagicMock, app: Flask) -> None:
+ def test_pin_success(self, mock_pin: MagicMock, app) -> None:
c_id = uuid4()
with app.test_request_context(f"/conversations/{c_id}/pin", method="PATCH"):
result = ConversationPinApi().patch(_chat_app(), _end_user(), c_id)
@@ -161,7 +154,7 @@ class TestConversationPinApi:
assert result["result"] == "success"
@patch("controllers.web.conversation.WebConversationService.pin", side_effect=ConversationNotExistsError())
- def test_pin_not_found(self, mock_pin: MagicMock, app: Flask) -> None:
+ def test_pin_not_found(self, mock_pin: MagicMock, app) -> None:
c_id = uuid4()
with app.test_request_context(f"/conversations/{c_id}/pin", method="PATCH"):
with pytest.raises(NotFound):
@@ -169,13 +162,17 @@ class TestConversationPinApi:
class TestConversationUnPinApi:
- def test_non_chat_mode_raises(self, app: Flask) -> None:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def test_non_chat_mode_raises(self, app) -> None:
with app.test_request_context(f"/conversations/{uuid4()}/unpin", method="PATCH"):
with pytest.raises(NotChatAppError):
ConversationUnPinApi().patch(_completion_app(), _end_user(), uuid4())
@patch("controllers.web.conversation.WebConversationService.unpin")
- def test_unpin_success(self, mock_unpin: MagicMock, app: Flask) -> None:
+ def test_unpin_success(self, mock_unpin: MagicMock, app) -> None:
c_id = uuid4()
with app.test_request_context(f"/conversations/{c_id}/unpin", method="PATCH"):
result = ConversationUnPinApi().patch(_chat_app(), _end_user(), c_id)
diff --git a/api/tests/unit_tests/controllers/web/test_web_forgot_password.py b/api/tests/test_containers_integration_tests/controllers/web/test_web_forgot_password.py
similarity index 91%
rename from api/tests/unit_tests/controllers/web/test_web_forgot_password.py
rename to api/tests/test_containers_integration_tests/controllers/web/test_web_forgot_password.py
index 3d7c319947..04ad143103 100644
--- a/api/tests/unit_tests/controllers/web/test_web_forgot_password.py
+++ b/api/tests/test_containers_integration_tests/controllers/web/test_web_forgot_password.py
@@ -1,9 +1,12 @@
+"""Testcontainers integration tests for controllers.web.forgot_password endpoints."""
+
+from __future__ import annotations
+
import base64
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
import pytest
-from flask import Flask
from controllers.web.forgot_password import (
ForgotPasswordCheckApi,
@@ -12,13 +15,6 @@ from controllers.web.forgot_password import (
)
-@pytest.fixture
-def app():
- flask_app = Flask(__name__)
- flask_app.config["TESTING"] = True
- return flask_app
-
-
@pytest.fixture(autouse=True)
def _patch_wraps():
wraps_features = SimpleNamespace(enable_email_password_login=True)
@@ -33,11 +29,15 @@ def _patch_wraps():
class TestForgotPasswordSendEmailApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
@patch("controllers.web.forgot_password.AccountService.send_reset_password_email")
@patch("controllers.web.forgot_password.AccountService.get_account_by_email_with_case_fallback")
@patch("controllers.web.forgot_password.AccountService.is_email_send_ip_limit", return_value=False)
@patch("controllers.web.forgot_password.extract_remote_ip", return_value="127.0.0.1")
- @patch("controllers.web.forgot_password.Session")
+ @patch("controllers.web.forgot_password.sessionmaker")
def test_should_normalize_email_before_sending(
self,
mock_session_cls,
@@ -51,7 +51,7 @@ class TestForgotPasswordSendEmailApi:
mock_get_account.return_value = mock_account
mock_send_mail.return_value = "token-123"
mock_session = MagicMock()
- mock_session_cls.return_value.__enter__.return_value = mock_session
+ mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
with patch("controllers.web.forgot_password.db", SimpleNamespace(engine="engine")):
with app.test_request_context(
@@ -69,6 +69,10 @@ class TestForgotPasswordSendEmailApi:
class TestForgotPasswordCheckApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
@patch("controllers.web.forgot_password.AccountService.reset_forgot_password_error_rate_limit")
@patch("controllers.web.forgot_password.AccountService.generate_reset_password_token")
@patch("controllers.web.forgot_password.AccountService.revoke_reset_password_token")
@@ -143,9 +147,13 @@ class TestForgotPasswordCheckApi:
class TestForgotPasswordResetApi:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
@patch("controllers.web.forgot_password.ForgotPasswordResetApi._update_existing_account")
@patch("controllers.web.forgot_password.AccountService.get_account_by_email_with_case_fallback")
- @patch("controllers.web.forgot_password.Session")
+ @patch("controllers.web.forgot_password.sessionmaker")
@patch("controllers.web.forgot_password.AccountService.revoke_reset_password_token")
@patch("controllers.web.forgot_password.AccountService.get_reset_password_data")
def test_should_fetch_account_with_fallback(
@@ -161,7 +169,7 @@ class TestForgotPasswordResetApi:
mock_account = MagicMock()
mock_get_account.return_value = mock_account
mock_session = MagicMock()
- mock_session_cls.return_value.__enter__.return_value = mock_session
+ mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
with patch("controllers.web.forgot_password.db", SimpleNamespace(engine="engine")):
with app.test_request_context(
@@ -182,7 +190,7 @@ class TestForgotPasswordResetApi:
@patch("controllers.web.forgot_password.hash_password", return_value=b"hashed-value")
@patch("controllers.web.forgot_password.secrets.token_bytes", return_value=b"0123456789abcdef")
- @patch("controllers.web.forgot_password.Session")
+ @patch("controllers.web.forgot_password.sessionmaker")
@patch("controllers.web.forgot_password.AccountService.revoke_reset_password_token")
@patch("controllers.web.forgot_password.AccountService.get_reset_password_data")
@patch("controllers.web.forgot_password.AccountService.get_account_by_email_with_case_fallback")
@@ -200,7 +208,7 @@ class TestForgotPasswordResetApi:
account = MagicMock()
mock_get_account.return_value = account
mock_session = MagicMock()
- mock_session_cls.return_value.__enter__.return_value = mock_session
+ mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
with patch("controllers.web.forgot_password.db", SimpleNamespace(engine="engine")):
with app.test_request_context(
@@ -223,4 +231,3 @@ class TestForgotPasswordResetApi:
assert account.password == expected_password
expected_salt = base64.b64encode(b"0123456789abcdef").decode()
assert account.password_salt == expected_salt
- mock_session.commit.assert_called_once()
diff --git a/api/tests/unit_tests/controllers/web/test_wraps.py b/api/tests/test_containers_integration_tests/controllers/web/test_wraps.py
similarity index 67%
rename from api/tests/unit_tests/controllers/web/test_wraps.py
rename to api/tests/test_containers_integration_tests/controllers/web/test_wraps.py
index 85049ae975..19833cc772 100644
--- a/api/tests/unit_tests/controllers/web/test_wraps.py
+++ b/api/tests/test_containers_integration_tests/controllers/web/test_wraps.py
@@ -1,13 +1,14 @@
-"""Unit tests for controllers.web.wraps — JWT auth decorator and validation helpers."""
+"""Testcontainers integration tests for controllers.web.wraps — JWT auth decorator and validation helpers."""
from __future__ import annotations
from datetime import UTC, datetime, timedelta
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
+from uuid import uuid4
import pytest
-from flask import Flask
+from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, NotFound, Unauthorized
from controllers.web.error import WebAppAuthAccessDeniedError, WebAppAuthRequiredError
@@ -18,12 +19,8 @@ from controllers.web.wraps import (
)
-# ---------------------------------------------------------------------------
-# _validate_webapp_token
-# ---------------------------------------------------------------------------
class TestValidateWebappToken:
def test_enterprise_enabled_and_app_auth_requires_webapp_source(self) -> None:
- """When both flags are true, a non-webapp source must raise."""
decoded = {"token_source": "other"}
with pytest.raises(WebAppAuthRequiredError):
_validate_webapp_token(decoded, app_web_auth_enabled=True, system_webapp_auth_enabled=True)
@@ -38,7 +35,6 @@ class TestValidateWebappToken:
_validate_webapp_token(decoded, app_web_auth_enabled=True, system_webapp_auth_enabled=True)
def test_public_app_rejects_webapp_source(self) -> None:
- """When auth is not required, a webapp-sourced token must be rejected."""
decoded = {"token_source": "webapp"}
with pytest.raises(Unauthorized):
_validate_webapp_token(decoded, app_web_auth_enabled=False, system_webapp_auth_enabled=False)
@@ -52,18 +48,13 @@ class TestValidateWebappToken:
_validate_webapp_token(decoded, app_web_auth_enabled=False, system_webapp_auth_enabled=False)
def test_system_enabled_but_app_public(self) -> None:
- """system_webapp_auth_enabled=True but app is public — webapp source rejected."""
decoded = {"token_source": "webapp"}
with pytest.raises(Unauthorized):
_validate_webapp_token(decoded, app_web_auth_enabled=False, system_webapp_auth_enabled=True)
-# ---------------------------------------------------------------------------
-# _validate_user_accessibility
-# ---------------------------------------------------------------------------
class TestValidateUserAccessibility:
def test_skips_when_auth_disabled(self) -> None:
- """No checks when system or app auth is disabled."""
_validate_user_accessibility(
decoded={},
app_code="code",
@@ -123,7 +114,6 @@ class TestValidateUserAccessibility:
def test_external_auth_type_checks_sso_update_time(
self, mock_perm_check: MagicMock, mock_sso_time: MagicMock
) -> None:
- # granted_at is before SSO update time → denied
mock_sso_time.return_value = datetime.now(UTC)
old_granted = int((datetime.now(UTC) - timedelta(hours=1)).timestamp())
decoded = {"user_id": "u1", "auth_type": "external", "granted_at": old_granted}
@@ -164,7 +154,6 @@ class TestValidateUserAccessibility:
recent_granted = int(datetime.now(UTC).timestamp())
decoded = {"user_id": "u1", "auth_type": "external", "granted_at": recent_granted}
settings = SimpleNamespace(access_mode="public")
- # Should not raise
_validate_user_accessibility(
decoded=decoded,
app_code="code",
@@ -191,10 +180,49 @@ class TestValidateUserAccessibility:
)
-# ---------------------------------------------------------------------------
-# decode_jwt_token
-# ---------------------------------------------------------------------------
class TestDecodeJwtToken:
+ @pytest.fixture
+ def app(self, flask_app_with_containers):
+ return flask_app_with_containers
+
+ def _create_app_site_enduser(self, db_session: Session, *, enable_site: bool = True):
+ from models.model import App, AppMode, CustomizeTokenStrategy, EndUser, Site
+
+ tenant_id = str(uuid4())
+ app_model = App(
+ tenant_id=tenant_id,
+ mode=AppMode.CHAT.value,
+ name="test-app",
+ enable_site=enable_site,
+ enable_api=True,
+ )
+ db_session.add(app_model)
+ db_session.commit()
+ db_session.expire_all()
+
+ site = Site(
+ app_id=app_model.id,
+ title="test-site",
+ default_language="en-US",
+ customize_token_strategy=CustomizeTokenStrategy.NOT_ALLOW,
+ code="code1",
+ )
+ db_session.add(site)
+ db_session.commit()
+ db_session.expire_all()
+
+ end_user = EndUser(
+ tenant_id=tenant_id,
+ app_id=app_model.id,
+ type="browser",
+ session_id="sess-1",
+ )
+ db_session.add(end_user)
+ db_session.commit()
+ db_session.expire_all()
+
+ return app_model, site, end_user
+
@patch("controllers.web.wraps._validate_user_accessibility")
@patch("controllers.web.wraps._validate_webapp_token")
@patch("controllers.web.wraps.EnterpriseService.WebAppAuth.get_app_access_mode_by_id")
@@ -202,10 +230,8 @@ class TestDecodeJwtToken:
@patch("controllers.web.wraps.FeatureService.get_system_features")
@patch("controllers.web.wraps.PassportService")
@patch("controllers.web.wraps.extract_webapp_passport")
- @patch("controllers.web.wraps.db")
def test_happy_path(
self,
- mock_db: MagicMock,
mock_extract: MagicMock,
mock_passport_cls: MagicMock,
mock_features: MagicMock,
@@ -213,40 +239,28 @@ class TestDecodeJwtToken:
mock_access_mode: MagicMock,
mock_validate_token: MagicMock,
mock_validate_user: MagicMock,
- app: Flask,
+ app,
+ db_session_with_containers: Session,
) -> None:
+ app_model, site, end_user = self._create_app_site_enduser(db_session_with_containers)
+
mock_extract.return_value = "jwt-token"
mock_passport_cls.return_value.verify.return_value = {
- "app_code": "code1",
- "app_id": "app-1",
- "end_user_id": "eu-1",
+ "app_code": site.code,
+ "app_id": app_model.id,
+ "end_user_id": end_user.id,
}
mock_features.return_value = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=False))
- app_model = SimpleNamespace(id="app-1", enable_site=True)
- site = SimpleNamespace(code="code1")
- end_user = SimpleNamespace(id="eu-1", session_id="sess-1")
+ with app.test_request_context("/", headers={"X-App-Code": site.code}):
+ result_app, result_user = decode_jwt_token()
- # Configure session mock to return correct objects via scalar()
- session_mock = MagicMock()
- session_mock.scalar.side_effect = [app_model, site, end_user]
- session_ctx = MagicMock()
- session_ctx.__enter__ = MagicMock(return_value=session_mock)
- session_ctx.__exit__ = MagicMock(return_value=False)
- mock_db.engine = "engine"
-
- with patch("controllers.web.wraps.Session", return_value=session_ctx):
- with app.test_request_context("/", headers={"X-App-Code": "code1"}):
- result_app, result_user = decode_jwt_token()
-
- assert result_app.id == "app-1"
- assert result_user.id == "eu-1"
+ assert result_app.id == app_model.id
+ assert result_user.id == end_user.id
@patch("controllers.web.wraps.FeatureService.get_system_features")
@patch("controllers.web.wraps.extract_webapp_passport")
- def test_missing_token_raises_unauthorized(
- self, mock_extract: MagicMock, mock_features: MagicMock, app: Flask
- ) -> None:
+ def test_missing_token_raises_unauthorized(self, mock_extract: MagicMock, mock_features: MagicMock, app) -> None:
mock_features.return_value = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=False))
mock_extract.return_value = None
@@ -257,137 +271,98 @@ class TestDecodeJwtToken:
@patch("controllers.web.wraps.FeatureService.get_system_features")
@patch("controllers.web.wraps.PassportService")
@patch("controllers.web.wraps.extract_webapp_passport")
- @patch("controllers.web.wraps.db")
def test_missing_app_raises_not_found(
self,
- mock_db: MagicMock,
mock_extract: MagicMock,
mock_passport_cls: MagicMock,
mock_features: MagicMock,
- app: Flask,
+ app,
) -> None:
+ non_existent_id = str(uuid4())
mock_extract.return_value = "jwt-token"
mock_passport_cls.return_value.verify.return_value = {
"app_code": "code1",
- "app_id": "app-1",
- "end_user_id": "eu-1",
+ "app_id": non_existent_id,
+ "end_user_id": str(uuid4()),
}
mock_features.return_value = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=False))
- session_mock = MagicMock()
- session_mock.scalar.return_value = None # No app found
- session_ctx = MagicMock()
- session_ctx.__enter__ = MagicMock(return_value=session_mock)
- session_ctx.__exit__ = MagicMock(return_value=False)
- mock_db.engine = "engine"
-
- with patch("controllers.web.wraps.Session", return_value=session_ctx):
- with app.test_request_context("/", headers={"X-App-Code": "code1"}):
- with pytest.raises(NotFound):
- decode_jwt_token()
+ with app.test_request_context("/", headers={"X-App-Code": "code1"}):
+ with pytest.raises(NotFound):
+ decode_jwt_token()
@patch("controllers.web.wraps.FeatureService.get_system_features")
@patch("controllers.web.wraps.PassportService")
@patch("controllers.web.wraps.extract_webapp_passport")
- @patch("controllers.web.wraps.db")
def test_disabled_site_raises_bad_request(
self,
- mock_db: MagicMock,
mock_extract: MagicMock,
mock_passport_cls: MagicMock,
mock_features: MagicMock,
- app: Flask,
+ app,
+ db_session_with_containers: Session,
) -> None:
+ app_model, site, end_user = self._create_app_site_enduser(db_session_with_containers, enable_site=False)
+
mock_extract.return_value = "jwt-token"
mock_passport_cls.return_value.verify.return_value = {
- "app_code": "code1",
- "app_id": "app-1",
- "end_user_id": "eu-1",
+ "app_code": site.code,
+ "app_id": app_model.id,
+ "end_user_id": end_user.id,
}
mock_features.return_value = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=False))
- app_model = SimpleNamespace(id="app-1", enable_site=False)
-
- session_mock = MagicMock()
- # scalar calls: app_model, site (code found), then end_user
- session_mock.scalar.side_effect = [app_model, SimpleNamespace(code="code1"), None]
- session_ctx = MagicMock()
- session_ctx.__enter__ = MagicMock(return_value=session_mock)
- session_ctx.__exit__ = MagicMock(return_value=False)
- mock_db.engine = "engine"
-
- with patch("controllers.web.wraps.Session", return_value=session_ctx):
- with app.test_request_context("/", headers={"X-App-Code": "code1"}):
- with pytest.raises(BadRequest, match="Site is disabled"):
- decode_jwt_token()
+ with app.test_request_context("/", headers={"X-App-Code": site.code}):
+ with pytest.raises(BadRequest, match="Site is disabled"):
+ decode_jwt_token()
@patch("controllers.web.wraps.FeatureService.get_system_features")
@patch("controllers.web.wraps.PassportService")
@patch("controllers.web.wraps.extract_webapp_passport")
- @patch("controllers.web.wraps.db")
def test_missing_end_user_raises_not_found(
self,
- mock_db: MagicMock,
mock_extract: MagicMock,
mock_passport_cls: MagicMock,
mock_features: MagicMock,
- app: Flask,
+ app,
+ db_session_with_containers: Session,
) -> None:
+ app_model, site, _ = self._create_app_site_enduser(db_session_with_containers)
+ non_existent_eu = str(uuid4())
+
mock_extract.return_value = "jwt-token"
mock_passport_cls.return_value.verify.return_value = {
- "app_code": "code1",
- "app_id": "app-1",
- "end_user_id": "eu-1",
+ "app_code": site.code,
+ "app_id": app_model.id,
+ "end_user_id": non_existent_eu,
}
mock_features.return_value = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=False))
- app_model = SimpleNamespace(id="app-1", enable_site=True)
- site = SimpleNamespace(code="code1")
-
- session_mock = MagicMock()
- session_mock.scalar.side_effect = [app_model, site, None] # end_user is None
- session_ctx = MagicMock()
- session_ctx.__enter__ = MagicMock(return_value=session_mock)
- session_ctx.__exit__ = MagicMock(return_value=False)
- mock_db.engine = "engine"
-
- with patch("controllers.web.wraps.Session", return_value=session_ctx):
- with app.test_request_context("/", headers={"X-App-Code": "code1"}):
- with pytest.raises(NotFound):
- decode_jwt_token()
+ with app.test_request_context("/", headers={"X-App-Code": site.code}):
+ with pytest.raises(NotFound):
+ decode_jwt_token()
@patch("controllers.web.wraps.FeatureService.get_system_features")
@patch("controllers.web.wraps.PassportService")
@patch("controllers.web.wraps.extract_webapp_passport")
- @patch("controllers.web.wraps.db")
def test_user_id_mismatch_raises_unauthorized(
self,
- mock_db: MagicMock,
mock_extract: MagicMock,
mock_passport_cls: MagicMock,
mock_features: MagicMock,
- app: Flask,
+ app,
+ db_session_with_containers: Session,
) -> None:
+ app_model, site, end_user = self._create_app_site_enduser(db_session_with_containers)
+
mock_extract.return_value = "jwt-token"
mock_passport_cls.return_value.verify.return_value = {
- "app_code": "code1",
- "app_id": "app-1",
- "end_user_id": "eu-1",
+ "app_code": site.code,
+ "app_id": app_model.id,
+ "end_user_id": end_user.id,
}
mock_features.return_value = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=False))
- app_model = SimpleNamespace(id="app-1", enable_site=True)
- site = SimpleNamespace(code="code1")
- end_user = SimpleNamespace(id="eu-1", session_id="sess-1")
-
- session_mock = MagicMock()
- session_mock.scalar.side_effect = [app_model, site, end_user]
- session_ctx = MagicMock()
- session_ctx.__enter__ = MagicMock(return_value=session_mock)
- session_ctx.__exit__ = MagicMock(return_value=False)
- mock_db.engine = "engine"
-
- with patch("controllers.web.wraps.Session", return_value=session_ctx):
- with app.test_request_context("/", headers={"X-App-Code": "code1"}):
- with pytest.raises(Unauthorized, match="expired"):
- decode_jwt_token(user_id="different-user")
+ with app.test_request_context("/", headers={"X-App-Code": site.code}):
+ with pytest.raises(Unauthorized, match="expired"):
+ decode_jwt_token(user_id="different-user")
diff --git a/api/tests/test_containers_integration_tests/models/test_types_enum_text.py b/api/tests/test_containers_integration_tests/models/test_types_enum_text.py
index 206c84c750..9cf96c1ca7 100644
--- a/api/tests/test_containers_integration_tests/models/test_types_enum_text.py
+++ b/api/tests/test_containers_integration_tests/models/test_types_enum_text.py
@@ -1,6 +1,6 @@
from collections.abc import Callable, Iterable
from enum import StrEnum
-from typing import Any, NamedTuple, TypeVar
+from typing import Any, NamedTuple
import pytest
import sqlalchemy as sa
@@ -58,10 +58,7 @@ class _ColumnTest(_Base):
long_value: Mapped[_EnumWithLongValue] = mapped_column(EnumText(enum_class=_EnumWithLongValue), nullable=False)
-_T = TypeVar("_T")
-
-
-def _first(it: Iterable[_T]) -> _T:
+def _first[T](it: Iterable[T]) -> T:
ls = list(it)
if not ls:
raise ValueError("List is empty")
diff --git a/api/tests/test_containers_integration_tests/services/auth/test_auth_integration.py b/api/tests/test_containers_integration_tests/services/auth/test_auth_integration.py
index dc4c0fda1d..f48c6da690 100644
--- a/api/tests/test_containers_integration_tests/services/auth/test_auth_integration.py
+++ b/api/tests/test_containers_integration_tests/services/auth/test_auth_integration.py
@@ -79,7 +79,7 @@ class TestAuthIntegration:
@patch("services.auth.api_key_auth_service.encrypter.encrypt_token")
@patch("services.auth.firecrawl.firecrawl.httpx.post")
- @patch("services.auth.jina.jina.httpx.post")
+ @patch("services.auth.jina.jina._http_client.post")
def test_multi_tenant_isolation(
self,
mock_jina_http,
diff --git a/api/tests/test_containers_integration_tests/services/test_feedback_service.py b/api/tests/test_containers_integration_tests/services/test_feedback_service.py
index 771f406775..d82933ccb9 100644
--- a/api/tests/test_containers_integration_tests/services/test_feedback_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_feedback_service.py
@@ -99,7 +99,7 @@ class TestFeedbackService:
)
]
- mock_db_session.query.return_value = mock_query
+ mock_db_session.execute.return_value = mock_query
# Test CSV export
result = FeedbackService.export_feedbacks(app_id=sample_data["app"].id, format_type="csv")
@@ -138,7 +138,7 @@ class TestFeedbackService:
)
]
- mock_db_session.query.return_value = mock_query
+ mock_db_session.execute.return_value = mock_query
# Test JSON export
result = FeedbackService.export_feedbacks(app_id=sample_data["app"].id, format_type="json")
@@ -175,7 +175,7 @@ class TestFeedbackService:
)
]
- mock_db_session.query.return_value = mock_query
+ mock_db_session.execute.return_value = mock_query
# Test with filters
result = FeedbackService.export_feedbacks(
@@ -188,11 +188,8 @@ class TestFeedbackService:
format_type="csv",
)
- # Verify filters were applied
- assert mock_query.filter.called
- filter_calls = mock_query.filter.call_args_list
- # At least three filter invocations are expected (source, rating, comment)
- assert len(filter_calls) >= 3
+ # Verify query was executed (filters are baked into the select statement)
+ assert mock_db_session.execute.called
def test_export_feedbacks_no_data(self, mock_db_session, sample_data):
"""Test exporting feedback when no data exists."""
@@ -206,7 +203,7 @@ class TestFeedbackService:
mock_query.order_by.return_value = mock_query
mock_query.all.return_value = []
- mock_db_session.query.return_value = mock_query
+ mock_db_session.execute.return_value = mock_query
result = FeedbackService.export_feedbacks(app_id=sample_data["app"].id, format_type="csv")
@@ -271,7 +268,7 @@ class TestFeedbackService:
)
]
- mock_db_session.query.return_value = mock_query
+ mock_db_session.execute.return_value = mock_query
# Test export
result = FeedbackService.export_feedbacks(app_id=sample_data["app"].id, format_type="json")
@@ -329,7 +326,7 @@ class TestFeedbackService:
)
]
- mock_db_session.query.return_value = mock_query
+ mock_db_session.execute.return_value = mock_query
# Test export
result = FeedbackService.export_feedbacks(app_id=sample_data["app"].id, format_type="csv")
@@ -367,7 +364,7 @@ class TestFeedbackService:
),
]
- mock_db_session.query.return_value = mock_query
+ mock_db_session.execute.return_value = mock_query
# Test export
result = FeedbackService.export_feedbacks(app_id=sample_data["app"].id, format_type="json")
diff --git a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py
index ca6e7afeab..aca3839135 100644
--- a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py
@@ -141,7 +141,7 @@ class TestModelLoadBalancingService:
tenant_id=tenant_id,
provider_name="openai",
model_name="gpt-3.5-turbo",
- model_type="text-generation", # Use the origin model type that matches the query
+ model_type="llm",
enabled=True,
load_balancing_enabled=False,
)
@@ -298,7 +298,7 @@ class TestModelLoadBalancingService:
tenant_id=tenant.id,
provider_name="openai",
model_name="gpt-3.5-turbo",
- model_type="text-generation", # Use the origin model type that matches the query
+ model_type="llm",
name="config1",
encrypted_config='{"api_key": "test_key"}',
enabled=True,
@@ -417,7 +417,7 @@ class TestModelLoadBalancingService:
tenant_id=tenant.id,
provider_name="openai",
model_name="gpt-3.5-turbo",
- model_type="text-generation", # Use the origin model type that matches the query
+ model_type="llm",
name="config1",
encrypted_config='{"api_key": "test_key"}',
enabled=True,
diff --git a/api/tests/unit_tests/controllers/console/app/test_app_import_api.py b/api/tests/unit_tests/controllers/console/app/test_app_import_api.py
deleted file mode 100644
index 91f58460ac..0000000000
--- a/api/tests/unit_tests/controllers/console/app/test_app_import_api.py
+++ /dev/null
@@ -1,157 +0,0 @@
-from __future__ import annotations
-
-from types import SimpleNamespace
-from unittest.mock import MagicMock
-
-import pytest
-
-from controllers.console.app import app_import as app_import_module
-from services.app_dsl_service import ImportStatus
-
-
-def _unwrap(func):
- bound_self = getattr(func, "__self__", None)
- while hasattr(func, "__wrapped__"):
- func = func.__wrapped__
- if bound_self is not None:
- return func.__get__(bound_self, bound_self.__class__)
- return func
-
-
-class _Result:
- def __init__(self, status: ImportStatus, app_id: str | None = "app-1"):
- self.status = status
- self.app_id = app_id
-
- def model_dump(self, mode: str = "json"):
- return {"status": self.status, "app_id": self.app_id}
-
-
-class _SessionContext:
- def __init__(self, session):
- self._session = session
-
- def __enter__(self):
- return self._session
-
- def __exit__(self, exc_type, exc, tb):
- return False
-
-
-def _install_session(monkeypatch: pytest.MonkeyPatch, session: MagicMock) -> None:
- monkeypatch.setattr(app_import_module, "Session", lambda *_: _SessionContext(session))
- monkeypatch.setattr(app_import_module, "db", SimpleNamespace(engine=object()))
-
-
-def _install_features(monkeypatch: pytest.MonkeyPatch, enabled: bool) -> None:
- features = SimpleNamespace(webapp_auth=SimpleNamespace(enabled=enabled))
- monkeypatch.setattr(app_import_module.FeatureService, "get_system_features", lambda: features)
-
-
-def test_import_post_returns_failed_status(app, monkeypatch: pytest.MonkeyPatch) -> None:
- api = app_import_module.AppImportApi()
- method = _unwrap(api.post)
-
- session = MagicMock()
- _install_session(monkeypatch, session)
- _install_features(monkeypatch, enabled=False)
- monkeypatch.setattr(
- app_import_module.AppDslService,
- "import_app",
- lambda *_args, **_kwargs: _Result(ImportStatus.FAILED, app_id=None),
- )
- monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
-
- with app.test_request_context("/console/api/apps/imports", method="POST", json={"mode": "yaml-content"}):
- response, status = method()
-
- session.commit.assert_called_once()
- assert status == 400
- assert response["status"] == ImportStatus.FAILED
-
-
-def test_import_post_returns_pending_status(app, monkeypatch: pytest.MonkeyPatch) -> None:
- api = app_import_module.AppImportApi()
- method = _unwrap(api.post)
-
- session = MagicMock()
- _install_session(monkeypatch, session)
- _install_features(monkeypatch, enabled=False)
- monkeypatch.setattr(
- app_import_module.AppDslService,
- "import_app",
- lambda *_args, **_kwargs: _Result(ImportStatus.PENDING),
- )
- monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
-
- with app.test_request_context("/console/api/apps/imports", method="POST", json={"mode": "yaml-content"}):
- response, status = method()
-
- session.commit.assert_called_once()
- assert status == 202
- assert response["status"] == ImportStatus.PENDING
-
-
-def test_import_post_updates_webapp_auth_when_enabled(app, monkeypatch: pytest.MonkeyPatch) -> None:
- api = app_import_module.AppImportApi()
- method = _unwrap(api.post)
-
- session = MagicMock()
- _install_session(monkeypatch, session)
- _install_features(monkeypatch, enabled=True)
- monkeypatch.setattr(
- app_import_module.AppDslService,
- "import_app",
- lambda *_args, **_kwargs: _Result(ImportStatus.COMPLETED, app_id="app-123"),
- )
- update_access = MagicMock()
- monkeypatch.setattr(app_import_module.EnterpriseService.WebAppAuth, "update_app_access_mode", update_access)
- monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
-
- with app.test_request_context("/console/api/apps/imports", method="POST", json={"mode": "yaml-content"}):
- response, status = method()
-
- session.commit.assert_called_once()
- update_access.assert_called_once_with("app-123", "private")
- assert status == 200
- assert response["status"] == ImportStatus.COMPLETED
-
-
-def test_import_confirm_returns_failed_status(app, monkeypatch: pytest.MonkeyPatch) -> None:
- api = app_import_module.AppImportConfirmApi()
- method = _unwrap(api.post)
-
- session = MagicMock()
- _install_session(monkeypatch, session)
- monkeypatch.setattr(
- app_import_module.AppDslService,
- "confirm_import",
- lambda *_args, **_kwargs: _Result(ImportStatus.FAILED),
- )
- monkeypatch.setattr(app_import_module, "current_account_with_tenant", lambda: (SimpleNamespace(id="u1"), "t1"))
-
- with app.test_request_context("/console/api/apps/imports/import-1/confirm", method="POST"):
- response, status = method(import_id="import-1")
-
- session.commit.assert_called_once()
- assert status == 400
- assert response["status"] == ImportStatus.FAILED
-
-
-def test_import_check_dependencies_returns_result(app, monkeypatch: pytest.MonkeyPatch) -> None:
- api = app_import_module.AppImportCheckDependenciesApi()
- method = _unwrap(api.get)
-
- session = MagicMock()
- _install_session(monkeypatch, session)
- monkeypatch.setattr(
- app_import_module.AppDslService,
- "check_dependencies",
- lambda *_args, **_kwargs: SimpleNamespace(model_dump=lambda mode="json": {"leaked_dependencies": []}),
- )
-
- with app.test_request_context("/console/api/apps/imports/app-1/check-dependencies", method="GET"):
- response, status = method(app_model=SimpleNamespace(id="app-1"))
-
- assert status == 200
- assert response["leaked_dependencies"] == []
diff --git a/api/tests/unit_tests/controllers/console/test_apikey.py b/api/tests/unit_tests/controllers/console/test_apikey.py
deleted file mode 100644
index 2dff9c4037..0000000000
--- a/api/tests/unit_tests/controllers/console/test_apikey.py
+++ /dev/null
@@ -1,139 +0,0 @@
-from unittest.mock import MagicMock, patch
-
-import pytest
-from werkzeug.exceptions import Forbidden
-
-from controllers.console.apikey import (
- BaseApiKeyListResource,
- BaseApiKeyResource,
- _get_resource,
-)
-from models.enums import ApiTokenType
-
-
-@pytest.fixture
-def tenant_context_admin():
- with patch("controllers.console.apikey.current_account_with_tenant") as mock:
- user = MagicMock()
- user.is_admin_or_owner = True
- mock.return_value = (user, "tenant-123")
- yield mock
-
-
-@pytest.fixture
-def tenant_context_non_admin():
- with patch("controllers.console.apikey.current_account_with_tenant") as mock:
- user = MagicMock()
- user.is_admin_or_owner = False
- mock.return_value = (user, "tenant-123")
- yield mock
-
-
-@pytest.fixture
-def db_mock():
- with patch("controllers.console.apikey.db") as mock_db:
- mock_db.session = MagicMock()
- yield mock_db
-
-
-@pytest.fixture(autouse=True)
-def bypass_permissions():
- with patch(
- "controllers.console.apikey.edit_permission_required",
- lambda f: f,
- ):
- yield
-
-
-class DummyApiKeyListResource(BaseApiKeyListResource):
- resource_type = ApiTokenType.APP
- resource_model = MagicMock()
- resource_id_field = "app_id"
- token_prefix = "app-"
-
-
-class DummyApiKeyResource(BaseApiKeyResource):
- resource_type = ApiTokenType.APP
- resource_model = MagicMock()
- resource_id_field = "app_id"
-
-
-class TestGetResource:
- def test_get_resource_success(self):
- fake_resource = MagicMock()
-
- with (
- patch("controllers.console.apikey.select") as mock_select,
- patch("controllers.console.apikey.Session") as mock_session,
- patch("controllers.console.apikey.db") as mock_db,
- ):
- mock_db.engine = MagicMock()
- mock_select.return_value.filter_by.return_value = MagicMock()
-
- session = mock_session.return_value.__enter__.return_value
- session.execute.return_value.scalar_one_or_none.return_value = fake_resource
-
- result = _get_resource("rid", "tid", MagicMock)
- assert result == fake_resource
-
- def test_get_resource_not_found(self):
- with (
- patch("controllers.console.apikey.select") as mock_select,
- patch("controllers.console.apikey.Session") as mock_session,
- patch("controllers.console.apikey.db") as mock_db,
- patch("controllers.console.apikey.flask_restx.abort") as abort,
- ):
- mock_db.engine = MagicMock()
- mock_select.return_value.filter_by.return_value = MagicMock()
-
- session = mock_session.return_value.__enter__.return_value
- session.execute.return_value.scalar_one_or_none.return_value = None
-
- _get_resource("rid", "tid", MagicMock)
-
- abort.assert_called_once()
-
-
-class TestBaseApiKeyListResource:
- def test_get_apikeys_success(self, tenant_context_admin, db_mock):
- resource = DummyApiKeyListResource()
-
- with patch("controllers.console.apikey._get_resource"):
- db_mock.session.scalars.return_value.all.return_value = [MagicMock(), MagicMock()]
-
- result = DummyApiKeyListResource.get.__wrapped__(resource, "resource-id")
- assert "items" in result
-
-
-class TestBaseApiKeyResource:
- def test_delete_forbidden(self, tenant_context_non_admin, db_mock):
- resource = DummyApiKeyResource()
-
- with patch("controllers.console.apikey._get_resource"):
- with pytest.raises(Forbidden):
- DummyApiKeyResource.delete(resource, "rid", "kid")
-
- def test_delete_key_not_found(self, tenant_context_admin, db_mock):
- resource = DummyApiKeyResource()
- db_mock.session.scalar.return_value = None
-
- with patch("controllers.console.apikey._get_resource"):
- with pytest.raises(Exception) as exc_info:
- DummyApiKeyResource.delete(resource, "rid", "kid")
-
- # flask_restx.abort raises HTTPException with message in data attribute
- assert exc_info.value.data["message"] == "API key not found"
-
- def test_delete_success(self, tenant_context_admin, db_mock):
- resource = DummyApiKeyResource()
- db_mock.session.scalar.return_value = MagicMock()
-
- with (
- patch("controllers.console.apikey._get_resource"),
- patch("controllers.console.apikey.ApiTokenCache.delete"),
- ):
- result, status = DummyApiKeyResource.delete(resource, "rid", "kid")
-
- assert status == 204
- assert result == {"result": "success"}
- db_mock.session.commit.assert_called_once()
diff --git a/api/tests/unit_tests/controllers/console/test_document_detail_api_data_source_info.py b/api/tests/unit_tests/controllers/console/test_document_detail_api_data_source_info.py
index f8dd98fdb2..9507fb4a75 100644
--- a/api/tests/unit_tests/controllers/console/test_document_detail_api_data_source_info.py
+++ b/api/tests/unit_tests/controllers/console/test_document_detail_api_data_source_info.py
@@ -6,7 +6,7 @@ and data_source_detail_dict for all data_source_type values, including "local_fi
"""
import json
-from typing import Generic, Literal, NotRequired, TypedDict, TypeVar, Union
+from typing import Literal, NotRequired, TypedDict
from models.dataset import Document
@@ -31,12 +31,10 @@ class WebsiteCrawlInfo(TypedDict):
job_id: str
-RawInfo = Union[LocalFileInfo, UploadFileInfo, NotionImportInfo, WebsiteCrawlInfo]
-T_type = TypeVar("T_type", bound=str)
-T_info = TypeVar("T_info", bound=Union[LocalFileInfo, UploadFileInfo, NotionImportInfo, WebsiteCrawlInfo])
+type RawInfo = LocalFileInfo | UploadFileInfo | NotionImportInfo | WebsiteCrawlInfo
-class Case(TypedDict, Generic[T_type, T_info]):
+class Case[T_type: str, T_info: RawInfo](TypedDict):
data_source_type: T_type
data_source_info: str
expected_raw: T_info
@@ -47,7 +45,7 @@ UploadFileCase = Case[Literal["upload_file"], UploadFileInfo]
NotionImportCase = Case[Literal["notion_import"], NotionImportInfo]
WebsiteCrawlCase = Case[Literal["website_crawl"], WebsiteCrawlInfo]
-AnyCase = Union[LocalFileCase, UploadFileCase, NotionImportCase, WebsiteCrawlCase]
+type AnyCase = LocalFileCase | UploadFileCase | NotionImportCase | WebsiteCrawlCase
case_1: LocalFileCase = {
diff --git a/api/tests/unit_tests/controllers/console/workspace/test_workspace_wraps.py b/api/tests/unit_tests/controllers/console/workspace/test_workspace_wraps.py
deleted file mode 100644
index b290748155..0000000000
--- a/api/tests/unit_tests/controllers/console/workspace/test_workspace_wraps.py
+++ /dev/null
@@ -1,142 +0,0 @@
-from __future__ import annotations
-
-import importlib
-from types import SimpleNamespace
-
-import pytest
-from werkzeug.exceptions import Forbidden
-
-from controllers.console.workspace import plugin_permission_required
-from models.account import TenantPluginPermission
-
-
-class _SessionStub:
- def __init__(self, permission):
- self._permission = permission
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc, tb):
- return False
-
- def query(self, *_args, **_kwargs):
- return self
-
- def where(self, *_args, **_kwargs):
- return self
-
- def first(self):
- return self._permission
-
-
-def _workspace_module():
- return importlib.import_module(plugin_permission_required.__module__)
-
-
-def _patch_session(monkeypatch: pytest.MonkeyPatch, permission):
- module = _workspace_module()
- monkeypatch.setattr(module, "Session", lambda *_args, **_kwargs: _SessionStub(permission))
- monkeypatch.setattr(module, "db", SimpleNamespace(engine=object()))
-
-
-def test_plugin_permission_allows_without_permission(monkeypatch: pytest.MonkeyPatch) -> None:
- user = SimpleNamespace(is_admin_or_owner=False)
- module = _workspace_module()
- monkeypatch.setattr(module, "current_account_with_tenant", lambda: (user, "t1"))
- _patch_session(monkeypatch, None)
-
- @plugin_permission_required()
- def handler():
- return "ok"
-
- assert handler() == "ok"
-
-
-def test_plugin_permission_install_nobody_forbidden(monkeypatch: pytest.MonkeyPatch) -> None:
- user = SimpleNamespace(is_admin_or_owner=True)
- permission = SimpleNamespace(
- install_permission=TenantPluginPermission.InstallPermission.NOBODY,
- debug_permission=TenantPluginPermission.DebugPermission.EVERYONE,
- )
- module = _workspace_module()
- monkeypatch.setattr(module, "current_account_with_tenant", lambda: (user, "t1"))
- _patch_session(monkeypatch, permission)
-
- @plugin_permission_required(install_required=True)
- def handler():
- return "ok"
-
- with pytest.raises(Forbidden):
- handler()
-
-
-def test_plugin_permission_install_admin_requires_admin(monkeypatch: pytest.MonkeyPatch) -> None:
- user = SimpleNamespace(is_admin_or_owner=False)
- permission = SimpleNamespace(
- install_permission=TenantPluginPermission.InstallPermission.ADMINS,
- debug_permission=TenantPluginPermission.DebugPermission.EVERYONE,
- )
- module = _workspace_module()
- monkeypatch.setattr(module, "current_account_with_tenant", lambda: (user, "t1"))
- _patch_session(monkeypatch, permission)
-
- @plugin_permission_required(install_required=True)
- def handler():
- return "ok"
-
- with pytest.raises(Forbidden):
- handler()
-
-
-def test_plugin_permission_install_admin_allows_admin(monkeypatch: pytest.MonkeyPatch) -> None:
- user = SimpleNamespace(is_admin_or_owner=True)
- permission = SimpleNamespace(
- install_permission=TenantPluginPermission.InstallPermission.ADMINS,
- debug_permission=TenantPluginPermission.DebugPermission.EVERYONE,
- )
- module = _workspace_module()
- monkeypatch.setattr(module, "current_account_with_tenant", lambda: (user, "t1"))
- _patch_session(monkeypatch, permission)
-
- @plugin_permission_required(install_required=True)
- def handler():
- return "ok"
-
- assert handler() == "ok"
-
-
-def test_plugin_permission_debug_nobody_forbidden(monkeypatch: pytest.MonkeyPatch) -> None:
- user = SimpleNamespace(is_admin_or_owner=True)
- permission = SimpleNamespace(
- install_permission=TenantPluginPermission.InstallPermission.EVERYONE,
- debug_permission=TenantPluginPermission.DebugPermission.NOBODY,
- )
- module = _workspace_module()
- monkeypatch.setattr(module, "current_account_with_tenant", lambda: (user, "t1"))
- _patch_session(monkeypatch, permission)
-
- @plugin_permission_required(debug_required=True)
- def handler():
- return "ok"
-
- with pytest.raises(Forbidden):
- handler()
-
-
-def test_plugin_permission_debug_admin_requires_admin(monkeypatch: pytest.MonkeyPatch) -> None:
- user = SimpleNamespace(is_admin_or_owner=False)
- permission = SimpleNamespace(
- install_permission=TenantPluginPermission.InstallPermission.EVERYONE,
- debug_permission=TenantPluginPermission.DebugPermission.ADMINS,
- )
- module = _workspace_module()
- monkeypatch.setattr(module, "current_account_with_tenant", lambda: (user, "t1"))
- _patch_session(monkeypatch, permission)
-
- @plugin_permission_required(debug_required=True)
- def handler():
- return "ok"
-
- with pytest.raises(Forbidden):
- handler()
diff --git a/api/tests/unit_tests/controllers/inner_api/plugin/test_plugin_wraps.py b/api/tests/unit_tests/controllers/inner_api/plugin/test_plugin_wraps.py
index eac57fe4b7..957d7fbd9b 100644
--- a/api/tests/unit_tests/controllers/inner_api/plugin/test_plugin_wraps.py
+++ b/api/tests/unit_tests/controllers/inner_api/plugin/test_plugin_wraps.py
@@ -41,15 +41,15 @@ class TestGetUser:
"""Test get_user function"""
@patch("controllers.inner_api.plugin.wraps.EndUser")
- @patch("controllers.inner_api.plugin.wraps.Session")
+ @patch("controllers.inner_api.plugin.wraps.sessionmaker")
@patch("controllers.inner_api.plugin.wraps.db")
- def test_should_return_existing_user_by_id(self, mock_db, mock_session_class, mock_enduser_class, app: Flask):
+ def test_should_return_existing_user_by_id(self, mock_db, mock_sessionmaker, mock_enduser_class, app: Flask):
"""Test returning existing user when found by ID"""
# Arrange
mock_user = MagicMock()
mock_user.id = "user123"
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.get.return_value = mock_user
# Act
@@ -61,17 +61,17 @@ class TestGetUser:
mock_session.get.assert_called_once()
@patch("controllers.inner_api.plugin.wraps.EndUser")
- @patch("controllers.inner_api.plugin.wraps.Session")
+ @patch("controllers.inner_api.plugin.wraps.sessionmaker")
@patch("controllers.inner_api.plugin.wraps.db")
def test_should_return_existing_anonymous_user_by_session_id(
- self, mock_db, mock_session_class, mock_enduser_class, app: Flask
+ self, mock_db, mock_sessionmaker, mock_enduser_class, app: Flask
):
"""Test returning existing anonymous user by session_id"""
# Arrange
mock_user = MagicMock()
mock_user.session_id = "anonymous_session"
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
# non-anonymous path uses session.get(); anonymous uses session.scalar()
mock_session.get.return_value = mock_user
@@ -83,13 +83,13 @@ class TestGetUser:
assert result == mock_user
@patch("controllers.inner_api.plugin.wraps.EndUser")
- @patch("controllers.inner_api.plugin.wraps.Session")
+ @patch("controllers.inner_api.plugin.wraps.sessionmaker")
@patch("controllers.inner_api.plugin.wraps.db")
- def test_should_create_new_user_when_not_found(self, mock_db, mock_session_class, mock_enduser_class, app: Flask):
+ def test_should_create_new_user_when_not_found(self, mock_db, mock_sessionmaker, mock_enduser_class, app: Flask):
"""Test creating new user when not found in database"""
# Arrange
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.get.return_value = None
mock_new_user = MagicMock()
mock_enduser_class.return_value = mock_new_user
@@ -101,21 +101,20 @@ class TestGetUser:
# Assert
assert result == mock_new_user
mock_session.add.assert_called_once()
- mock_session.commit.assert_called_once()
mock_session.refresh.assert_called_once()
@patch("controllers.inner_api.plugin.wraps.select")
@patch("controllers.inner_api.plugin.wraps.EndUser")
- @patch("controllers.inner_api.plugin.wraps.Session")
+ @patch("controllers.inner_api.plugin.wraps.sessionmaker")
@patch("controllers.inner_api.plugin.wraps.db")
def test_should_use_default_session_id_when_user_id_none(
- self, mock_db, mock_session_class, mock_enduser_class, mock_select, app: Flask
+ self, mock_db, mock_sessionmaker, mock_enduser_class, mock_select, app: Flask
):
"""Test using default session ID when user_id is None"""
# Arrange
mock_user = MagicMock()
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
# When user_id is None, is_anonymous=True, so session.scalar() is used
mock_session.scalar.return_value = mock_user
@@ -127,15 +126,13 @@ class TestGetUser:
assert result == mock_user
@patch("controllers.inner_api.plugin.wraps.EndUser")
- @patch("controllers.inner_api.plugin.wraps.Session")
+ @patch("controllers.inner_api.plugin.wraps.sessionmaker")
@patch("controllers.inner_api.plugin.wraps.db")
- def test_should_raise_error_on_database_exception(
- self, mock_db, mock_session_class, mock_enduser_class, app: Flask
- ):
+ def test_should_raise_error_on_database_exception(self, mock_db, mock_sessionmaker, mock_enduser_class, app: Flask):
"""Test raising ValueError when database operation fails"""
# Arrange
mock_session = MagicMock()
- mock_session_class.return_value.__enter__.return_value = mock_session
+ mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session.get.side_effect = Exception("Database error")
# Act & Assert
diff --git a/api/tests/unit_tests/controllers/service_api/app/test_conversation.py b/api/tests/unit_tests/controllers/service_api/app/test_conversation.py
index 81c45dcdb7..dbd06677d8 100644
--- a/api/tests/unit_tests/controllers/service_api/app/test_conversation.py
+++ b/api/tests/unit_tests/controllers/service_api/app/test_conversation.py
@@ -433,13 +433,20 @@ class TestConversationApiController:
handler(api, app_model=app_model, end_user=end_user)
def test_list_last_not_found(self, app, monkeypatch: pytest.MonkeyPatch) -> None:
- class _SessionStub:
+ class _BeginStub:
def __enter__(self):
return SimpleNamespace()
def __exit__(self, exc_type, exc, tb):
return False
+ class _SessionMakerStub:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def begin(self):
+ return _BeginStub()
+
monkeypatch.setattr(
ConversationService,
"pagination_by_last_id",
@@ -447,7 +454,7 @@ class TestConversationApiController:
)
conversation_module = sys.modules["controllers.service_api.app.conversation"]
monkeypatch.setattr(conversation_module, "db", SimpleNamespace(engine=object()))
- monkeypatch.setattr(conversation_module, "Session", lambda *_args, **_kwargs: _SessionStub())
+ monkeypatch.setattr(conversation_module, "sessionmaker", _SessionMakerStub)
api = ConversationApi()
handler = _unwrap(api.get)
diff --git a/api/tests/unit_tests/controllers/service_api/app/test_workflow.py b/api/tests/unit_tests/controllers/service_api/app/test_workflow.py
index b1f036c6f3..cfa21bf2dd 100644
--- a/api/tests/unit_tests/controllers/service_api/app/test_workflow.py
+++ b/api/tests/unit_tests/controllers/service_api/app/test_workflow.py
@@ -470,16 +470,23 @@ class TestWorkflowTaskStopApi:
class TestWorkflowAppLogApi:
def test_success(self, app, monkeypatch: pytest.MonkeyPatch) -> None:
- class _SessionStub:
+ class _BeginStub:
def __enter__(self):
return SimpleNamespace()
def __exit__(self, exc_type, exc, tb):
return False
+ class _SessionMakerStub:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def begin(self):
+ return _BeginStub()
+
workflow_module = sys.modules["controllers.service_api.app.workflow"]
monkeypatch.setattr(workflow_module, "db", SimpleNamespace(engine=object()))
- monkeypatch.setattr(workflow_module, "Session", lambda *_args, **_kwargs: _SessionStub())
+ monkeypatch.setattr(workflow_module, "sessionmaker", _SessionMakerStub)
monkeypatch.setattr(
WorkflowAppService,
"get_paginate_workflow_app_logs",
@@ -635,11 +642,14 @@ class TestWorkflowAppLogApiGet:
mock_svc_instance.get_paginate_workflow_app_logs.return_value = mock_pagination
mock_wf_svc_cls.return_value = mock_svc_instance
- # Mock Session context manager
+ # Mock sessionmaker(...).begin() context manager
mock_session = Mock()
mock_db.engine = Mock()
- mock_session.__enter__ = Mock(return_value=mock_session)
- mock_session.__exit__ = Mock(return_value=False)
+ mock_begin = Mock()
+ mock_begin.__enter__ = Mock(return_value=mock_session)
+ mock_begin.__exit__ = Mock(return_value=False)
+ mock_session_factory = Mock()
+ mock_session_factory.begin.return_value = mock_begin
from controllers.service_api.app.workflow import WorkflowAppLogApi
@@ -647,7 +657,7 @@ class TestWorkflowAppLogApiGet:
"/workflows/logs?page=1&limit=20",
method="GET",
):
- with patch("controllers.service_api.app.workflow.Session", return_value=mock_session):
+ with patch("controllers.service_api.app.workflow.sessionmaker", return_value=mock_session_factory):
api = WorkflowAppLogApi()
result = _unwrap(api.get)(api, app_model=mock_workflow_app)
diff --git a/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_generator.py b/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_generator.py
index 06face41fe..9a2dc38f74 100644
--- a/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_generator.py
+++ b/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_generator.py
@@ -8,6 +8,7 @@ import core.app.apps.pipeline.pipeline_generator as module
from core.app.apps.exc import GenerateTaskStoppedError
from core.app.entities.app_invoke_entities import InvokeFrom
from core.datasource.entities.datasource_entities import DatasourceProviderType
+from models.enums import DataSourceType
class FakeRagPipelineGenerateEntity(SimpleNamespace):
@@ -345,7 +346,7 @@ def test_generate_raises_when_workflow_not_found(generator, mocker):
mocker.patch.object(module, "preserve_flask_contexts", _dummy_preserve)
session = MagicMock()
- session.query.return_value.where.return_value.first.return_value = None
+ session.get.return_value = None
mocker.patch.object(module.db, "session", session)
with pytest.raises(ValueError):
@@ -558,6 +559,24 @@ def test_build_document_sets_metadata_for_builtin_fields(generator, mocker):
assert document.doc_metadata
+def test_build_document_supports_online_drive_datasource_type(generator):
+ document = generator._build_document(
+ tenant_id="tenant",
+ dataset_id="ds",
+ built_in_field_enabled=True,
+ datasource_type=DatasourceProviderType.ONLINE_DRIVE,
+ datasource_info={"id": "file-1", "bucket": "bucket-1", "name": "drive.pdf", "type": "file"},
+ created_from="rag-pipeline",
+ position=1,
+ account=_build_user(),
+ batch="batch",
+ document_form="text",
+ )
+
+ assert DataSourceType(document.data_source_type) == DataSourceType.ONLINE_DRIVE
+ assert document.name == "drive.pdf"
+
+
def test_build_document_invalid_datasource_type(generator):
with pytest.raises(ValueError):
generator._build_document(
diff --git a/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_runner.py b/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_runner.py
index ab70996f0a..c8ae288e6f 100644
--- a/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_runner.py
+++ b/api/tests/unit_tests/core/app/apps/pipeline/test_pipeline_runner.py
@@ -80,9 +80,7 @@ def test_get_workflow_returns_workflow(mocker, runner):
pipeline = MagicMock(tenant_id="tenant", id="pipe")
workflow = MagicMock(id="wf")
- query = MagicMock()
- query.where.return_value.first.return_value = workflow
- mocker.patch.object(module.db, "session", MagicMock(query=MagicMock(return_value=query)))
+ mocker.patch.object(module.db, "session", MagicMock(scalar=MagicMock(return_value=workflow)))
result = runner.get_workflow(pipeline=pipeline, workflow_id="wf")
@@ -115,11 +113,8 @@ def test_init_rag_pipeline_graph_not_found(mocker, runner):
def test_update_document_status_on_failure(mocker, runner):
document = MagicMock()
- query = MagicMock()
- query.where.return_value.first.return_value = document
-
session = MagicMock()
- session.query.return_value = query
+ session.scalar.return_value = document
mocker.patch.object(module.db, "session", session)
event = GraphRunFailedEvent(error="boom")
@@ -189,14 +184,10 @@ def test_run_single_iteration_path(mocker):
app_generate_entity.single_iteration_run = MagicMock()
pipeline = MagicMock(id="pipe")
- query_pipeline = MagicMock()
- query_pipeline.where.return_value.first.return_value = pipeline
-
- query_end_user = MagicMock()
- query_end_user.where.return_value.first.return_value = MagicMock(session_id="sess")
+ end_user = MagicMock(session_id="sess")
session = MagicMock()
- session.query.side_effect = [query_end_user, query_pipeline]
+ session.get.side_effect = [end_user, pipeline]
mocker.patch.object(module.db, "session", session)
runner = PipelineRunner(
@@ -241,14 +232,10 @@ def test_run_normal_path_builds_graph(mocker):
app_generate_entity = _build_app_generate_entity()
pipeline = MagicMock(id="pipe")
- query_pipeline = MagicMock()
- query_pipeline.where.return_value.first.return_value = pipeline
-
- query_end_user = MagicMock()
- query_end_user.where.return_value.first.return_value = MagicMock(session_id="sess")
+ end_user = MagicMock(session_id="sess")
session = MagicMock()
- session.query.side_effect = [query_end_user, query_pipeline]
+ session.get.side_effect = [end_user, pipeline]
mocker.patch.object(module.db, "session", session)
workflow = MagicMock(
diff --git a/api/tests/unit_tests/core/datasource/test_datasource_file_manager.py b/api/tests/unit_tests/core/datasource/test_datasource_file_manager.py
index 7cd1fdf06b..4f39d38831 100644
--- a/api/tests/unit_tests/core/datasource/test_datasource_file_manager.py
+++ b/api/tests/unit_tests/core/datasource/test_datasource_file_manager.py
@@ -287,9 +287,7 @@ class TestDatasourceFileManager:
mock_upload_file.key = "some_key"
mock_upload_file.mime_type = "image/png"
- mock_query = mock_db.session.query.return_value
- mock_where = mock_query.where.return_value
- mock_where.first.return_value = mock_upload_file
+ mock_db.session.get.return_value = mock_upload_file
mock_storage.load_once.return_value = b"file content"
@@ -300,7 +298,7 @@ class TestDatasourceFileManager:
assert result == (b"file content", "image/png")
# Case: Not found
- mock_where.first.return_value = None
+ mock_db.session.get.return_value = None
assert DatasourceFileManager.get_file_binary("unknown") is None
@patch("core.datasource.datasource_file_manager.db")
@@ -314,16 +312,14 @@ class TestDatasourceFileManager:
mock_tool_file.file_key = "tool_key"
mock_tool_file.mimetype = "image/png"
- # Mock query sequence
- def mock_query(model):
- m = MagicMock()
+ def mock_get(model, id):
if model == MessageFile:
- m.where.return_value.first.return_value = mock_message_file
+ return mock_message_file
elif model == ToolFile:
- m.where.return_value.first.return_value = mock_tool_file
- return m
+ return mock_tool_file
+ return None
- mock_db.session.query.side_effect = mock_query
+ mock_db.session.get.side_effect = mock_get
mock_storage.load_once.return_value = b"tool content"
# Execute
@@ -344,15 +340,12 @@ class TestDatasourceFileManager:
mock_tool_file.file_key = "tk"
mock_tool_file.mimetype = "image/png"
- def mock_query(model):
- m = MagicMock()
+ def mock_get(model, id):
if model == MessageFile:
- m.where.return_value.first.return_value = mock_message_file
- else:
- m.where.return_value.first.return_value = mock_tool_file
- return m
+ return mock_message_file
+ return mock_tool_file
- mock_db.session.query.side_effect = mock_query
+ mock_db.session.get.side_effect = mock_get
mock_storage.load_once.return_value = b"bits"
result = DatasourceFileManager.get_file_binary_by_message_file_id("m")
@@ -361,27 +354,20 @@ class TestDatasourceFileManager:
@patch("core.datasource.datasource_file_manager.db")
@patch("core.datasource.datasource_file_manager.storage")
def test_get_file_binary_by_message_file_id_failures(self, mock_storage, mock_db):
- # Setup common mock
- mock_query_obj = MagicMock()
- mock_db.session.query.return_value = mock_query_obj
- mock_query_obj.where.return_value.first.return_value = None
-
# Case 1: Message file not found
+ mock_db.session.get.return_value = None
assert DatasourceFileManager.get_file_binary_by_message_file_id("none") is None
# Case 2: Message file found but tool file not found
mock_message_file = MagicMock(spec=MessageFile)
mock_message_file.url = None
- def mock_query_v2(model):
- m = MagicMock()
+ def mock_get_v2(model, id):
if model == MessageFile:
- m.where.return_value.first.return_value = mock_message_file
- else:
- m.where.return_value.first.return_value = None
- return m
+ return mock_message_file
+ return None
- mock_db.session.query.side_effect = mock_query_v2
+ mock_db.session.get.side_effect = mock_get_v2
assert DatasourceFileManager.get_file_binary_by_message_file_id("msg_id") is None
@patch("core.datasource.datasource_file_manager.db")
@@ -392,7 +378,7 @@ class TestDatasourceFileManager:
mock_upload_file.key = "upload_key"
mock_upload_file.mime_type = "text/plain"
- mock_db.session.query.return_value.where.return_value.first.return_value = mock_upload_file
+ mock_db.session.get.return_value = mock_upload_file
mock_storage.load_stream.return_value = iter([b"chunk1", b"chunk2"])
@@ -404,7 +390,7 @@ class TestDatasourceFileManager:
assert list(stream) == [b"chunk1", b"chunk2"]
# Case: Not found
- mock_db.session.query.return_value.where.return_value.first.return_value = None
+ mock_db.session.get.return_value = None
stream, mimetype = DatasourceFileManager.get_file_generator_by_upload_file_id("none")
assert stream is None
assert mimetype is None
diff --git a/api/tests/unit_tests/core/datasource/test_file_upload.py b/api/tests/unit_tests/core/datasource/test_file_upload.py
index 63b86e64fc..c6d6dd5808 100644
--- a/api/tests/unit_tests/core/datasource/test_file_upload.py
+++ b/api/tests/unit_tests/core/datasource/test_file_upload.py
@@ -1249,9 +1249,9 @@ class TestFileConstants:
"""
def test_image_extensions_set_properties(self):
- """Test that IMAGE_EXTENSIONS set has expected properties."""
- # Assert - Should be a set
- assert isinstance(IMAGE_EXTENSIONS, set)
+ """Test that IMAGE_EXTENSIONS frozenset has expected properties."""
+ # Assert - Should be immutable
+ assert isinstance(IMAGE_EXTENSIONS, frozenset)
# Should not be empty
assert len(IMAGE_EXTENSIONS) > 0
# Should contain common image formats
@@ -1260,9 +1260,9 @@ class TestFileConstants:
assert ext in IMAGE_EXTENSIONS or ext.upper() in IMAGE_EXTENSIONS
def test_video_extensions_set_properties(self):
- """Test that VIDEO_EXTENSIONS set has expected properties."""
- # Assert - Should be a set
- assert isinstance(VIDEO_EXTENSIONS, set)
+ """Test that VIDEO_EXTENSIONS frozenset has expected properties."""
+ # Assert - Should be immutable
+ assert isinstance(VIDEO_EXTENSIONS, frozenset)
# Should not be empty
assert len(VIDEO_EXTENSIONS) > 0
# Should contain common video formats
@@ -1271,9 +1271,9 @@ class TestFileConstants:
assert ext in VIDEO_EXTENSIONS or ext.upper() in VIDEO_EXTENSIONS
def test_audio_extensions_set_properties(self):
- """Test that AUDIO_EXTENSIONS set has expected properties."""
- # Assert - Should be a set
- assert isinstance(AUDIO_EXTENSIONS, set)
+ """Test that AUDIO_EXTENSIONS frozenset has expected properties."""
+ # Assert - Should be immutable
+ assert isinstance(AUDIO_EXTENSIONS, frozenset)
# Should not be empty
assert len(AUDIO_EXTENSIONS) > 0
# Should contain common audio formats
@@ -1282,9 +1282,9 @@ class TestFileConstants:
assert ext in AUDIO_EXTENSIONS or ext.upper() in AUDIO_EXTENSIONS
def test_document_extensions_set_properties(self):
- """Test that DOCUMENT_EXTENSIONS set has expected properties."""
- # Assert - Should be a set
- assert isinstance(DOCUMENT_EXTENSIONS, set)
+ """Test that DOCUMENT_EXTENSIONS frozenset has expected properties."""
+ # Assert - Should be immutable
+ assert isinstance(DOCUMENT_EXTENSIONS, frozenset)
# Should not be empty
assert len(DOCUMENT_EXTENSIONS) > 0
# Should contain common document formats
diff --git a/api/tests/unit_tests/core/datasource/test_website_crawl.py b/api/tests/unit_tests/core/datasource/test_website_crawl.py
index 1d79db2640..53000881dd 100644
--- a/api/tests/unit_tests/core/datasource/test_website_crawl.py
+++ b/api/tests/unit_tests/core/datasource/test_website_crawl.py
@@ -560,7 +560,10 @@ class TestWebsiteService:
mock_response = Mock()
mock_response.json.return_value = {"code": 200, "data": {"taskId": "task-789"}}
- mock_httpx_post = mocker.patch("services.website_service.httpx.post", return_value=mock_response)
+ mock_httpx_post = mocker.patch(
+ "services.website_service._adaptive_http_client.post",
+ return_value=mock_response,
+ )
from services.website_service import WebsiteCrawlApiRequest
@@ -1340,7 +1343,7 @@ class TestProviderSpecificFeatures:
"url": "https://example.com/page",
},
}
- mocker.patch("services.website_service.httpx.get", return_value=mock_response)
+ mocker.patch("services.website_service._jina_http_client.get", return_value=mock_response)
from services.website_service import WebsiteCrawlApiRequest
diff --git a/api/tests/unit_tests/core/helper/code_executor/jinja2/test_jinja2_formatter.py b/api/tests/unit_tests/core/helper/code_executor/jinja2/test_jinja2_formatter.py
new file mode 100644
index 0000000000..60002a757d
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/code_executor/jinja2/test_jinja2_formatter.py
@@ -0,0 +1,24 @@
+from pytest_mock import MockerFixture
+
+from core.helper.code_executor.jinja2.jinja2_formatter import Jinja2Formatter
+
+
+def test_format_returns_result_value_as_string(mocker: MockerFixture) -> None:
+ execute_mock = mocker.patch(
+ "core.helper.code_executor.jinja2.jinja2_formatter.CodeExecutor.execute_workflow_code_template",
+ return_value={"result": 123},
+ )
+
+ formatted = Jinja2Formatter.format("Hello {{ name }}", {"name": "Dify"})
+
+ assert formatted == "123"
+ execute_mock.assert_called_once()
+
+
+def test_format_returns_empty_string_when_result_missing(mocker: MockerFixture) -> None:
+ mocker.patch(
+ "core.helper.code_executor.jinja2.jinja2_formatter.CodeExecutor.execute_workflow_code_template",
+ return_value={},
+ )
+
+ assert Jinja2Formatter.format("Hello", {"name": "Dify"}) == ""
diff --git a/api/tests/unit_tests/core/helper/code_executor/test_code_executor.py b/api/tests/unit_tests/core/helper/code_executor/test_code_executor.py
new file mode 100644
index 0000000000..e09dd03489
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/code_executor/test_code_executor.py
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+from typing import Any, cast
+from unittest.mock import MagicMock
+
+import pytest
+from pytest_mock import MockerFixture
+
+from core.helper.code_executor import code_executor as code_executor_module
+
+
+def test_execute_workflow_code_template_raises_for_unsupported_language() -> None:
+ with pytest.raises(code_executor_module.CodeExecutionError, match="Unsupported language"):
+ code_executor_module.CodeExecutor.execute_workflow_code_template(cast(Any, "ruby"), "print(1)", {})
+
+
+def test_execute_workflow_code_template_uses_transformer(mocker: MockerFixture) -> None:
+ transformer = MagicMock()
+ transformer.transform_caller.return_value = ("runner-script", "preload-script")
+ transformer.transform_response.return_value = {"result": "ok"}
+ execute_mock = mocker.patch.object(
+ code_executor_module.CodeExecutor,
+ "execute_code",
+ return_value='<>{"result":"ok"}<>',
+ )
+ mocker.patch.dict(code_executor_module.CodeExecutor.code_template_transformers, {"fake": transformer}, clear=False)
+
+ result = code_executor_module.CodeExecutor.execute_workflow_code_template(cast(Any, "fake"), "code", {"a": 1})
+
+ assert result == {"result": "ok"}
+ transformer.transform_caller.assert_called_once_with("code", {"a": 1})
+ execute_mock.assert_called_once_with("fake", "preload-script", "runner-script")
+
+
+def test_execute_code_raises_service_unavailable_for_503(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.status_code = 503
+ client = MagicMock()
+ client.post.return_value = response
+ mocker.patch("core.helper.code_executor.code_executor.get_pooled_http_client", return_value=client)
+
+ with pytest.raises(code_executor_module.CodeExecutionError, match="service is unavailable"):
+ code_executor_module.CodeExecutor.execute_code(cast(Any, "python3"), preload="", code="print(1)")
+
+
+def test_execute_code_returns_stdout_on_success(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.status_code = 200
+ response.json.return_value = {"code": 0, "message": "ok", "data": {"stdout": "done", "error": None}}
+ client = MagicMock()
+ client.post.return_value = response
+ mocker.patch("core.helper.code_executor.code_executor.get_pooled_http_client", return_value=client)
+
+ assert code_executor_module.CodeExecutor.execute_code(cast(Any, "python3"), preload="", code="print(1)") == "done"
+
+
+def test_execute_code_raises_for_non_200_status(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.status_code = 500
+ client = MagicMock()
+ client.post.return_value = response
+ mocker.patch("core.helper.code_executor.code_executor.get_pooled_http_client", return_value=client)
+
+ with pytest.raises(code_executor_module.CodeExecutionError, match="likely a network issue"):
+ code_executor_module.CodeExecutor.execute_code(cast(Any, "python3"), preload="", code="print(1)")
+
+
+def test_execute_code_raises_when_client_post_fails(mocker: MockerFixture) -> None:
+ client = MagicMock()
+ client.post.side_effect = RuntimeError("timeout")
+ mocker.patch("core.helper.code_executor.code_executor.get_pooled_http_client", return_value=client)
+
+ with pytest.raises(code_executor_module.CodeExecutionError, match="likely a network issue"):
+ code_executor_module.CodeExecutor.execute_code(cast(Any, "python3"), preload="", code="print(1)")
+
+
+def test_execute_code_raises_when_response_json_is_invalid(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.status_code = 200
+ response.json.side_effect = ValueError("bad json")
+ client = MagicMock()
+ client.post.return_value = response
+ mocker.patch("core.helper.code_executor.code_executor.get_pooled_http_client", return_value=client)
+
+ with pytest.raises(code_executor_module.CodeExecutionError, match="Failed to parse response"):
+ code_executor_module.CodeExecutor.execute_code(cast(Any, "python3"), preload="", code="print(1)")
+
+
+def test_execute_code_raises_when_sandbox_returns_error_code(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.status_code = 200
+ response.json.return_value = {"code": 1, "message": "boom", "data": {"stdout": "", "error": None}}
+ client = MagicMock()
+ client.post.return_value = response
+ mocker.patch("core.helper.code_executor.code_executor.get_pooled_http_client", return_value=client)
+
+ with pytest.raises(code_executor_module.CodeExecutionError, match="Got error code: 1"):
+ code_executor_module.CodeExecutor.execute_code(cast(Any, "python3"), preload="", code="print(1)")
+
+
+def test_execute_code_raises_when_response_contains_runtime_error(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.status_code = 200
+ response.json.return_value = {"code": 0, "message": "ok", "data": {"stdout": "", "error": "runtime failed"}}
+ client = MagicMock()
+ client.post.return_value = response
+ mocker.patch("core.helper.code_executor.code_executor.get_pooled_http_client", return_value=client)
+
+ with pytest.raises(code_executor_module.CodeExecutionError, match="runtime failed"):
+ code_executor_module.CodeExecutor.execute_code(cast(Any, "python3"), preload="", code="print(1)")
diff --git a/api/tests/unit_tests/core/helper/code_executor/test_code_node_provider.py b/api/tests/unit_tests/core/helper/code_executor/test_code_node_provider.py
new file mode 100644
index 0000000000..47761a32ac
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/code_executor/test_code_node_provider.py
@@ -0,0 +1,29 @@
+from core.helper.code_executor.code_node_provider import CodeNodeProvider
+
+
+class _DummyProvider(CodeNodeProvider):
+ @staticmethod
+ def get_language() -> str:
+ return "dummy"
+
+ @classmethod
+ def get_default_code(cls) -> str:
+ return "def main():\n return {'result': 'ok'}"
+
+
+def test_is_accept_language() -> None:
+ assert _DummyProvider.is_accept_language("dummy") is True
+ assert _DummyProvider.is_accept_language("other") is False
+
+
+def test_get_default_config_contains_expected_shape() -> None:
+ config = _DummyProvider.get_default_config()
+
+ assert config["type"] == "code"
+ assert config["config"]["code_language"] == "dummy"
+ assert config["config"]["code"] == _DummyProvider.get_default_code()
+ assert config["config"]["variables"] == [
+ {"variable": "arg1", "value_selector": []},
+ {"variable": "arg2", "value_selector": []},
+ ]
+ assert config["config"]["outputs"] == {"result": {"type": "string", "children": None}}
diff --git a/api/tests/unit_tests/core/helper/code_executor/test_template_transformer.py b/api/tests/unit_tests/core/helper/code_executor/test_template_transformer.py
new file mode 100644
index 0000000000..5b54b8e647
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/code_executor/test_template_transformer.py
@@ -0,0 +1,81 @@
+import json
+from base64 import b64decode
+from collections.abc import Mapping
+from typing import Any
+
+import pytest
+
+from core.helper.code_executor.template_transformer import TemplateTransformer
+
+
+class _DummyTransformer(TemplateTransformer):
+ @classmethod
+ def get_runner_script(cls) -> str:
+ return f"CODE={cls._code_placeholder};INPUTS={cls._inputs_placeholder}"
+
+
+def test_serialize_code_encodes_to_base64() -> None:
+ encoded = _DummyTransformer.serialize_code("print('hi')")
+
+ assert b64decode(encoded.encode()).decode() == "print('hi')"
+
+
+def test_assemble_runner_script_embeds_code_and_inputs() -> None:
+ script = _DummyTransformer.assemble_runner_script("x = 1", {"a": "b"})
+
+ assert "CODE=x = 1" in script
+ payload = script.split("INPUTS=", maxsplit=1)[1]
+ assert json.loads(b64decode(payload.encode()).decode()) == {"a": "b"}
+
+
+def test_transform_caller_returns_runner_and_empty_preload() -> None:
+ runner, preload = _DummyTransformer.transform_caller("x = 2", {"k": "v"})
+
+ assert "CODE=x = 2" in runner
+ assert preload == ""
+
+
+def test_serialize_inputs_encodes_payload() -> None:
+ payload = _DummyTransformer.serialize_inputs({"foo": "bar"})
+
+ assert json.loads(b64decode(payload.encode()).decode()) == {"foo": "bar"}
+
+
+def test_transform_response_parses_json_result_and_converts_scientific_notation() -> None:
+ response = '<>{"value": "1e+3", "nested": {"x": "2E-2"}, "arr": ["3e+1"]}<>'
+
+ result: Mapping[str, Any] = _DummyTransformer.transform_response(response)
+
+ assert result == {"value": 1000.0, "nested": {"x": 0.02}, "arr": [30.0]}
+
+
+def test_transform_response_raises_for_invalid_json() -> None:
+ with pytest.raises(ValueError, match="Failed to parse JSON response"):
+ _DummyTransformer.transform_response("<>{invalid json}<>")
+
+
+def test_transform_response_raises_for_non_dict_result() -> None:
+ with pytest.raises(ValueError, match="Result must be a dict"):
+ _DummyTransformer.transform_response("<>[1,2,3]<>")
+
+
+def test_transform_response_raises_for_non_string_keys(monkeypatch: pytest.MonkeyPatch) -> None:
+ monkeypatch.setattr("json.loads", lambda _: {1: "x"})
+
+ with pytest.raises(ValueError, match="Result keys must be strings"):
+ _DummyTransformer.transform_response('<>{"ignored": true}<>')
+
+
+def test_transform_response_raises_for_unexpected_errors(monkeypatch: pytest.MonkeyPatch) -> None:
+ def _raise_unexpected(_: str) -> Any:
+ raise RuntimeError("boom")
+
+ monkeypatch.setattr("json.loads", _raise_unexpected)
+
+ with pytest.raises(ValueError, match="Unexpected error during response transformation"):
+ _DummyTransformer.transform_response('<>{"a":1}<>')
+
+
+def test_transform_response_raises_for_missing_result_tag() -> None:
+ with pytest.raises(ValueError, match="no result tag found"):
+ _DummyTransformer.transform_response("plain output")
diff --git a/api/tests/unit_tests/core/helper/test_credential_utils.py b/api/tests/unit_tests/core/helper/test_credential_utils.py
new file mode 100644
index 0000000000..7e0d7d0af7
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/test_credential_utils.py
@@ -0,0 +1,138 @@
+from types import SimpleNamespace
+from typing import cast
+
+import pytest
+from pytest_mock import MockerFixture
+
+from core.helper.credential_utils import check_credential_policy_compliance, is_credential_exists
+from services.enterprise.plugin_manager_service import PluginCredentialType
+
+
+def test_check_credential_policy_compliance_returns_when_feature_disabled(
+ mocker: MockerFixture,
+) -> None:
+ mocker.patch(
+ "services.feature_service.FeatureService.get_system_features",
+ return_value=SimpleNamespace(plugin_manager=SimpleNamespace(enabled=False)),
+ )
+ check_call = mocker.patch(
+ "services.enterprise.plugin_manager_service.PluginManagerService.check_credential_policy_compliance"
+ )
+
+ check_credential_policy_compliance("cred-1", "openai", PluginCredentialType.MODEL)
+
+ check_call.assert_not_called()
+
+
+def test_check_credential_policy_compliance_raises_when_credential_missing(
+ mocker: MockerFixture,
+) -> None:
+ mocker.patch(
+ "services.feature_service.FeatureService.get_system_features",
+ return_value=SimpleNamespace(plugin_manager=SimpleNamespace(enabled=True)),
+ )
+ mocker.patch("core.helper.credential_utils.is_credential_exists", return_value=False)
+
+ with pytest.raises(ValueError, match="Credential with id cred-1 for provider openai not found."):
+ check_credential_policy_compliance("cred-1", "openai", PluginCredentialType.TOOL)
+
+
+def test_check_credential_policy_compliance_calls_plugin_manager_with_request(
+ mocker: MockerFixture,
+) -> None:
+ mocker.patch(
+ "services.feature_service.FeatureService.get_system_features",
+ return_value=SimpleNamespace(plugin_manager=SimpleNamespace(enabled=True)),
+ )
+ mocker.patch("core.helper.credential_utils.is_credential_exists", return_value=True)
+ check_call = mocker.patch(
+ "services.enterprise.plugin_manager_service.PluginManagerService.check_credential_policy_compliance"
+ )
+
+ check_credential_policy_compliance("cred-1", "openai", PluginCredentialType.MODEL)
+
+ check_call.assert_called_once()
+ request_arg = check_call.call_args.args[0]
+ assert request_arg.dify_credential_id == "cred-1"
+ assert request_arg.provider == "openai"
+ assert request_arg.credential_type == PluginCredentialType.MODEL
+
+
+def test_check_credential_policy_compliance_skips_existence_check_when_disabled(
+ mocker: MockerFixture,
+) -> None:
+ mocker.patch(
+ "services.feature_service.FeatureService.get_system_features",
+ return_value=SimpleNamespace(plugin_manager=SimpleNamespace(enabled=True)),
+ )
+ exists_call = mocker.patch("core.helper.credential_utils.is_credential_exists")
+ check_call = mocker.patch(
+ "services.enterprise.plugin_manager_service.PluginManagerService.check_credential_policy_compliance"
+ )
+
+ check_credential_policy_compliance(
+ credential_id="cred-1",
+ provider="openai",
+ credential_type=PluginCredentialType.MODEL,
+ check_existence=False,
+ )
+
+ exists_call.assert_not_called()
+ check_call.assert_called_once()
+
+
+def test_check_credential_policy_compliance_returns_when_credential_id_empty(
+ mocker: MockerFixture,
+) -> None:
+ mocker.patch(
+ "services.feature_service.FeatureService.get_system_features",
+ return_value=SimpleNamespace(plugin_manager=SimpleNamespace(enabled=True)),
+ )
+ exists_call = mocker.patch("core.helper.credential_utils.is_credential_exists")
+ check_call = mocker.patch(
+ "services.enterprise.plugin_manager_service.PluginManagerService.check_credential_policy_compliance"
+ )
+
+ check_credential_policy_compliance("", "openai", PluginCredentialType.MODEL)
+
+ exists_call.assert_not_called()
+ check_call.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ ("credential_type", "scalar_result", "expected"),
+ [
+ (PluginCredentialType.MODEL, "model-credential", True),
+ (PluginCredentialType.MODEL, None, False),
+ (PluginCredentialType.TOOL, "tool-credential", True),
+ (PluginCredentialType.TOOL, None, False),
+ ],
+)
+def test_is_credential_exists_by_type(
+ mocker: MockerFixture,
+ credential_type: PluginCredentialType,
+ scalar_result: str | None,
+ expected: bool,
+) -> None:
+ mocker.patch("extensions.ext_database.db", new=SimpleNamespace(engine=object()))
+ session_cls = mocker.patch("sqlalchemy.orm.Session")
+ session = session_cls.return_value.__enter__.return_value
+ session.scalar.return_value = scalar_result
+
+ result = is_credential_exists("cred-1", credential_type)
+
+ assert result is expected
+ session.scalar.assert_called_once()
+
+
+def test_is_credential_exists_returns_false_for_unknown_type(
+ mocker: MockerFixture,
+) -> None:
+ mocker.patch("extensions.ext_database.db", new=SimpleNamespace(engine=object()))
+ session_cls = mocker.patch("sqlalchemy.orm.Session")
+ session = session_cls.return_value.__enter__.return_value
+
+ result = is_credential_exists("cred-1", cast(PluginCredentialType, "unknown"))
+
+ assert result is False
+ session.scalar.assert_not_called()
diff --git a/api/tests/unit_tests/core/helper/test_download.py b/api/tests/unit_tests/core/helper/test_download.py
new file mode 100644
index 0000000000..0755c25826
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/test_download.py
@@ -0,0 +1,53 @@
+from collections.abc import Iterator
+
+import pytest
+from pytest_mock import MockerFixture
+
+from core.helper.download import download_with_size_limit
+
+
+class _StubResponse:
+ def __init__(self, status_code: int, chunks: list[bytes]) -> None:
+ self.status_code = status_code
+ self._chunks = chunks
+
+ def iter_bytes(self) -> Iterator[bytes]:
+ return iter(self._chunks)
+
+
+def test_download_with_size_limit_returns_content(mocker: MockerFixture) -> None:
+ response = _StubResponse(status_code=200, chunks=[b"ab", b"cd", b"ef"])
+ mock_get = mocker.patch("core.helper.download.ssrf_proxy.get", return_value=response)
+
+ content = download_with_size_limit("https://example.com/a.txt", max_download_size=6, timeout=10)
+
+ assert content == b"abcdef"
+ mock_get.assert_called_once_with("https://example.com/a.txt", follow_redirects=True, timeout=10)
+
+
+def test_download_with_size_limit_raises_for_404(mocker: MockerFixture) -> None:
+ mocker.patch("core.helper.download.ssrf_proxy.get", return_value=_StubResponse(status_code=404, chunks=[]))
+
+ with pytest.raises(ValueError, match="file not found"):
+ download_with_size_limit("https://example.com/missing.txt", max_download_size=10)
+
+
+def test_download_with_size_limit_raises_when_size_exceeds_limit(
+ mocker: MockerFixture,
+) -> None:
+ response = _StubResponse(status_code=200, chunks=[b"abc", b"de"])
+ mocker.patch("core.helper.download.ssrf_proxy.get", return_value=response)
+
+ with pytest.raises(ValueError, match="Max file size reached"):
+ download_with_size_limit("https://example.com/large.bin", max_download_size=4)
+
+
+def test_download_with_size_limit_accepts_content_equal_to_limit(
+ mocker: MockerFixture,
+) -> None:
+ response = _StubResponse(status_code=200, chunks=[b"ab", b"cd"])
+ mocker.patch("core.helper.download.ssrf_proxy.get", return_value=response)
+
+ content = download_with_size_limit("https://example.com/exact.bin", max_download_size=4)
+
+ assert content == b"abcd"
diff --git a/api/tests/unit_tests/core/helper/test_http_client_pooling.py b/api/tests/unit_tests/core/helper/test_http_client_pooling.py
new file mode 100644
index 0000000000..c29962f1b1
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/test_http_client_pooling.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from unittest.mock import MagicMock
+
+import httpx
+
+from core.helper.http_client_pooling import HttpClientPoolFactory
+
+
+def test_get_or_create_reuses_client_for_same_key() -> None:
+ factory = HttpClientPoolFactory()
+ first_client = MagicMock(spec=httpx.Client)
+ second_client = MagicMock(spec=httpx.Client)
+ clients = [first_client, second_client]
+
+ def _builder() -> httpx.Client:
+ return clients.pop(0)
+
+ assert factory.get_or_create("shared", _builder) is first_client
+ assert factory.get_or_create("shared", _builder) is first_client
+
+
+def test_get_or_create_creates_distinct_clients_for_distinct_keys() -> None:
+ factory = HttpClientPoolFactory()
+ client_a = MagicMock(spec=httpx.Client)
+ client_b = MagicMock(spec=httpx.Client)
+
+ assert factory.get_or_create("a", lambda: client_a) is client_a
+ assert factory.get_or_create("b", lambda: client_b) is client_b
+
+
+def test_close_all_closes_pooled_clients_and_allows_recreate() -> None:
+ factory = HttpClientPoolFactory()
+ first_client = MagicMock(spec=httpx.Client)
+ replacement_client = MagicMock(spec=httpx.Client)
+
+ assert factory.get_or_create("x", lambda: first_client) is first_client
+ factory.close_all()
+
+ first_client.close.assert_called_once()
+ assert factory.get_or_create("x", lambda: replacement_client) is replacement_client
diff --git a/api/tests/unit_tests/core/helper/test_marketplace.py b/api/tests/unit_tests/core/helper/test_marketplace.py
new file mode 100644
index 0000000000..bd561b1637
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/test_marketplace.py
@@ -0,0 +1,110 @@
+from types import SimpleNamespace
+from unittest.mock import MagicMock
+
+from pytest_mock import MockerFixture
+
+from core.helper.marketplace import (
+ batch_fetch_plugin_by_ids,
+ batch_fetch_plugin_manifests,
+ download_plugin_pkg,
+ fetch_global_plugin_manifest,
+ get_plugin_pkg_url,
+ record_install_plugin_event,
+)
+
+
+def test_get_plugin_pkg_url_contains_unique_identifier() -> None:
+ url = get_plugin_pkg_url("plugin@1.0.0")
+
+ assert "api/v1/plugins/download" in url
+ assert "unique_identifier=plugin@1.0.0" in url
+
+
+def test_download_plugin_pkg_delegates_with_configured_size(mocker: MockerFixture) -> None:
+ mocked_download = mocker.patch("core.helper.marketplace.download_with_size_limit", return_value=b"pkg")
+ mocker.patch("core.helper.marketplace.dify_config.PLUGIN_MAX_PACKAGE_SIZE", 1234)
+
+ result = download_plugin_pkg("plugin.a.b")
+
+ assert result == b"pkg"
+ mocked_download.assert_called_once()
+ called_url, called_limit = mocked_download.call_args.args
+ assert "unique_identifier=plugin.a.b" in called_url
+ assert called_limit == 1234
+
+
+def test_batch_fetch_plugin_by_ids_returns_empty_for_empty_input(mocker: MockerFixture) -> None:
+ post_mock = mocker.patch("core.helper.marketplace.httpx.post")
+
+ assert batch_fetch_plugin_by_ids([]) == []
+ post_mock.assert_not_called()
+
+
+def test_batch_fetch_plugin_by_ids_returns_plugins_from_response(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.json.return_value = {"data": {"plugins": [{"id": "p1"}]}}
+ response.raise_for_status.return_value = None
+ post_mock = mocker.patch("core.helper.marketplace.httpx.post", return_value=response)
+
+ plugins = batch_fetch_plugin_by_ids(["p1"])
+
+ assert plugins == [{"id": "p1"}]
+ post_mock.assert_called_once()
+ response.raise_for_status.assert_called_once()
+
+
+def test_batch_fetch_plugin_manifests_returns_empty_for_empty_input(mocker: MockerFixture) -> None:
+ post_mock = mocker.patch("core.helper.marketplace.httpx.post")
+
+ assert batch_fetch_plugin_manifests([]) == []
+ post_mock.assert_not_called()
+
+
+def test_batch_fetch_plugin_manifests_validates_and_returns_plugins(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.raise_for_status.return_value = None
+ response.json.return_value = {"data": {"plugins": [{"id": "p1"}, {"id": "p2"}]}}
+ post_mock = mocker.patch("core.helper.marketplace.httpx.post", return_value=response)
+ validate_mock = mocker.patch(
+ "core.helper.marketplace.MarketplacePluginDeclaration.model_validate",
+ side_effect=["manifest-1", "manifest-2"],
+ )
+
+ result = batch_fetch_plugin_manifests(["p1", "p2"])
+
+ assert result == ["manifest-1", "manifest-2"]
+ post_mock.assert_called_once()
+ assert validate_mock.call_count == 2
+ response.raise_for_status.assert_called_once()
+
+
+def test_record_install_plugin_event_posts_and_checks_status(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.raise_for_status.return_value = None
+ post_mock = mocker.patch("core.helper.marketplace.httpx.post", return_value=response)
+
+ record_install_plugin_event("plugin.a")
+
+ post_mock.assert_called_once()
+ response.raise_for_status.assert_called_once()
+
+
+def test_fetch_global_plugin_manifest_caches_each_plugin(mocker: MockerFixture) -> None:
+ response = MagicMock()
+ response.raise_for_status.return_value = None
+ response.json.return_value = {"plugins": [{"id": "a"}, {"id": "b"}]}
+ mocker.patch("core.helper.marketplace.httpx.get", return_value=response)
+
+ snapshot_a = SimpleNamespace(plugin_id="plugin-a", model_dump_json=lambda: '{"id":"a"}')
+ snapshot_b = SimpleNamespace(plugin_id="plugin-b", model_dump_json=lambda: '{"id":"b"}')
+ validate_mock = mocker.patch(
+ "core.helper.marketplace.MarketplacePluginSnapshot.model_validate",
+ side_effect=[snapshot_a, snapshot_b],
+ )
+ setex_mock = mocker.patch("core.helper.marketplace.redis_client.setex")
+
+ fetch_global_plugin_manifest("prefix:", 60)
+
+ assert validate_mock.call_count == 2
+ setex_mock.assert_any_call(name="prefix:plugin-a", time=60, value='{"id":"a"}')
+ setex_mock.assert_any_call(name="prefix:plugin-b", time=60, value='{"id":"b"}')
diff --git a/api/tests/unit_tests/core/helper/test_moderation.py b/api/tests/unit_tests/core/helper/test_moderation.py
new file mode 100644
index 0000000000..4a84099b74
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/test_moderation.py
@@ -0,0 +1,158 @@
+from types import SimpleNamespace
+from typing import cast
+
+import pytest
+from core.model_runtime.errors.invoke import InvokeBadRequestError
+from pytest_mock import MockerFixture
+
+from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
+from core.helper.moderation import check_moderation
+from models.provider import ProviderType
+
+
+def _build_model_config(provider: str = "openai") -> SimpleNamespace:
+ return SimpleNamespace(
+ provider=provider,
+ provider_model_bundle=SimpleNamespace(
+ configuration=SimpleNamespace(using_provider_type=ProviderType.SYSTEM),
+ ),
+ )
+
+
+def test_check_moderation_returns_false_when_feature_not_enabled(mocker: MockerFixture) -> None:
+ mocker.patch(
+ "core.helper.moderation.hosting_configuration",
+ SimpleNamespace(moderation_config=None, provider_map={}),
+ )
+
+ assert (
+ check_moderation(
+ "tenant-1",
+ cast(ModelConfigWithCredentialsEntity, _build_model_config()),
+ "hello",
+ )
+ is False
+ )
+
+
+def test_check_moderation_returns_false_when_hosting_credentials_missing(mocker: MockerFixture) -> None:
+ openai_provider = "langgenius/openai/openai"
+ mocker.patch(
+ "core.helper.moderation.hosting_configuration",
+ SimpleNamespace(
+ moderation_config=SimpleNamespace(enabled=True, providers={"openai"}),
+ provider_map={openai_provider: SimpleNamespace(enabled=True, credentials=None)},
+ ),
+ )
+
+ assert (
+ check_moderation(
+ "tenant-1",
+ cast(ModelConfigWithCredentialsEntity, _build_model_config()),
+ "hello",
+ )
+ is False
+ )
+
+
+def test_check_moderation_returns_true_when_model_accepts_text(mocker: MockerFixture) -> None:
+ openai_provider = "langgenius/openai/openai"
+ hosting_openai = SimpleNamespace(enabled=True, credentials={"api_key": "k"})
+ mocker.patch(
+ "core.helper.moderation.hosting_configuration",
+ SimpleNamespace(
+ moderation_config=SimpleNamespace(enabled=True, providers={"openai"}),
+ provider_map={openai_provider: hosting_openai},
+ ),
+ )
+ mocker.patch("core.helper.moderation.secrets.choice", return_value="chunk")
+
+ moderation_model = SimpleNamespace(invoke=lambda **invoke_kwargs: invoke_kwargs["text"] == "chunk")
+ factory = SimpleNamespace(get_model_type_instance=lambda **_factory_kwargs: moderation_model)
+ mocker.patch("core.helper.moderation.create_plugin_model_provider_factory", return_value=factory)
+
+ assert (
+ check_moderation(
+ "tenant-1",
+ cast(ModelConfigWithCredentialsEntity, _build_model_config()),
+ "abc",
+ )
+ is True
+ )
+
+
+def test_check_moderation_returns_true_when_text_is_empty(mocker: MockerFixture) -> None:
+ openai_provider = "langgenius/openai/openai"
+ hosting_openai = SimpleNamespace(enabled=True, credentials={"api_key": "k"})
+ mocker.patch(
+ "core.helper.moderation.hosting_configuration",
+ SimpleNamespace(
+ moderation_config=SimpleNamespace(enabled=True, providers={"openai"}),
+ provider_map={openai_provider: hosting_openai},
+ ),
+ )
+ factory_mock = mocker.patch("core.helper.moderation.create_plugin_model_provider_factory")
+ choice_mock = mocker.patch("core.helper.moderation.secrets.choice")
+
+ assert (
+ check_moderation(
+ "tenant-1",
+ cast(ModelConfigWithCredentialsEntity, _build_model_config()),
+ "",
+ )
+ is True
+ )
+ factory_mock.assert_not_called()
+ choice_mock.assert_not_called()
+
+
+def test_check_moderation_returns_false_when_model_rejects_text(mocker: MockerFixture) -> None:
+ openai_provider = "langgenius/openai/openai"
+ hosting_openai = SimpleNamespace(enabled=True, credentials={"api_key": "k"})
+ mocker.patch(
+ "core.helper.moderation.hosting_configuration",
+ SimpleNamespace(
+ moderation_config=SimpleNamespace(enabled=True, providers={"openai"}),
+ provider_map={openai_provider: hosting_openai},
+ ),
+ )
+ mocker.patch("core.helper.moderation.secrets.choice", return_value="chunk")
+
+ moderation_model = SimpleNamespace(invoke=lambda **_invoke_kwargs: False)
+ factory = SimpleNamespace(get_model_type_instance=lambda **_factory_kwargs: moderation_model)
+ mocker.patch("core.helper.moderation.create_plugin_model_provider_factory", return_value=factory)
+
+ assert (
+ check_moderation(
+ "tenant-1",
+ cast(ModelConfigWithCredentialsEntity, _build_model_config()),
+ "abc",
+ )
+ is False
+ )
+
+
+def test_check_moderation_raises_bad_request_when_provider_call_fails(mocker: MockerFixture) -> None:
+ openai_provider = "langgenius/openai/openai"
+ hosting_openai = SimpleNamespace(enabled=True, credentials={"api_key": "k"})
+ mocker.patch(
+ "core.helper.moderation.hosting_configuration",
+ SimpleNamespace(
+ moderation_config=SimpleNamespace(enabled=True, providers={"openai"}),
+ provider_map={openai_provider: hosting_openai},
+ ),
+ )
+ mocker.patch("core.helper.moderation.secrets.choice", return_value="chunk")
+
+ failing_model = SimpleNamespace(
+ invoke=lambda **_invoke_kwargs: (_ for _ in ()).throw(RuntimeError("boom")),
+ )
+ factory = SimpleNamespace(get_model_type_instance=lambda **_factory_kwargs: failing_model)
+ mocker.patch("core.helper.moderation.create_plugin_model_provider_factory", return_value=factory)
+
+ with pytest.raises(InvokeBadRequestError, match="Rate limit exceeded, please try again later."):
+ check_moderation(
+ "tenant-1",
+ cast(ModelConfigWithCredentialsEntity, _build_model_config()),
+ "abc",
+ )
diff --git a/api/tests/unit_tests/core/helper/test_name_generator.py b/api/tests/unit_tests/core/helper/test_name_generator.py
new file mode 100644
index 0000000000..37a87260f1
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/test_name_generator.py
@@ -0,0 +1,33 @@
+from dataclasses import dataclass
+
+from pytest_mock import MockerFixture
+
+from core.helper.name_generator import generate_incremental_name, generate_provider_name
+from core.plugin.entities.plugin_daemon import CredentialType
+
+
+@dataclass
+class _Provider:
+ name: str
+
+
+def test_generate_incremental_name_uses_next_highest_suffix() -> None:
+ names = ["API KEY 1", "API KEY 3", "API KEY 2", "other", "", "API KEY x"]
+
+ assert generate_incremental_name(names, "API KEY") == "API KEY 4"
+
+
+def test_generate_incremental_name_returns_default_when_no_matches() -> None:
+ assert generate_incremental_name(["custom", " ", ""], "AUTH") == "AUTH 1"
+
+
+def test_generate_provider_name_uses_credential_display_name() -> None:
+ providers = [_Provider(name="API KEY 1"), _Provider(name="API KEY 2")]
+
+ assert generate_provider_name(providers, CredentialType.API_KEY) == "API KEY 3"
+
+
+def test_generate_provider_name_falls_back_on_generation_error(mocker: MockerFixture) -> None:
+ mocker.patch("core.helper.name_generator.generate_incremental_name", side_effect=RuntimeError("boom"))
+
+ assert generate_provider_name([], CredentialType.OAUTH2, fallback_context="ctx") == "AUTH 1"
diff --git a/api/tests/unit_tests/core/helper/test_tool_parameter_cache.py b/api/tests/unit_tests/core/helper/test_tool_parameter_cache.py
new file mode 100644
index 0000000000..3c8b44d010
--- /dev/null
+++ b/api/tests/unit_tests/core/helper/test_tool_parameter_cache.py
@@ -0,0 +1,71 @@
+import json
+
+from pytest_mock import MockerFixture
+
+from core.helper.tool_parameter_cache import ToolParameterCache, ToolParameterCacheType
+
+
+def test_tool_parameter_cache_get_returns_decoded_dict(mocker: MockerFixture) -> None:
+ redis_client_mock = mocker.patch("core.helper.tool_parameter_cache.redis_client")
+ cache = ToolParameterCache(
+ tenant_id="tenant",
+ provider="provider",
+ tool_name="tool",
+ cache_type=ToolParameterCacheType.PARAMETER,
+ identity_id="identity",
+ )
+ payload = {"k": "v", "n": 1}
+ cache_key = cache.cache_key
+
+ redis_client_mock.get.return_value = json.dumps(payload).encode("utf-8")
+
+ assert cache.get() == payload
+ redis_client_mock.get.assert_called_once_with(cache_key)
+
+
+def test_tool_parameter_cache_get_returns_none_for_invalid_json(mocker: MockerFixture) -> None:
+ redis_client_mock = mocker.patch("core.helper.tool_parameter_cache.redis_client")
+ cache = ToolParameterCache(
+ tenant_id="tenant",
+ provider="provider",
+ tool_name="tool",
+ cache_type=ToolParameterCacheType.PARAMETER,
+ identity_id="identity",
+ )
+
+ redis_client_mock.get.return_value = b"{invalid-json"
+
+ assert cache.get() is None
+
+
+def test_tool_parameter_cache_get_returns_none_when_key_is_missing(mocker: MockerFixture) -> None:
+ redis_client_mock = mocker.patch("core.helper.tool_parameter_cache.redis_client")
+ cache = ToolParameterCache(
+ tenant_id="tenant",
+ provider="provider",
+ tool_name="tool",
+ cache_type=ToolParameterCacheType.PARAMETER,
+ identity_id="identity",
+ )
+
+ redis_client_mock.get.return_value = None
+
+ assert cache.get() is None
+
+
+def test_tool_parameter_cache_set_and_delete(mocker: MockerFixture) -> None:
+ redis_client_mock = mocker.patch("core.helper.tool_parameter_cache.redis_client")
+ cache = ToolParameterCache(
+ tenant_id="tenant",
+ provider="provider",
+ tool_name="tool",
+ cache_type=ToolParameterCacheType.PARAMETER,
+ identity_id="identity",
+ )
+
+ params = {"a": "b"}
+ cache.set(params)
+ cache.delete()
+
+ redis_client_mock.setex.assert_called_once_with(cache.cache_key, 86400, json.dumps(params))
+ redis_client_mock.delete.assert_called_once_with(cache.cache_key)
diff --git a/api/tests/unit_tests/core/mcp/server/test_streamable_http.py b/api/tests/unit_tests/core/mcp/server/test_streamable_http.py
index 313d18c695..9a815fb94d 100644
--- a/api/tests/unit_tests/core/mcp/server/test_streamable_http.py
+++ b/api/tests/unit_tests/core/mcp/server/test_streamable_http.py
@@ -415,12 +415,44 @@ class TestUtilityFunctions:
label="Upload",
required=False,
),
+ VariableEntity(
+ type=VariableEntityType.CHECKBOX,
+ variable="enabled",
+ description="Enable flag",
+ label="Enabled",
+ required=False,
+ ),
+ VariableEntity(
+ type=VariableEntityType.JSON_OBJECT,
+ variable="config",
+ description="Config object",
+ label="Config",
+ required=True,
+ ),
+ VariableEntity(
+ type=VariableEntityType.JSON_OBJECT,
+ variable="schema_config",
+ description="Config with schema",
+ label="Schema Config",
+ required=False,
+ json_schema={
+ "properties": {
+ "host": {"type": "string"},
+ "port": {"type": "number"},
+ },
+ "required": ["host"],
+ "additionalProperties": False,
+ },
+ ),
]
parameters_dict: dict[str, str] = {
"name": "Enter your name",
"category": "Select category",
"count": "Enter count",
+ "enabled": "Enable flag",
+ "config": "Config object",
+ "schema_config": "Config with schema",
}
parameters, required = convert_input_form_to_parameters(user_input_form, parameters_dict)
@@ -437,20 +469,35 @@ class TestUtilityFunctions:
assert "count" in parameters
assert parameters["count"]["type"] == "number"
- # FILE type should be skipped - it creates empty dict but gets filtered later
- # Check that it doesn't have any meaningful content
- if "upload" in parameters:
- assert parameters["upload"] == {}
+ # FILE type is skipped entirely via `continue` — key should not exist
+ assert "upload" not in parameters
+
+ # CHECKBOX maps to boolean
+ assert parameters["enabled"]["type"] == "boolean"
+
+ # JSON_OBJECT without json_schema maps to object
+ assert parameters["config"]["type"] == "object"
+ assert "properties" not in parameters["config"]
+
+ # JSON_OBJECT with json_schema forwards schema keys
+ assert parameters["schema_config"]["type"] == "object"
+ assert parameters["schema_config"]["properties"] == {
+ "host": {"type": "string"},
+ "port": {"type": "number"},
+ }
+ assert parameters["schema_config"]["required"] == ["host"]
+ assert parameters["schema_config"]["additionalProperties"] is False
# Check required fields
assert "name" in required
assert "count" in required
+ assert "config" in required
assert "category" not in required
# Note: _get_request_id function has been removed as request_id is now passed as parameter
def test_convert_input_form_to_parameters_jsonschema_validation_ok(self):
- """Current schema uses 'number' for numeric fields; it should be a valid JSON Schema."""
+ """Generated schema with all supported types should be valid JSON Schema."""
user_input_form = [
VariableEntity(
type=VariableEntityType.NUMBER,
@@ -466,11 +513,27 @@ class TestUtilityFunctions:
label="Name",
required=False,
),
+ VariableEntity(
+ type=VariableEntityType.CHECKBOX,
+ variable="enabled",
+ description="Toggle",
+ label="Enabled",
+ required=False,
+ ),
+ VariableEntity(
+ type=VariableEntityType.JSON_OBJECT,
+ variable="metadata",
+ description="Metadata",
+ label="Metadata",
+ required=False,
+ ),
]
parameters_dict = {
"count": "Enter count",
"name": "Enter your name",
+ "enabled": "Toggle flag",
+ "metadata": "Metadata object",
}
parameters, required = convert_input_form_to_parameters(user_input_form, parameters_dict)
@@ -485,9 +548,12 @@ class TestUtilityFunctions:
# 1) The schema itself must be valid
jsonschema.Draft202012Validator.check_schema(schema)
- # 2) Both float and integer instances should pass validation
+ # 2) Validate instances with all types
jsonschema.validate(instance={"count": 3.14, "name": "alice"}, schema=schema)
- jsonschema.validate(instance={"count": 2, "name": "bob"}, schema=schema)
+ jsonschema.validate(
+ instance={"count": 2, "enabled": True, "metadata": {"key": "val"}},
+ schema=schema,
+ )
def test_legacy_float_type_schema_is_invalid(self):
"""Legacy/buggy behavior: using 'float' should produce an invalid JSON Schema."""
diff --git a/api/tests/unit_tests/core/plugin/impl/test_base_client_impl.py b/api/tests/unit_tests/core/plugin/impl/test_base_client_impl.py
index c216906d68..23894bd417 100644
--- a/api/tests/unit_tests/core/plugin/impl/test_base_client_impl.py
+++ b/api/tests/unit_tests/core/plugin/impl/test_base_client_impl.py
@@ -57,7 +57,7 @@ class TestBasePluginClientImpl:
def test_stream_request_handles_data_lines_and_dict_payload(self, mocker):
client = BasePluginClient()
stream_mock = mocker.patch(
- "core.plugin.impl.base.httpx.stream",
+ "httpx.Client.stream",
return_value=_StreamContext([b"", b"data: hello", "world"]),
)
diff --git a/api/tests/unit_tests/core/plugin/test_endpoint_client.py b/api/tests/unit_tests/core/plugin/test_endpoint_client.py
index 48e30e9c2f..ff9deb918a 100644
--- a/api/tests/unit_tests/core/plugin/test_endpoint_client.py
+++ b/api/tests/unit_tests/core/plugin/test_endpoint_client.py
@@ -10,12 +10,23 @@ Tests follow the Arrange-Act-Assert pattern for clarity.
from unittest.mock import MagicMock, patch
+import httpx
import pytest
from core.plugin.impl.endpoint import PluginEndpointClient
from core.plugin.impl.exc import PluginDaemonInternalServerError
+@pytest.fixture(autouse=True)
+def _patch_shared_httpx_client():
+ """Patch module-level client methods to delegate to module httpx.request/stream."""
+ with (
+ patch("core.plugin.impl.base._httpx_client.request", side_effect=lambda **kw: httpx.request(**kw)),
+ patch("core.plugin.impl.base._httpx_client.stream", side_effect=lambda **kw: httpx.stream(**kw)),
+ ):
+ yield
+
+
class TestPluginEndpointClientDelete:
"""Unit tests for PluginEndpointClient delete_endpoint operation.
diff --git a/api/tests/unit_tests/core/plugin/test_plugin_runtime.py b/api/tests/unit_tests/core/plugin/test_plugin_runtime.py
index 3063ca0197..a3b1e5f6b0 100644
--- a/api/tests/unit_tests/core/plugin/test_plugin_runtime.py
+++ b/api/tests/unit_tests/core/plugin/test_plugin_runtime.py
@@ -47,6 +47,20 @@ from core.plugin.impl.plugin import PluginInstaller
from core.plugin.impl.tool import PluginToolManager
+@pytest.fixture(autouse=True)
+def _patch_shared_httpx_client():
+ """Make BasePluginClient's module-level httpx client delegate to patched httpx.request/stream.
+
+ After refactor, code uses core.plugin.impl.base._httpx_client directly.
+ Patch its request/stream to route through module-level httpx so existing mocks still apply.
+ """
+ with (
+ patch("core.plugin.impl.base._httpx_client.request", side_effect=lambda **kw: httpx.request(**kw)),
+ patch("core.plugin.impl.base._httpx_client.stream", side_effect=lambda **kw: httpx.stream(**kw)),
+ ):
+ yield
+
+
class TestPluginRuntimeExecution:
"""Unit tests for plugin execution functionality.
diff --git a/api/tests/unit_tests/core/rag/datasource/keyword/jieba/test_stopwords.py b/api/tests/unit_tests/core/rag/datasource/keyword/jieba/test_stopwords.py
index 1b1541ddd6..4375d854ba 100644
--- a/api/tests/unit_tests/core/rag/datasource/keyword/jieba/test_stopwords.py
+++ b/api/tests/unit_tests/core/rag/datasource/keyword/jieba/test_stopwords.py
@@ -2,5 +2,6 @@ from core.rag.datasource.keyword.jieba.stopwords import STOPWORDS
def test_stopwords_loaded():
+ assert isinstance(STOPWORDS, frozenset)
assert "during" in STOPWORDS
assert "the" in STOPWORDS
diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/baidu/test_baidu_vector.py b/api/tests/unit_tests/core/rag/datasource/vdb/baidu/test_baidu_vector.py
index c46c3d5e4b..487d021697 100644
--- a/api/tests/unit_tests/core/rag/datasource/vdb/baidu/test_baidu_vector.py
+++ b/api/tests/unit_tests/core/rag/datasource/vdb/baidu/test_baidu_vector.py
@@ -381,13 +381,22 @@ def test_init_client_constructs_configuration_and_client(baidu_module, monkeypat
monkeypatch.setattr(baidu_module, "MochowClient", client_cls)
vector = baidu_module.BaiduVector.__new__(baidu_module.BaiduVector)
- config = SimpleNamespace(account="account", api_key="key", endpoint="https://endpoint")
+ config = SimpleNamespace(
+ account="account",
+ api_key="key",
+ endpoint="https://endpoint",
+ connection_timeout_in_mills=12_345,
+ )
client = vector._init_client(config)
assert client == "client"
credentials.assert_called_once_with("account", "key")
- configuration.assert_called_once_with(credentials="credentials", endpoint="https://endpoint")
+ configuration.assert_called_once_with(
+ credentials="credentials",
+ endpoint="https://endpoint",
+ connection_timeout_in_mills=12_345,
+ )
client_cls.assert_called_once_with("configuration")
diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/test_field.py b/api/tests/unit_tests/core/rag/datasource/vdb/test_field.py
new file mode 100644
index 0000000000..d68c93b021
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/datasource/vdb/test_field.py
@@ -0,0 +1,45 @@
+import pytest
+
+from core.rag.datasource.vdb.field import parse_metadata_json
+
+
+class TestParseMetadataJson:
+ def test_none_returns_empty_dict(self):
+ assert parse_metadata_json(None) == {}
+
+ def test_empty_string_returns_empty_dict(self):
+ assert parse_metadata_json("") == {}
+
+ def test_valid_json_string(self):
+ result = parse_metadata_json('{"doc_id": "abc", "score": 0.9}')
+ assert result == {"doc_id": "abc", "score": 0.9}
+
+ def test_dict_passthrough(self):
+ original = {"doc_id": "abc", "document_id": "123"}
+ result = parse_metadata_json(original)
+ assert result == original
+
+ def test_empty_json_object(self):
+ assert parse_metadata_json("{}") == {}
+
+ def test_invalid_json_raises_value_error(self):
+ with pytest.raises(ValueError):
+ parse_metadata_json("{invalid json")
+
+ def test_nested_metadata(self):
+ result = parse_metadata_json('{"doc_id": "1", "extra": {"nested": true}}')
+ assert result["extra"]["nested"] is True
+
+ def test_non_str_non_dict_returns_empty_dict(self):
+ assert parse_metadata_json(123) == {}
+ assert parse_metadata_json([1, 2]) == {}
+
+ def test_bytes_input(self):
+ result = parse_metadata_json(b'{"key": "value"}')
+ assert result == {"key": "value"}
+
+ def test_empty_bytes_returns_empty_dict(self):
+ assert parse_metadata_json(b"") == {}
+
+ def test_empty_bytearray_returns_empty_dict(self):
+ assert parse_metadata_json(bytearray(b"")) == {}
diff --git a/api/tests/unit_tests/core/rag/indexing/test_index_processor.py b/api/tests/unit_tests/core/rag/indexing/test_index_processor.py
new file mode 100644
index 0000000000..a3f284955b
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/indexing/test_index_processor.py
@@ -0,0 +1,15 @@
+from core.rag.index_processor.index_processor import IndexProcessor
+
+
+class TestIndexProcessor:
+ def test_format_preview_supports_qa_preview_shape(self) -> None:
+ preview = IndexProcessor().format_preview(
+ "qa_model",
+ {"qa_chunks": [{"question": "Q1", "answer": "A1"}]},
+ )
+
+ assert preview.chunk_structure == "qa_model"
+ assert preview.total_segments == 1
+ assert len(preview.qa_preview) == 1
+ assert preview.qa_preview[0].question == "Q1"
+ assert preview.qa_preview[0].answer == "A1"
diff --git a/api/tests/unit_tests/core/rag/indexing/test_indexing_runner.py b/api/tests/unit_tests/core/rag/indexing/test_indexing_runner.py
index 450e716636..641c5d9ba0 100644
--- a/api/tests/unit_tests/core/rag/indexing/test_indexing_runner.py
+++ b/api/tests/unit_tests/core/rag/indexing/test_indexing_runner.py
@@ -795,33 +795,21 @@ class TestIndexingRunnerRun:
doc = sample_dataset_documents[0]
# Mock database queries
- mock_dependencies["db"].session.get.return_value = doc
-
mock_dataset = Mock(spec=Dataset)
mock_dataset.id = doc.dataset_id
mock_dataset.tenant_id = doc.tenant_id
mock_dataset.indexing_technique = IndexTechniqueType.ECONOMY
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.first.return_value = mock_dataset
+
+ mock_current_user = MagicMock()
+ mock_current_user.set_tenant_id = MagicMock()
+
+ get_dispatch = {"Document": doc, "Dataset": mock_dataset, "Account": mock_current_user}
+ mock_dependencies["db"].session.get.side_effect = lambda model, id: get_dispatch.get(model.__name__)
mock_process_rule = Mock(spec=DatasetProcessRule)
mock_process_rule.to_dict.return_value = {"mode": "automatic", "rules": {}}
mock_dependencies["db"].session.scalar.return_value = mock_process_rule
- # Mock current_user (Account) for _transform
- mock_current_user = MagicMock()
- mock_current_user.set_tenant_id = MagicMock()
-
- # Setup db.session.query to return different results based on the model
- def mock_query_side_effect(model):
- mock_query_result = MagicMock()
- if model.__name__ == "Dataset":
- mock_query_result.filter_by.return_value.first.return_value = mock_dataset
- elif model.__name__ == "Account":
- mock_query_result.filter_by.return_value.first.return_value = mock_current_user
- return mock_query_result
-
- mock_dependencies["db"].session.query.side_effect = mock_query_side_effect
-
# Mock processor
mock_processor = MagicMock()
mock_dependencies["factory"].return_value.init_index_processor.return_value = mock_processor
@@ -891,10 +879,11 @@ class TestIndexingRunnerRun:
doc = sample_dataset_documents[0]
# Mock database
- mock_dependencies["db"].session.get.return_value = doc
-
mock_dataset = Mock(spec=Dataset)
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.first.return_value = mock_dataset
+ mock_dataset.tenant_id = doc.tenant_id
+
+ get_dispatch = {"Document": doc, "Dataset": mock_dataset}
+ mock_dependencies["db"].session.get.side_effect = lambda model, id: get_dispatch.get(model.__name__)
mock_process_rule = Mock(spec=DatasetProcessRule)
mock_process_rule.to_dict.return_value = {"mode": "automatic", "rules": {}}
@@ -917,11 +906,12 @@ class TestIndexingRunnerRun:
runner = IndexingRunner()
doc = sample_dataset_documents[0]
- # Mock database to raise ObjectDeletedError
- mock_dependencies["db"].session.get.return_value = doc
-
+ # Mock database
mock_dataset = Mock(spec=Dataset)
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.first.return_value = mock_dataset
+ mock_dataset.tenant_id = doc.tenant_id
+
+ get_dispatch = {"Document": doc, "Dataset": mock_dataset}
+ mock_dependencies["db"].session.get.side_effect = lambda model, id: get_dispatch.get(model.__name__)
mock_process_rule = Mock(spec=DatasetProcessRule)
mock_process_rule.to_dict.return_value = {"mode": "automatic", "rules": {}}
@@ -945,17 +935,21 @@ class TestIndexingRunnerRun:
docs = sample_dataset_documents
# Mock database
- def get_side_effect(model_class, doc_id):
- for doc in docs:
- if doc.id == doc_id:
- return doc
- return None
-
- mock_dependencies["db"].session.get.side_effect = get_side_effect
-
mock_dataset = Mock(spec=Dataset)
mock_dataset.indexing_technique = IndexTechniqueType.ECONOMY
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.first.return_value = mock_dataset
+ mock_current_user = MagicMock()
+ mock_current_user.set_tenant_id = MagicMock()
+
+ doc_map = {doc.id: doc for doc in docs}
+ model_dispatch = {"Dataset": mock_dataset, "Account": mock_current_user}
+
+ def get_side_effect(model_class, id):
+ name = model_class.__name__
+ if name == "Document":
+ return doc_map.get(id)
+ return model_dispatch.get(name)
+
+ mock_dependencies["db"].session.get.side_effect = get_side_effect
mock_process_rule = Mock(spec=DatasetProcessRule)
mock_process_rule.to_dict.return_value = {"mode": "automatic", "rules": {}}
@@ -1035,9 +1029,8 @@ class TestIndexingRunnerRetryLogic:
mock_document = Mock(spec=DatasetDocument)
mock_document.id = document_id
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.count.return_value = 0
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.first.return_value = mock_document
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.update.return_value = None
+ mock_dependencies["db"].session.scalar.return_value = 0
+ mock_dependencies["db"].session.get.return_value = mock_document
# Act
IndexingRunner._update_document_index_status(
@@ -1053,7 +1046,7 @@ class TestIndexingRunnerRetryLogic:
"""Test document status update when document is paused."""
# Arrange
document_id = str(uuid.uuid4())
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.count.return_value = 1
+ mock_dependencies["db"].session.scalar.return_value = 1
# Act & Assert
with pytest.raises(DocumentIsPausedError):
@@ -1063,8 +1056,8 @@ class TestIndexingRunnerRetryLogic:
"""Test document status update when document is deleted."""
# Arrange
document_id = str(uuid.uuid4())
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.count.return_value = 0
- mock_dependencies["db"].session.query.return_value.filter_by.return_value.first.return_value = None
+ mock_dependencies["db"].session.scalar.return_value = 0
+ mock_dependencies["db"].session.get.return_value = None
# Act & Assert
with pytest.raises(DocumentIsDeletedPausedError):
diff --git a/api/tests/unit_tests/core/test_provider_manager.py b/api/tests/unit_tests/core/test_provider_manager.py
index 259cb5fdd0..ee26172459 100644
--- a/api/tests/unit_tests/core/test_provider_manager.py
+++ b/api/tests/unit_tests/core/test_provider_manager.py
@@ -48,7 +48,7 @@ def test__to_model_settings(mocker: MockerFixture, mock_provider_entity):
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
enabled=True,
load_balancing_enabled=True,
)
@@ -61,7 +61,7 @@ def test__to_model_settings(mocker: MockerFixture, mock_provider_entity):
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
name="__inherit__",
encrypted_config=None,
enabled=True,
@@ -70,7 +70,7 @@ def test__to_model_settings(mocker: MockerFixture, mock_provider_entity):
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
name="first",
encrypted_config='{"openai_api_key": "fake_key"}',
enabled=True,
@@ -110,7 +110,7 @@ def test__to_model_settings_only_one_lb(mocker: MockerFixture, mock_provider_ent
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
enabled=True,
load_balancing_enabled=True,
)
@@ -121,7 +121,7 @@ def test__to_model_settings_only_one_lb(mocker: MockerFixture, mock_provider_ent
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
name="__inherit__",
encrypted_config=None,
enabled=True,
@@ -157,7 +157,7 @@ def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_ent
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
enabled=True,
load_balancing_enabled=False,
)
@@ -168,7 +168,7 @@ def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_ent
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
name="__inherit__",
encrypted_config=None,
enabled=True,
@@ -177,7 +177,7 @@ def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_ent
tenant_id="tenant_id",
provider_name="openai",
model_name="gpt-4",
- model_type="text-generation",
+ model_type="llm",
name="first",
encrypted_config='{"openai_api_key": "fake_key"}',
enabled=True,
@@ -270,7 +270,7 @@ def test_get_default_model_uses_injected_runtime_for_existing_default_record(moc
tenant_id="tenant-id",
provider_name="openai",
model_name="gpt-4",
- model_type=ModelType.LLM.to_origin_model_type(),
+ model_type=ModelType.LLM,
)
mock_session = Mock()
mock_session.scalar.return_value = existing_default_model
@@ -449,7 +449,7 @@ def test_update_default_model_record_updates_existing_record(mocker: MockerFixtu
tenant_id="tenant-id",
provider_name="anthropic",
model_name="claude-3-sonnet",
- model_type=ModelType.LLM.to_origin_model_type(),
+ model_type=ModelType.LLM,
)
mock_session = Mock()
mock_session.scalar.return_value = existing_default_model
@@ -487,7 +487,7 @@ def test_update_default_model_record_creates_record_with_origin_model_type(mocke
assert created_default_model.tenant_id == "tenant-id"
assert created_default_model.provider_name == "openai"
assert created_default_model.model_name == "gpt-4"
- assert created_default_model.model_type == ModelType.LLM.to_origin_model_type()
+ assert created_default_model.model_type == ModelType.LLM
mock_session.commit.assert_called_once()
diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py
index a215e9d350..7841bf05ad 100644
--- a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py
+++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py
@@ -1,4 +1,5 @@
import base64
+import logging
import uuid
from collections.abc import Sequence
from unittest import mock
@@ -1261,6 +1262,10 @@ def test_llm_node_image_file_to_markdown(llm_node: LLMNode):
class TestSaveMultimodalOutputAndConvertResultToMarkdown:
+ class _UnknownItem:
+ def __str__(self) -> str:
+ return ""
+
def test_str_content(self, llm_node_for_multimodal):
llm_node, mock_file_saver = llm_node_for_multimodal
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
@@ -1330,18 +1335,23 @@ class TestSaveMultimodalOutputAndConvertResultToMarkdown:
def test_unknown_content_type(self, llm_node_for_multimodal):
llm_node, mock_file_saver = llm_node_for_multimodal
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
- contents=frozenset(["hello world"]), file_saver=mock_file_saver, file_outputs=[]
+ contents=frozenset(("hello world",)), file_saver=mock_file_saver, file_outputs=[]
)
assert list(gen) == ["hello world"]
mock_file_saver.save_binary_string.assert_not_called()
mock_file_saver.save_remote_url.assert_not_called()
- def test_unknown_item_type(self, llm_node_for_multimodal):
+ def test_unknown_item_type(self, llm_node_for_multimodal, caplog):
llm_node, mock_file_saver = llm_node_for_multimodal
- gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
- contents=[frozenset(["hello world"])], file_saver=mock_file_saver, file_outputs=[]
- )
- assert list(gen) == ["frozenset({'hello world'})"]
+ unknown_item = self._UnknownItem()
+
+ with caplog.at_level(logging.WARNING, logger="graphon.nodes.llm.node"):
+ gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
+ contents=[unknown_item], file_saver=mock_file_saver, file_outputs=[]
+ )
+ assert list(gen) == [str(unknown_item)]
+
+ assert "unknown item type encountered" in caplog.text
mock_file_saver.save_binary_string.assert_not_called()
mock_file_saver.save_remote_url.assert_not_called()
diff --git a/api/tests/unit_tests/factories/test_variable_factory.py b/api/tests/unit_tests/factories/test_variable_factory.py
index 8d573b1154..a06c42507d 100644
--- a/api/tests/unit_tests/factories/test_variable_factory.py
+++ b/api/tests/unit_tests/factories/test_variable_factory.py
@@ -837,7 +837,7 @@ class TestBuildSegmentValueErrors:
self.ValueErrorTestCase(
name="frozenset_type",
description="frozenset (unsupported type)",
- test_value=frozenset([1, 2, 3]),
+ test_value=frozenset((1, 2, 3)),
),
self.ValueErrorTestCase(
name="memoryview_type",
diff --git a/api/tests/unit_tests/libs/test_oauth_clients.py b/api/tests/unit_tests/libs/test_oauth_clients.py
index ab468c8687..830284e697 100644
--- a/api/tests/unit_tests/libs/test_oauth_clients.py
+++ b/api/tests/unit_tests/libs/test_oauth_clients.py
@@ -68,7 +68,7 @@ class TestGitHubOAuth(BaseOAuthTest):
({}, None, True),
],
)
- @patch("httpx.post", autospec=True)
+ @patch("libs.oauth._http_client.post", autospec=True)
def test_should_retrieve_access_token(
self, mock_post, oauth, mock_response, response_data, expected_token, should_raise
):
@@ -109,7 +109,7 @@ class TestGitHubOAuth(BaseOAuthTest):
),
],
)
- @patch("httpx.get", autospec=True)
+ @patch("libs.oauth._http_client.get", autospec=True)
def test_should_retrieve_user_info_correctly(self, mock_get, oauth, user_data, email_data, expected_email):
user_response = MagicMock()
user_response.json.return_value = user_data
@@ -127,7 +127,7 @@ class TestGitHubOAuth(BaseOAuthTest):
# The profile email is absent/null, so /user/emails should be called
assert mock_get.call_count == 2
- @patch("httpx.get", autospec=True)
+ @patch("libs.oauth._http_client.get", autospec=True)
def test_should_skip_email_endpoint_when_profile_email_present(self, mock_get, oauth):
"""When the /user profile already contains an email, do not call /user/emails."""
user_response = MagicMock()
@@ -162,7 +162,7 @@ class TestGitHubOAuth(BaseOAuthTest):
),
],
)
- @patch("httpx.get", autospec=True)
+ @patch("libs.oauth._http_client.get", autospec=True)
def test_should_use_noreply_email_when_no_usable_email(self, mock_get, oauth, user_data, email_data):
user_response = MagicMock()
user_response.json.return_value = user_data
@@ -177,7 +177,7 @@ class TestGitHubOAuth(BaseOAuthTest):
assert user_info.id == str(user_data["id"])
assert user_info.email == "12345@users.noreply.github.com"
- @patch("httpx.get", autospec=True)
+ @patch("libs.oauth._http_client.get", autospec=True)
def test_should_use_noreply_email_when_email_endpoint_fails(self, mock_get, oauth):
user_response = MagicMock()
user_response.json.return_value = {"id": 12345, "login": "testuser", "name": "Test User"}
@@ -194,7 +194,7 @@ class TestGitHubOAuth(BaseOAuthTest):
assert user_info.id == "12345"
assert user_info.email == "12345@users.noreply.github.com"
- @patch("httpx.get", autospec=True)
+ @patch("libs.oauth._http_client.get", autospec=True)
def test_should_handle_network_errors(self, mock_get, oauth):
mock_get.side_effect = httpx.RequestError("Network error")
@@ -240,7 +240,7 @@ class TestGoogleOAuth(BaseOAuthTest):
({}, None, True),
],
)
- @patch("httpx.post", autospec=True)
+ @patch("libs.oauth._http_client.post", autospec=True)
def test_should_retrieve_access_token(
self, mock_post, oauth, oauth_config, mock_response, response_data, expected_token, should_raise
):
@@ -274,7 +274,7 @@ class TestGoogleOAuth(BaseOAuthTest):
({"sub": "123", "email": "test@example.com", "name": "Test User"}, ""), # Always returns empty string
],
)
- @patch("httpx.get", autospec=True)
+ @patch("libs.oauth._http_client.get", autospec=True)
def test_should_retrieve_user_info_correctly(self, mock_get, oauth, mock_response, user_data, expected_name):
mock_response.json.return_value = user_data
mock_get.return_value = mock_response
@@ -295,7 +295,7 @@ class TestGoogleOAuth(BaseOAuthTest):
httpx.TimeoutException,
],
)
- @patch("httpx.get", autospec=True)
+ @patch("libs.oauth._http_client.get", autospec=True)
def test_should_handle_http_errors(self, mock_get, oauth, exception_type):
mock_response = MagicMock()
mock_response.raise_for_status.side_effect = exception_type("Error")
diff --git a/api/tests/unit_tests/models/test_provider_models.py b/api/tests/unit_tests/models/test_provider_models.py
index f628e54a4d..d7b597e5fb 100644
--- a/api/tests/unit_tests/models/test_provider_models.py
+++ b/api/tests/unit_tests/models/test_provider_models.py
@@ -202,7 +202,7 @@ class TestProviderModel:
# Assert
assert provider.provider_type == ProviderType.CUSTOM
assert provider.is_valid is False
- assert provider.quota_type == ""
+ assert provider.quota_type is None
assert provider.quota_limit is None
assert provider.quota_used == 0
assert provider.credential_id is None
diff --git a/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py b/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py
index 10388a8880..52abfdd72e 100644
--- a/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py
+++ b/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py
@@ -4,13 +4,14 @@ import pytest
from oss2 import Auth
from extensions.storage.aliyun_oss_storage import AliyunOssStorage
-from tests.unit_tests.oss.__mock.aliyun_oss import setup_aliyun_oss_mock
from tests.unit_tests.oss.__mock.base import (
BaseStorageTest,
get_example_bucket,
get_example_folder,
)
+pytest_plugins = ("tests.unit_tests.oss.__mock.aliyun_oss",)
+
class TestAliyunOss(BaseStorageTest):
@pytest.fixture(autouse=True)
diff --git a/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py
index d54116555e..2802a2f1e3 100644
--- a/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py
+++ b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py
@@ -8,7 +8,8 @@ from tests.unit_tests.oss.__mock.base import (
BaseStorageTest,
get_example_bucket,
)
-from tests.unit_tests.oss.__mock.tencent_cos import setup_tencent_cos_mock
+
+pytest_plugins = ("tests.unit_tests.oss.__mock.tencent_cos",)
class TestTencentCos(BaseStorageTest):
diff --git a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py
index a06623a69e..8adea88811 100644
--- a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py
+++ b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py
@@ -8,7 +8,8 @@ from tests.unit_tests.oss.__mock.base import (
BaseStorageTest,
get_example_bucket,
)
-from tests.unit_tests.oss.__mock.volcengine_tos import setup_volcengine_tos_mock
+
+pytest_plugins = ("tests.unit_tests.oss.__mock.volcengine_tos",)
class TestVolcengineTos(BaseStorageTest):
diff --git a/api/tests/unit_tests/services/auth/test_auth_type.py b/api/tests/unit_tests/services/auth/test_auth_type.py
index 94073f451e..fb67dabcc5 100644
--- a/api/tests/unit_tests/services/auth/test_auth_type.py
+++ b/api/tests/unit_tests/services/auth/test_auth_type.py
@@ -77,7 +77,6 @@ class TestAuthType:
def test_auth_type_immutability(self):
"""Test that enum values cannot be modified"""
- # In Python 3.11+, enum members are read-only
with pytest.raises(AttributeError):
AuthType.FIRECRAWL = "modified"
diff --git a/api/tests/unit_tests/services/auth/test_jina_auth.py b/api/tests/unit_tests/services/auth/test_jina_auth.py
index 67f252390d..2c34d46f1e 100644
--- a/api/tests/unit_tests/services/auth/test_jina_auth.py
+++ b/api/tests/unit_tests/services/auth/test_jina_auth.py
@@ -35,7 +35,7 @@ class TestJinaAuth:
JinaAuth(credentials)
assert str(exc_info.value) == "No API key provided"
- @patch("services.auth.jina.jina.httpx.post", autospec=True)
+ @patch("services.auth.jina.jina._http_client.post", autospec=True)
def test_should_validate_valid_credentials_successfully(self, mock_post):
"""Test successful credential validation"""
mock_response = MagicMock()
@@ -53,7 +53,7 @@ class TestJinaAuth:
json={"url": "https://example.com"},
)
- @patch("services.auth.jina.jina.httpx.post", autospec=True)
+ @patch("services.auth.jina.jina._http_client.post", autospec=True)
def test_should_handle_http_402_error(self, mock_post):
"""Test handling of 402 Payment Required error"""
mock_response = MagicMock()
@@ -68,7 +68,7 @@ class TestJinaAuth:
auth.validate_credentials()
assert str(exc_info.value) == "Failed to authorize. Status code: 402. Error: Payment required"
- @patch("services.auth.jina.jina.httpx.post", autospec=True)
+ @patch("services.auth.jina.jina._http_client.post", autospec=True)
def test_should_handle_http_409_error(self, mock_post):
"""Test handling of 409 Conflict error"""
mock_response = MagicMock()
@@ -83,7 +83,7 @@ class TestJinaAuth:
auth.validate_credentials()
assert str(exc_info.value) == "Failed to authorize. Status code: 409. Error: Conflict error"
- @patch("services.auth.jina.jina.httpx.post", autospec=True)
+ @patch("services.auth.jina.jina._http_client.post", autospec=True)
def test_should_handle_http_500_error(self, mock_post):
"""Test handling of 500 Internal Server Error"""
mock_response = MagicMock()
@@ -98,7 +98,7 @@ class TestJinaAuth:
auth.validate_credentials()
assert str(exc_info.value) == "Failed to authorize. Status code: 500. Error: Internal server error"
- @patch("services.auth.jina.jina.httpx.post", autospec=True)
+ @patch("services.auth.jina.jina._http_client.post", autospec=True)
def test_should_handle_unexpected_error_with_text_response(self, mock_post):
"""Test handling of unexpected errors with text response"""
mock_response = MagicMock()
@@ -114,7 +114,7 @@ class TestJinaAuth:
auth.validate_credentials()
assert str(exc_info.value) == "Failed to authorize. Status code: 403. Error: Forbidden"
- @patch("services.auth.jina.jina.httpx.post", autospec=True)
+ @patch("services.auth.jina.jina._http_client.post", autospec=True)
def test_should_handle_unexpected_error_without_text(self, mock_post):
"""Test handling of unexpected errors without text response"""
mock_response = MagicMock()
@@ -130,7 +130,7 @@ class TestJinaAuth:
auth.validate_credentials()
assert str(exc_info.value) == "Unexpected error occurred while trying to authorize. Status code: 404"
- @patch("services.auth.jina.jina.httpx.post", autospec=True)
+ @patch("services.auth.jina.jina._http_client.post", autospec=True)
def test_should_handle_network_errors(self, mock_post):
"""Test handling of network connection errors"""
mock_post.side_effect = httpx.ConnectError("Network error")
diff --git a/api/tests/unit_tests/services/auth/test_jina_auth_standalone_module.py b/api/tests/unit_tests/services/auth/test_jina_auth_standalone_module.py
index c2fcd71875..4b5a97bf3f 100644
--- a/api/tests/unit_tests/services/auth/test_jina_auth_standalone_module.py
+++ b/api/tests/unit_tests/services/auth/test_jina_auth_standalone_module.py
@@ -60,7 +60,7 @@ def test_prepare_headers_includes_bearer_api_key(jina_module: ModuleType) -> Non
def test_post_request_calls_httpx(jina_module: ModuleType, monkeypatch: pytest.MonkeyPatch) -> None:
auth = jina_module.JinaAuth(_credentials(api_key="k"))
post_mock = MagicMock(name="httpx.post")
- monkeypatch.setattr(jina_module.httpx, "post", post_mock)
+ monkeypatch.setattr(jina_module._http_client, "post", post_mock)
auth._post_request("https://r.jina.ai", {"url": "https://example.com"}, {"h": "v"})
post_mock.assert_called_once_with("https://r.jina.ai", headers={"h": "v"}, json={"url": "https://example.com"})
@@ -72,7 +72,7 @@ def test_validate_credentials_success(jina_module: ModuleType, monkeypatch: pyte
response = MagicMock()
response.status_code = 200
post_mock = MagicMock(return_value=response)
- monkeypatch.setattr(jina_module.httpx, "post", post_mock)
+ monkeypatch.setattr(jina_module._http_client, "post", post_mock)
assert auth.validate_credentials() is True
post_mock.assert_called_once_with(
@@ -90,7 +90,7 @@ def test_validate_credentials_non_200_raises_via_handle_error(
response = MagicMock()
response.status_code = 402
response.json.return_value = {"error": "Payment required"}
- monkeypatch.setattr(jina_module.httpx, "post", MagicMock(return_value=response))
+ monkeypatch.setattr(jina_module._http_client, "post", MagicMock(return_value=response))
with pytest.raises(Exception, match="Status code: 402.*Payment required"):
auth.validate_credentials()
@@ -151,7 +151,7 @@ def test_validate_credentials_propagates_network_errors(
jina_module: ModuleType, monkeypatch: pytest.MonkeyPatch
) -> None:
auth = jina_module.JinaAuth(_credentials(api_key="k"))
- monkeypatch.setattr(jina_module.httpx, "post", MagicMock(side_effect=httpx.ConnectError("boom")))
+ monkeypatch.setattr(jina_module._http_client, "post", MagicMock(side_effect=httpx.ConnectError("boom")))
with pytest.raises(httpx.ConnectError, match="boom"):
auth.validate_credentials()
diff --git a/api/tests/unit_tests/services/enterprise/test_enterprise_service.py b/api/tests/unit_tests/services/enterprise/test_enterprise_service.py
index 59c07bfb37..6ad6a490b0 100644
--- a/api/tests/unit_tests/services/enterprise/test_enterprise_service.py
+++ b/api/tests/unit_tests/services/enterprise/test_enterprise_service.py
@@ -5,6 +5,7 @@ Covers:
- License status caching (get_cached_license_status)
"""
+from datetime import datetime
from unittest.mock import patch
import pytest
@@ -15,9 +16,178 @@ from services.enterprise.enterprise_service import (
VALID_LICENSE_CACHE_TTL,
DefaultWorkspaceJoinResult,
EnterpriseService,
+ WebAppSettings,
+ WorkspacePermission,
try_join_default_workspace,
)
+MODULE = "services.enterprise.enterprise_service"
+
+
+class TestEnterpriseServiceInfo:
+ def test_get_info_delegates(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"version": "1.0"}
+ result = EnterpriseService.get_info()
+
+ req.send_request.assert_called_once_with("GET", "/info")
+ assert result == {"version": "1.0"}
+
+ def test_get_workspace_info_delegates(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"name": "ws"}
+ result = EnterpriseService.get_workspace_info("tenant-1")
+
+ req.send_request.assert_called_once_with("GET", "/workspace/tenant-1/info")
+ assert result == {"name": "ws"}
+
+
+class TestSsoSettingsLastUpdateTime:
+ def test_app_sso_parses_valid_timestamp(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = "2025-01-15T10:30:00+00:00"
+ result = EnterpriseService.get_app_sso_settings_last_update_time()
+
+ assert isinstance(result, datetime)
+ assert result.year == 2025
+
+ def test_app_sso_raises_on_empty(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = ""
+ with pytest.raises(ValueError, match="No data found"):
+ EnterpriseService.get_app_sso_settings_last_update_time()
+
+ def test_app_sso_raises_on_invalid_format(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = "not-a-date"
+ with pytest.raises(ValueError, match="Invalid date format"):
+ EnterpriseService.get_app_sso_settings_last_update_time()
+
+ def test_workspace_sso_parses_valid_timestamp(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = "2025-06-01T00:00:00+00:00"
+ result = EnterpriseService.get_workspace_sso_settings_last_update_time()
+
+ assert isinstance(result, datetime)
+
+ def test_workspace_sso_raises_on_empty(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = None
+ with pytest.raises(ValueError, match="No data found"):
+ EnterpriseService.get_workspace_sso_settings_last_update_time()
+
+
+class TestWorkspacePermissionService:
+ def test_raises_on_empty_workspace_id(self):
+ with pytest.raises(ValueError, match="workspace_id must be provided"):
+ EnterpriseService.WorkspacePermissionService.get_permission("")
+
+ def test_raises_on_missing_data(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = None
+ with pytest.raises(ValueError, match="No data found"):
+ EnterpriseService.WorkspacePermissionService.get_permission("ws-1")
+
+ def test_raises_on_missing_permission_key(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"other": "data"}
+ with pytest.raises(ValueError, match="No data found"):
+ EnterpriseService.WorkspacePermissionService.get_permission("ws-1")
+
+ def test_returns_parsed_permission(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {
+ "permission": {
+ "workspaceId": "ws-1",
+ "allowMemberInvite": True,
+ "allowOwnerTransfer": False,
+ }
+ }
+ result = EnterpriseService.WorkspacePermissionService.get_permission("ws-1")
+
+ assert isinstance(result, WorkspacePermission)
+ assert result.workspace_id == "ws-1"
+ assert result.allow_member_invite is True
+ assert result.allow_owner_transfer is False
+
+
+class TestWebAppAuth:
+ def test_is_user_allowed_returns_result_field(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"result": True}
+ assert EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp("u1", "a1") is True
+
+ def test_is_user_allowed_defaults_false(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {}
+ assert EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp("u1", "a1") is False
+
+ def test_batch_is_user_allowed_returns_empty_for_no_apps(self):
+ assert EnterpriseService.WebAppAuth.batch_is_user_allowed_to_access_webapps("u1", []) == {}
+
+ def test_batch_is_user_allowed_raises_on_empty_response(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = None
+ with pytest.raises(ValueError, match="No data found"):
+ EnterpriseService.WebAppAuth.batch_is_user_allowed_to_access_webapps("u1", ["a1"])
+
+ def test_get_app_access_mode_raises_on_empty_app_id(self):
+ with pytest.raises(ValueError, match="app_id must be provided"):
+ EnterpriseService.WebAppAuth.get_app_access_mode_by_id("")
+
+ def test_get_app_access_mode_returns_settings(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"accessMode": "public"}
+ result = EnterpriseService.WebAppAuth.get_app_access_mode_by_id("a1")
+
+ assert isinstance(result, WebAppSettings)
+ assert result.access_mode == "public"
+
+ def test_batch_get_returns_empty_for_no_apps(self):
+ assert EnterpriseService.WebAppAuth.batch_get_app_access_mode_by_id([]) == {}
+
+ def test_batch_get_maps_access_modes(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"accessModes": {"a1": "public", "a2": "private"}}
+ result = EnterpriseService.WebAppAuth.batch_get_app_access_mode_by_id(["a1", "a2"])
+
+ assert result["a1"].access_mode == "public"
+ assert result["a2"].access_mode == "private"
+
+ def test_batch_get_raises_on_invalid_format(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"accessModes": "not-a-dict"}
+ with pytest.raises(ValueError, match="Invalid data format"):
+ EnterpriseService.WebAppAuth.batch_get_app_access_mode_by_id(["a1"])
+
+ def test_update_access_mode_raises_on_empty_app_id(self):
+ with pytest.raises(ValueError, match="app_id must be provided"):
+ EnterpriseService.WebAppAuth.update_app_access_mode("", "public")
+
+ def test_update_access_mode_raises_on_invalid_mode(self):
+ with pytest.raises(ValueError, match="access_mode must be"):
+ EnterpriseService.WebAppAuth.update_app_access_mode("a1", "invalid")
+
+ def test_update_access_mode_delegates_and_returns(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ req.send_request.return_value = {"result": True}
+ result = EnterpriseService.WebAppAuth.update_app_access_mode("a1", "public")
+
+ assert result is True
+ req.send_request.assert_called_once_with(
+ "POST", "/webapp/access-mode", json={"appId": "a1", "accessMode": "public"}
+ )
+
+ def test_cleanup_webapp_raises_on_empty_app_id(self):
+ with pytest.raises(ValueError, match="app_id must be provided"):
+ EnterpriseService.WebAppAuth.cleanup_webapp("")
+
+ def test_cleanup_webapp_delegates(self):
+ with patch(f"{MODULE}.EnterpriseRequest") as req:
+ EnterpriseService.WebAppAuth.cleanup_webapp("a1")
+
+ req.send_request.assert_called_once_with("DELETE", "/webapp/clean", params={"appId": "a1"})
+
class TestJoinDefaultWorkspace:
def test_join_default_workspace_success(self):
diff --git a/api/tests/unit_tests/services/enterprise/test_plugin_manager_service.py b/api/tests/unit_tests/services/enterprise/test_plugin_manager_service.py
index 6ee328ae2c..759d907934 100644
--- a/api/tests/unit_tests/services/enterprise/test_plugin_manager_service.py
+++ b/api/tests/unit_tests/services/enterprise/test_plugin_manager_service.py
@@ -7,14 +7,20 @@ This module covers the pre-uninstall plugin hook behavior:
from unittest.mock import patch
+import pytest
from httpx import HTTPStatusError
from configs import dify_config
from services.enterprise.plugin_manager_service import (
+ CheckCredentialPolicyComplianceRequest,
+ CredentialPolicyViolationError,
+ PluginCredentialType,
PluginManagerService,
PreUninstallPluginRequest,
)
+MODULE = "services.enterprise.plugin_manager_service"
+
class TestTryPreUninstallPlugin:
def test_try_pre_uninstall_plugin_success(self):
@@ -88,3 +94,46 @@ class TestTryPreUninstallPlugin:
timeout=dify_config.ENTERPRISE_REQUEST_TIMEOUT,
)
mock_logger.exception.assert_called_once()
+
+
+class TestCheckCredentialPolicyCompliance:
+ def _request(self, cred_type=PluginCredentialType.MODEL):
+ return CheckCredentialPolicyComplianceRequest(
+ dify_credential_id="cred-1", provider="openai", credential_type=cred_type
+ )
+
+ def test_passes_when_result_true(self):
+ with patch(f"{MODULE}.EnterprisePluginManagerRequest") as req:
+ req.send_request.return_value = {"result": True}
+ PluginManagerService.check_credential_policy_compliance(self._request())
+
+ req.send_request.assert_called_once()
+
+ def test_raises_violation_when_result_false(self):
+ with patch(f"{MODULE}.EnterprisePluginManagerRequest") as req:
+ req.send_request.return_value = {"result": False}
+ with pytest.raises(CredentialPolicyViolationError, match="Credentials not available"):
+ PluginManagerService.check_credential_policy_compliance(self._request())
+
+ def test_raises_violation_on_invalid_response_format(self):
+ with patch(f"{MODULE}.EnterprisePluginManagerRequest") as req:
+ req.send_request.return_value = "not-a-dict"
+ with pytest.raises(CredentialPolicyViolationError, match="error occurred"):
+ PluginManagerService.check_credential_policy_compliance(self._request())
+
+ def test_raises_violation_on_api_exception(self):
+ with patch(f"{MODULE}.EnterprisePluginManagerRequest") as req:
+ req.send_request.side_effect = ConnectionError("network fail")
+ with pytest.raises(CredentialPolicyViolationError, match="error occurred"):
+ PluginManagerService.check_credential_policy_compliance(self._request())
+
+ def test_model_dump_serializes_credential_type_as_number(self):
+ body = self._request(PluginCredentialType.TOOL)
+ data = body.model_dump()
+
+ assert data["credential_type"] == 1
+ assert data["dify_credential_id"] == "cred-1"
+
+ def test_model_credential_type_values(self):
+ assert PluginCredentialType.MODEL.to_number() == 0
+ assert PluginCredentialType.TOOL.to_number() == 1
diff --git a/api/tests/unit_tests/services/external_dataset_service.py b/api/tests/unit_tests/services/external_dataset_service.py
index a8ef35a0d0..70bd1c73b3 100644
--- a/api/tests/unit_tests/services/external_dataset_service.py
+++ b/api/tests/unit_tests/services/external_dataset_service.py
@@ -294,7 +294,7 @@ class TestExternalDatasetServiceCrudExternalKnowledgeApi:
api = Mock(spec=ExternalKnowledgeApis)
mock_db_session.query.return_value.filter_by.return_value.first.return_value = api
- result = ExternalDatasetService.get_external_knowledge_api("api-id")
+ result = ExternalDatasetService.get_external_knowledge_api("api-id", "tenant-id")
assert result is api
def test_get_external_knowledge_api_not_found_raises(self, mock_db_session: MagicMock):
@@ -305,7 +305,7 @@ class TestExternalDatasetServiceCrudExternalKnowledgeApi:
mock_db_session.query.return_value.filter_by.return_value.first.return_value = None
with pytest.raises(ValueError, match="api template not found"):
- ExternalDatasetService.get_external_knowledge_api("missing-id")
+ ExternalDatasetService.get_external_knowledge_api("missing-id", "tenant-id")
def test_update_external_knowledge_api_success_with_hidden_api_key(self, mock_db_session: MagicMock):
"""
diff --git a/api/tests/unit_tests/services/plugin/test_plugin_auto_upgrade_service.py b/api/tests/unit_tests/services/plugin/test_plugin_auto_upgrade_service.py
new file mode 100644
index 0000000000..edb50d09a6
--- /dev/null
+++ b/api/tests/unit_tests/services/plugin/test_plugin_auto_upgrade_service.py
@@ -0,0 +1,183 @@
+from unittest.mock import MagicMock, patch
+
+from models.account import TenantPluginAutoUpgradeStrategy
+
+MODULE = "services.plugin.plugin_auto_upgrade_service"
+
+
+def _patched_session():
+ """Patch Session(db.engine) to return a mock session as context manager."""
+ session = MagicMock()
+ session_cls = MagicMock()
+ session_cls.return_value.__enter__ = MagicMock(return_value=session)
+ session_cls.return_value.__exit__ = MagicMock(return_value=False)
+ patcher = patch(f"{MODULE}.Session", session_cls)
+ db_patcher = patch(f"{MODULE}.db")
+ return patcher, db_patcher, session
+
+
+class TestGetStrategy:
+ def test_returns_strategy_when_found(self):
+ p1, p2, session = _patched_session()
+ strategy = MagicMock()
+ session.query.return_value.where.return_value.first.return_value = strategy
+
+ with p1, p2:
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.get_strategy("t1")
+
+ assert result is strategy
+
+ def test_returns_none_when_not_found(self):
+ p1, p2, session = _patched_session()
+ session.query.return_value.where.return_value.first.return_value = None
+
+ with p1, p2:
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.get_strategy("t1")
+
+ assert result is None
+
+
+class TestChangeStrategy:
+ def test_creates_new_strategy(self):
+ p1, p2, session = _patched_session()
+ session.query.return_value.where.return_value.first.return_value = None
+
+ with p1, p2, patch(f"{MODULE}.TenantPluginAutoUpgradeStrategy") as strat_cls:
+ strat_cls.return_value = MagicMock()
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.change_strategy(
+ "t1",
+ TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
+ 3,
+ TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
+ [],
+ [],
+ )
+
+ assert result is True
+ session.add.assert_called_once()
+ session.commit.assert_called_once()
+
+ def test_updates_existing_strategy(self):
+ p1, p2, session = _patched_session()
+ existing = MagicMock()
+ session.query.return_value.where.return_value.first.return_value = existing
+
+ with p1, p2:
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.change_strategy(
+ "t1",
+ TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
+ 5,
+ TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL,
+ ["p1"],
+ ["p2"],
+ )
+
+ assert result is True
+ assert existing.strategy_setting == TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST
+ assert existing.upgrade_time_of_day == 5
+ assert existing.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL
+ assert existing.exclude_plugins == ["p1"]
+ assert existing.include_plugins == ["p2"]
+ session.commit.assert_called_once()
+
+
+class TestExcludePlugin:
+ def test_creates_default_strategy_when_none_exists(self):
+ p1, p2, session = _patched_session()
+ session.query.return_value.where.return_value.first.return_value = None
+
+ with (
+ p1,
+ p2,
+ patch(f"{MODULE}.TenantPluginAutoUpgradeStrategy") as strat_cls,
+ patch(f"{MODULE}.PluginAutoUpgradeService.change_strategy") as cs,
+ ):
+ strat_cls.StrategySetting.FIX_ONLY = "fix_only"
+ strat_cls.UpgradeMode.EXCLUDE = "exclude"
+ cs.return_value = True
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.exclude_plugin("t1", "plugin-1")
+
+ assert result is True
+ cs.assert_called_once()
+
+ def test_appends_to_exclude_list_in_exclude_mode(self):
+ p1, p2, session = _patched_session()
+ existing = MagicMock()
+ existing.upgrade_mode = "exclude"
+ existing.exclude_plugins = ["p-existing"]
+ session.query.return_value.where.return_value.first.return_value = existing
+
+ with p1, p2, patch(f"{MODULE}.TenantPluginAutoUpgradeStrategy") as strat_cls:
+ strat_cls.UpgradeMode.EXCLUDE = "exclude"
+ strat_cls.UpgradeMode.PARTIAL = "partial"
+ strat_cls.UpgradeMode.ALL = "all"
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.exclude_plugin("t1", "p-new")
+
+ assert result is True
+ assert existing.exclude_plugins == ["p-existing", "p-new"]
+ session.commit.assert_called_once()
+
+ def test_removes_from_include_list_in_partial_mode(self):
+ p1, p2, session = _patched_session()
+ existing = MagicMock()
+ existing.upgrade_mode = "partial"
+ existing.include_plugins = ["p1", "p2"]
+ session.query.return_value.where.return_value.first.return_value = existing
+
+ with p1, p2, patch(f"{MODULE}.TenantPluginAutoUpgradeStrategy") as strat_cls:
+ strat_cls.UpgradeMode.EXCLUDE = "exclude"
+ strat_cls.UpgradeMode.PARTIAL = "partial"
+ strat_cls.UpgradeMode.ALL = "all"
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.exclude_plugin("t1", "p1")
+
+ assert result is True
+ assert existing.include_plugins == ["p2"]
+
+ def test_switches_to_exclude_mode_from_all(self):
+ p1, p2, session = _patched_session()
+ existing = MagicMock()
+ existing.upgrade_mode = "all"
+ session.query.return_value.where.return_value.first.return_value = existing
+
+ with p1, p2, patch(f"{MODULE}.TenantPluginAutoUpgradeStrategy") as strat_cls:
+ strat_cls.UpgradeMode.EXCLUDE = "exclude"
+ strat_cls.UpgradeMode.PARTIAL = "partial"
+ strat_cls.UpgradeMode.ALL = "all"
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ result = PluginAutoUpgradeService.exclude_plugin("t1", "p1")
+
+ assert result is True
+ assert existing.upgrade_mode == "exclude"
+ assert existing.exclude_plugins == ["p1"]
+
+ def test_no_duplicate_in_exclude_list(self):
+ p1, p2, session = _patched_session()
+ existing = MagicMock()
+ existing.upgrade_mode = "exclude"
+ existing.exclude_plugins = ["p1"]
+ session.query.return_value.where.return_value.first.return_value = existing
+
+ with p1, p2, patch(f"{MODULE}.TenantPluginAutoUpgradeStrategy") as strat_cls:
+ strat_cls.UpgradeMode.EXCLUDE = "exclude"
+ strat_cls.UpgradeMode.PARTIAL = "partial"
+ strat_cls.UpgradeMode.ALL = "all"
+ from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
+
+ PluginAutoUpgradeService.exclude_plugin("t1", "p1")
+
+ assert existing.exclude_plugins == ["p1"]
diff --git a/api/tests/unit_tests/services/plugin/test_plugin_permission_service.py b/api/tests/unit_tests/services/plugin/test_plugin_permission_service.py
new file mode 100644
index 0000000000..69091110db
--- /dev/null
+++ b/api/tests/unit_tests/services/plugin/test_plugin_permission_service.py
@@ -0,0 +1,75 @@
+from unittest.mock import MagicMock, patch
+
+from models.account import TenantPluginPermission
+
+MODULE = "services.plugin.plugin_permission_service"
+
+
+def _patched_session():
+ """Patch Session(db.engine) to return a mock session as context manager."""
+ session = MagicMock()
+ session_cls = MagicMock()
+ session_cls.return_value.__enter__ = MagicMock(return_value=session)
+ session_cls.return_value.__exit__ = MagicMock(return_value=False)
+ patcher = patch(f"{MODULE}.Session", session_cls)
+ db_patcher = patch(f"{MODULE}.db")
+ return patcher, db_patcher, session
+
+
+class TestGetPermission:
+ def test_returns_permission_when_found(self):
+ p1, p2, session = _patched_session()
+ permission = MagicMock()
+ session.query.return_value.where.return_value.first.return_value = permission
+
+ with p1, p2:
+ from services.plugin.plugin_permission_service import PluginPermissionService
+
+ result = PluginPermissionService.get_permission("t1")
+
+ assert result is permission
+
+ def test_returns_none_when_not_found(self):
+ p1, p2, session = _patched_session()
+ session.query.return_value.where.return_value.first.return_value = None
+
+ with p1, p2:
+ from services.plugin.plugin_permission_service import PluginPermissionService
+
+ result = PluginPermissionService.get_permission("t1")
+
+ assert result is None
+
+
+class TestChangePermission:
+ def test_creates_new_permission_when_not_exists(self):
+ p1, p2, session = _patched_session()
+ session.query.return_value.where.return_value.first.return_value = None
+
+ with p1, p2, patch(f"{MODULE}.TenantPluginPermission") as perm_cls:
+ perm_cls.return_value = MagicMock()
+ from services.plugin.plugin_permission_service import PluginPermissionService
+
+ result = PluginPermissionService.change_permission(
+ "t1", TenantPluginPermission.InstallPermission.EVERYONE, TenantPluginPermission.DebugPermission.EVERYONE
+ )
+
+ session.add.assert_called_once()
+ session.commit.assert_called_once()
+
+ def test_updates_existing_permission(self):
+ p1, p2, session = _patched_session()
+ existing = MagicMock()
+ session.query.return_value.where.return_value.first.return_value = existing
+
+ with p1, p2:
+ from services.plugin.plugin_permission_service import PluginPermissionService
+
+ result = PluginPermissionService.change_permission(
+ "t1", TenantPluginPermission.InstallPermission.ADMINS, TenantPluginPermission.DebugPermission.ADMINS
+ )
+
+ assert existing.install_permission == TenantPluginPermission.InstallPermission.ADMINS
+ assert existing.debug_permission == TenantPluginPermission.DebugPermission.ADMINS
+ session.commit.assert_called_once()
+ session.add.assert_not_called()
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_built_in_retrieval.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_built_in_retrieval.py
new file mode 100644
index 0000000000..1928958ea4
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_built_in_retrieval.py
@@ -0,0 +1,110 @@
+from services.rag_pipeline.pipeline_template.built_in.built_in_retrieval import BuiltInPipelineTemplateRetrieval
+from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType
+
+
+def test_get_type() -> None:
+ retrieval = BuiltInPipelineTemplateRetrieval()
+
+ assert retrieval.get_type() == PipelineTemplateType.BUILTIN
+
+
+def test_get_pipeline_templates(mocker) -> None:
+ mocker.patch.object(
+ BuiltInPipelineTemplateRetrieval,
+ "_get_builtin_data",
+ return_value={
+ "pipeline_templates": {
+ "en-US": {"pipeline_templates": [{"id": "tpl-1"}]},
+ "tpl-1": {"id": "tpl-1", "name": "Template 1"},
+ }
+ },
+ )
+ retrieval = BuiltInPipelineTemplateRetrieval()
+
+ templates = retrieval.get_pipeline_templates("en-US")
+
+ assert templates == {"pipeline_templates": [{"id": "tpl-1"}]}
+
+
+def test_get_pipeline_template_detail(mocker) -> None:
+ mocker.patch.object(
+ BuiltInPipelineTemplateRetrieval,
+ "_get_builtin_data",
+ return_value={
+ "pipeline_templates": {
+ "tpl-1": {"id": "tpl-1", "name": "Template 1"},
+ }
+ },
+ )
+ retrieval = BuiltInPipelineTemplateRetrieval()
+
+ detail = retrieval.get_pipeline_template_detail("tpl-1")
+
+ assert detail == {"id": "tpl-1", "name": "Template 1"}
+
+
+def test_get_pipeline_templates_missing_language_returns_empty_dict(mocker) -> None:
+ mocker.patch.object(
+ BuiltInPipelineTemplateRetrieval,
+ "_get_builtin_data",
+ return_value={"pipeline_templates": {}},
+ )
+ retrieval = BuiltInPipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_templates("fr-FR")
+
+ assert result == {}
+
+
+def test_get_pipeline_template_detail_returns_none_for_unknown_id(mocker) -> None:
+ mocker.patch.object(
+ BuiltInPipelineTemplateRetrieval,
+ "_get_builtin_data",
+ return_value={"pipeline_templates": {"tpl-1": {"id": "tpl-1"}}},
+ )
+ retrieval = BuiltInPipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_template_detail("nonexistent-id")
+
+ assert result is None
+
+
+def test_get_builtin_data_reads_from_file_and_caches(mocker) -> None:
+ import json
+
+ # Ensure no cached data
+ BuiltInPipelineTemplateRetrieval.builtin_data = None
+
+ mock_app = mocker.Mock()
+ mock_app.root_path = "/fake/root"
+
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.built_in.built_in_retrieval.current_app",
+ mock_app,
+ )
+
+ test_data = {"pipeline_templates": {"en-US": {"templates": []}}}
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.built_in.built_in_retrieval.Path.read_text",
+ return_value=json.dumps(test_data),
+ )
+
+ result = BuiltInPipelineTemplateRetrieval._get_builtin_data()
+
+ assert result == test_data
+ assert BuiltInPipelineTemplateRetrieval.builtin_data == test_data
+
+ # Reset class state
+ BuiltInPipelineTemplateRetrieval.builtin_data = None
+
+
+def test_get_builtin_data_returns_cache_on_second_call(mocker) -> None:
+ cached_data = {"pipeline_templates": {"en-US": {}}}
+ BuiltInPipelineTemplateRetrieval.builtin_data = cached_data
+
+ result = BuiltInPipelineTemplateRetrieval._get_builtin_data()
+
+ assert result == cached_data
+
+ # Reset class state
+ BuiltInPipelineTemplateRetrieval.builtin_data = None
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_customized_retrieval.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_customized_retrieval.py
new file mode 100644
index 0000000000..647a2f0bfc
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_customized_retrieval.py
@@ -0,0 +1,89 @@
+from types import SimpleNamespace
+
+from services.rag_pipeline.pipeline_template.customized.customized_retrieval import CustomizedPipelineTemplateRetrieval
+from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType
+
+
+def test_get_pipeline_templates(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.customized.customized_retrieval.current_account_with_tenant",
+ return_value=("account-id", "tenant-id"),
+ )
+ customized_template = SimpleNamespace(
+ id="tpl-1",
+ name="Custom Template",
+ description="desc",
+ icon={"background": "#fff"},
+ position=2,
+ chunk_structure="parent-child",
+ )
+ scalars_mock = mocker.Mock()
+ scalars_mock.all.return_value = [customized_template]
+ session_mock = mocker.Mock()
+ session_mock.scalars.return_value = scalars_mock
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.customized.customized_retrieval.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ retrieval = CustomizedPipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_templates("en-US")
+
+ assert retrieval.get_type() == PipelineTemplateType.CUSTOMIZED
+ assert result == {
+ "pipeline_templates": [
+ {
+ "id": "tpl-1",
+ "name": "Custom Template",
+ "description": "desc",
+ "icon": {"background": "#fff"},
+ "position": 2,
+ "chunk_structure": "parent-child",
+ }
+ ]
+ }
+
+
+def test_get_pipeline_template_detail_returns_detail(mocker) -> None:
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = SimpleNamespace(
+ id="tpl-1",
+ name="Custom Template",
+ icon={"background": "#fff"},
+ description="desc",
+ chunk_structure="parent-child",
+ yaml_content="workflow:\n graph:\n edges: []",
+ created_user_name="creator",
+ )
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.customized.customized_retrieval.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ retrieval = CustomizedPipelineTemplateRetrieval()
+
+ detail = retrieval.get_pipeline_template_detail("tpl-1")
+
+ assert detail == {
+ "id": "tpl-1",
+ "name": "Custom Template",
+ "icon_info": {"background": "#fff"},
+ "description": "desc",
+ "chunk_structure": "parent-child",
+ "export_data": "workflow:\n graph:\n edges: []",
+ "graph": {"edges": []},
+ "created_by": "creator",
+ }
+
+
+def test_get_pipeline_template_detail_returns_none_when_not_found(mocker) -> None:
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = None
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.customized.customized_retrieval.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ retrieval = CustomizedPipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_template_detail("missing")
+
+ assert result is None
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_database_retrieval.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_database_retrieval.py
new file mode 100644
index 0000000000..0175f66808
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_database_retrieval.py
@@ -0,0 +1,87 @@
+from types import SimpleNamespace
+
+from services.rag_pipeline.pipeline_template.database.database_retrieval import DatabasePipelineTemplateRetrieval
+from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType
+
+
+def test_get_pipeline_templates(mocker) -> None:
+ built_in_template = SimpleNamespace(
+ id="tpl-1",
+ name="Template 1",
+ description="desc",
+ icon={"background": "#fff"},
+ copyright="copyright",
+ privacy_policy="https://example.com/privacy",
+ position=1,
+ chunk_structure="general",
+ )
+ scalars_mock = mocker.Mock()
+ scalars_mock.all.return_value = [built_in_template]
+ session_mock = mocker.Mock()
+ session_mock.scalars.return_value = scalars_mock
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.database.database_retrieval.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ retrieval = DatabasePipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_templates("en-US")
+
+ assert retrieval.get_type() == PipelineTemplateType.DATABASE
+ assert result == {
+ "pipeline_templates": [
+ {
+ "id": "tpl-1",
+ "name": "Template 1",
+ "description": "desc",
+ "icon": {"background": "#fff"},
+ "copyright": "copyright",
+ "privacy_policy": "https://example.com/privacy",
+ "position": 1,
+ "chunk_structure": "general",
+ }
+ ]
+ }
+
+
+def test_get_pipeline_template_detail_returns_detail(mocker) -> None:
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = SimpleNamespace(
+ id="tpl-1",
+ name="Template 1",
+ icon={"background": "#fff"},
+ description="desc",
+ chunk_structure="general",
+ yaml_content="workflow:\n graph:\n nodes: []",
+ )
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.database.database_retrieval.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ retrieval = DatabasePipelineTemplateRetrieval()
+
+ detail = retrieval.get_pipeline_template_detail("tpl-1")
+
+ assert detail == {
+ "id": "tpl-1",
+ "name": "Template 1",
+ "icon_info": {"background": "#fff"},
+ "description": "desc",
+ "chunk_structure": "general",
+ "export_data": "workflow:\n graph:\n nodes: []",
+ "graph": {"nodes": []},
+ }
+
+
+def test_get_pipeline_template_detail_returns_none_when_not_found(mocker) -> None:
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = None
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.database.database_retrieval.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ retrieval = DatabasePipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_template_detail("missing")
+
+ assert result is None
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_package_imports.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_package_imports.py
new file mode 100644
index 0000000000..a8b545508f
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_package_imports.py
@@ -0,0 +1,19 @@
+import importlib
+
+import pytest
+
+
+@pytest.mark.parametrize(
+ "module_name",
+ [
+ "services.rag_pipeline.pipeline_template",
+ "services.rag_pipeline.pipeline_template.built_in",
+ "services.rag_pipeline.pipeline_template.customized",
+ "services.rag_pipeline.pipeline_template.database",
+ "services.rag_pipeline.pipeline_template.remote",
+ ],
+)
+def test_package_imports(module_name: str) -> None:
+ module = importlib.import_module(module_name)
+
+ assert module is not None
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_base.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_base.py
new file mode 100644
index 0000000000..304ee8faa3
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_base.py
@@ -0,0 +1,43 @@
+import pytest
+
+from services.rag_pipeline.pipeline_template.pipeline_template_base import PipelineTemplateRetrievalBase
+
+
+class DummyRetrieval(PipelineTemplateRetrievalBase):
+ def get_pipeline_templates(self, language: str) -> dict:
+ return {"language": language}
+
+ def get_pipeline_template_detail(self, template_id: str) -> dict | None:
+ return {"id": template_id}
+
+ def get_type(self) -> str:
+ return "dummy"
+
+
+class MissingTypeRetrieval(PipelineTemplateRetrievalBase):
+ def get_pipeline_templates(self, language: str) -> dict:
+ return {"language": language}
+
+ def get_pipeline_template_detail(self, template_id: str) -> dict | None:
+ return {"id": template_id}
+
+
+def test_pipeline_template_retrieval_base_concrete_implementation() -> None:
+ retrieval = DummyRetrieval()
+
+ assert retrieval.get_pipeline_templates("en-US") == {"language": "en-US"}
+ assert retrieval.get_pipeline_template_detail("tpl-1") == {"id": "tpl-1"}
+ assert retrieval.get_type() == "dummy"
+
+
+def test_pipeline_template_retrieval_base_requires_abstract_methods() -> None:
+ assert "get_type" in MissingTypeRetrieval.__abstractmethods__
+
+
+def test_pipeline_template_retrieval_base_default_methods_raise() -> None:
+ with pytest.raises(NotImplementedError):
+ PipelineTemplateRetrievalBase.get_pipeline_templates(DummyRetrieval(), "en-US")
+ with pytest.raises(NotImplementedError):
+ PipelineTemplateRetrievalBase.get_pipeline_template_detail(DummyRetrieval(), "tpl-1")
+ with pytest.raises(NotImplementedError):
+ PipelineTemplateRetrievalBase.get_type(DummyRetrieval())
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_factory.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_factory.py
new file mode 100644
index 0000000000..d8178490e9
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_factory.py
@@ -0,0 +1,34 @@
+import pytest
+
+from services.rag_pipeline.pipeline_template.built_in.built_in_retrieval import BuiltInPipelineTemplateRetrieval
+from services.rag_pipeline.pipeline_template.customized.customized_retrieval import CustomizedPipelineTemplateRetrieval
+from services.rag_pipeline.pipeline_template.database.database_retrieval import DatabasePipelineTemplateRetrieval
+from services.rag_pipeline.pipeline_template.pipeline_template_factory import PipelineTemplateRetrievalFactory
+from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType
+from services.rag_pipeline.pipeline_template.remote.remote_retrieval import RemotePipelineTemplateRetrieval
+
+
+@pytest.mark.parametrize(
+ ("mode", "expected_cls"),
+ [
+ (PipelineTemplateType.REMOTE, RemotePipelineTemplateRetrieval),
+ (PipelineTemplateType.CUSTOMIZED, CustomizedPipelineTemplateRetrieval),
+ (PipelineTemplateType.DATABASE, DatabasePipelineTemplateRetrieval),
+ (PipelineTemplateType.BUILTIN, BuiltInPipelineTemplateRetrieval),
+ ],
+)
+def test_get_pipeline_template_factory(mode: str, expected_cls: type) -> None:
+ result = PipelineTemplateRetrievalFactory.get_pipeline_template_factory(mode)
+
+ assert result is expected_cls
+
+
+def test_get_pipeline_template_factory_invalid_mode() -> None:
+ with pytest.raises(ValueError):
+ PipelineTemplateRetrievalFactory.get_pipeline_template_factory("invalid")
+
+
+def test_get_built_in_pipeline_template_retrieval() -> None:
+ result = PipelineTemplateRetrievalFactory.get_built_in_pipeline_template_retrieval()
+
+ assert result is BuiltInPipelineTemplateRetrieval
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_type.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_type.py
new file mode 100644
index 0000000000..738ab6a5e7
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_pipeline_template_type.py
@@ -0,0 +1,8 @@
+from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType
+
+
+def test_pipeline_template_type_values() -> None:
+ assert PipelineTemplateType.REMOTE == "remote"
+ assert PipelineTemplateType.DATABASE == "database"
+ assert PipelineTemplateType.CUSTOMIZED == "customized"
+ assert PipelineTemplateType.BUILTIN == "builtin"
diff --git a/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_remote_retrieval.py b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_remote_retrieval.py
new file mode 100644
index 0000000000..10b5bc7cf6
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/pipeline_template/test_remote_retrieval.py
@@ -0,0 +1,98 @@
+import pytest
+
+from services.rag_pipeline.pipeline_template.database.database_retrieval import DatabasePipelineTemplateRetrieval
+from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType
+from services.rag_pipeline.pipeline_template.remote.remote_retrieval import RemotePipelineTemplateRetrieval
+
+
+def test_get_pipeline_templates_fallbacks_to_database_on_error(mocker) -> None:
+ fetch_mock = mocker.patch.object(
+ RemotePipelineTemplateRetrieval,
+ "fetch_pipeline_templates_from_dify_official",
+ side_effect=RuntimeError("boom"),
+ )
+ fallback_mock = mocker.patch.object(
+ DatabasePipelineTemplateRetrieval,
+ "fetch_pipeline_templates_from_db",
+ return_value={"pipeline_templates": [{"id": "db-1"}]},
+ )
+ retrieval = RemotePipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_templates("en-US")
+
+ assert retrieval.get_type() == PipelineTemplateType.REMOTE
+ assert result == {"pipeline_templates": [{"id": "db-1"}]}
+ fetch_mock.assert_called_once_with("en-US")
+ fallback_mock.assert_called_once_with("en-US")
+
+
+def test_get_pipeline_template_detail_fallbacks_to_database_on_error(mocker) -> None:
+ fetch_mock = mocker.patch.object(
+ RemotePipelineTemplateRetrieval,
+ "fetch_pipeline_template_detail_from_dify_official",
+ side_effect=RuntimeError("boom"),
+ )
+ fallback_mock = mocker.patch.object(
+ DatabasePipelineTemplateRetrieval,
+ "fetch_pipeline_template_detail_from_db",
+ return_value={"id": "db-1"},
+ )
+ retrieval = RemotePipelineTemplateRetrieval()
+
+ result = retrieval.get_pipeline_template_detail("tpl-1")
+
+ assert result == {"id": "db-1"}
+ fetch_mock.assert_called_once_with("tpl-1")
+ fallback_mock.assert_called_once_with("tpl-1")
+
+
+def test_fetch_pipeline_templates_from_dify_official(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.remote.remote_retrieval"
+ ".dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN",
+ "https://example.com",
+ )
+
+ success_response = mocker.Mock(status_code=200)
+ success_response.json.return_value = {"pipeline_templates": [{"id": "remote-1"}]}
+
+ failed_response = mocker.Mock(status_code=500)
+
+ http_get_mock = mocker.patch(
+ "services.rag_pipeline.pipeline_template.remote.remote_retrieval.httpx.get",
+ side_effect=[success_response, failed_response],
+ )
+
+ success_result = RemotePipelineTemplateRetrieval.fetch_pipeline_templates_from_dify_official("en-US")
+
+ with pytest.raises(ValueError):
+ RemotePipelineTemplateRetrieval.fetch_pipeline_templates_from_dify_official("en-US")
+
+ assert success_result == {"pipeline_templates": [{"id": "remote-1"}]}
+ assert http_get_mock.call_count == 2
+
+
+def test_fetch_pipeline_template_detail_from_dify_official(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.pipeline_template.remote.remote_retrieval"
+ ".dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN",
+ "https://example.com",
+ )
+
+ success_response = mocker.Mock(status_code=200)
+ success_response.json.return_value = {"id": "remote-1", "name": "Remote Template"}
+
+ failed_response = mocker.Mock(status_code=404)
+ failed_response.text = "Not Found"
+
+ http_get_mock = mocker.patch(
+ "services.rag_pipeline.pipeline_template.remote.remote_retrieval.httpx.get",
+ side_effect=[success_response, failed_response],
+ )
+
+ success_result = RemotePipelineTemplateRetrieval.fetch_pipeline_template_detail_from_dify_official("remote-1")
+ with pytest.raises(ValueError):
+ RemotePipelineTemplateRetrieval.fetch_pipeline_template_detail_from_dify_official("missing")
+
+ assert success_result == {"id": "remote-1", "name": "Remote Template"}
+ assert http_get_mock.call_count == 2
diff --git a/api/tests/unit_tests/services/rag_pipeline/test_pipeline_generate_service.py b/api/tests/unit_tests/services/rag_pipeline/test_pipeline_generate_service.py
new file mode 100644
index 0000000000..82a5598b13
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/test_pipeline_generate_service.py
@@ -0,0 +1,155 @@
+from types import SimpleNamespace
+from typing import cast
+
+import pytest
+
+from core.app.entities.app_invoke_entities import InvokeFrom
+from models.dataset import Pipeline
+from models.model import Account, App, EndUser
+from services.rag_pipeline.pipeline_generate_service import PipelineGenerateService
+
+
+def test_get_max_active_requests_uses_smallest_non_zero_limit(mocker) -> None:
+ mocker.patch("services.rag_pipeline.pipeline_generate_service.dify_config.APP_DEFAULT_ACTIVE_REQUESTS", 5)
+ mocker.patch("services.rag_pipeline.pipeline_generate_service.dify_config.APP_MAX_ACTIVE_REQUESTS", 3)
+
+ app_model = cast(App, SimpleNamespace(max_active_requests=10))
+
+ result = PipelineGenerateService._get_max_active_requests(app_model)
+
+ assert result == 3
+
+
+def test_get_max_active_requests_returns_zero_when_all_unlimited(mocker) -> None:
+ mocker.patch("services.rag_pipeline.pipeline_generate_service.dify_config.APP_DEFAULT_ACTIVE_REQUESTS", 0)
+ mocker.patch("services.rag_pipeline.pipeline_generate_service.dify_config.APP_MAX_ACTIVE_REQUESTS", 0)
+
+ app_model = cast(App, SimpleNamespace(max_active_requests=0))
+
+ result = PipelineGenerateService._get_max_active_requests(app_model)
+
+ assert result == 0
+
+
+@pytest.mark.parametrize(
+ ("invoke_from", "workflow", "expected_error"),
+ [
+ (InvokeFrom.DEBUGGER, None, "Workflow not initialized"),
+ (InvokeFrom.WEB_APP, None, "Workflow not published"),
+ (InvokeFrom.DEBUGGER, SimpleNamespace(id="wf-1"), None),
+ ],
+)
+def test_get_workflow(mocker, invoke_from, workflow, expected_error) -> None:
+ rag_pipeline_service_cls = mocker.patch("services.rag_pipeline.pipeline_generate_service.RagPipelineService")
+ rag_pipeline_service = rag_pipeline_service_cls.return_value
+ rag_pipeline_service.get_draft_workflow.return_value = workflow
+ rag_pipeline_service.get_published_workflow.return_value = workflow
+
+ pipeline = cast(Pipeline, SimpleNamespace(id="pipeline-1"))
+
+ if expected_error:
+ with pytest.raises(ValueError, match=expected_error):
+ PipelineGenerateService._get_workflow(pipeline, invoke_from)
+ else:
+ result = PipelineGenerateService._get_workflow(pipeline, invoke_from)
+ assert result == workflow
+
+
+def test_generate_updates_document_status_and_returns_event_stream(mocker) -> None:
+ pipeline = cast(Pipeline, SimpleNamespace(id="pipeline-1"))
+ user = cast(Account | EndUser, SimpleNamespace(id="user-1"))
+ args = {"original_document_id": "doc-1", "query": "hello"}
+
+ mocker.patch.object(PipelineGenerateService, "_get_workflow", return_value=SimpleNamespace(id="wf-1"))
+ update_status_mock = mocker.patch.object(PipelineGenerateService, "update_document_status")
+
+ generator_cls = mocker.patch("services.rag_pipeline.pipeline_generate_service.PipelineGenerator")
+ generator_instance = generator_cls.return_value
+ generator_instance.generate.return_value = "raw-events"
+ generator_cls.convert_to_event_stream.return_value = "stream-events"
+
+ result = PipelineGenerateService.generate(
+ pipeline=pipeline,
+ user=user,
+ args=args,
+ invoke_from=InvokeFrom.WEB_APP,
+ streaming=True,
+ )
+
+ assert result == "stream-events"
+ update_status_mock.assert_called_once_with("doc-1")
+
+
+def test_update_document_status_updates_existing_document(mocker) -> None:
+ document = SimpleNamespace(indexing_status="completed")
+
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = document
+ add_mock = session_mock.add
+ commit_mock = session_mock.commit
+ mocker.patch(
+ "services.rag_pipeline.pipeline_generate_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ PipelineGenerateService.update_document_status("doc-1")
+
+ assert document.indexing_status == "waiting"
+ add_mock.assert_called_once_with(document)
+ commit_mock.assert_called_once()
+
+
+def test_update_document_status_skips_when_document_missing(mocker) -> None:
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = None
+ add_mock = session_mock.add
+ commit_mock = session_mock.commit
+ mocker.patch(
+ "services.rag_pipeline.pipeline_generate_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ PipelineGenerateService.update_document_status("missing")
+
+ add_mock.assert_not_called()
+ commit_mock.assert_not_called()
+
+
+# --- generate_single_iteration ---
+
+
+def test_generate_single_iteration_delegates(mocker) -> None:
+ mocker.patch.object(PipelineGenerateService, "_get_workflow", return_value=SimpleNamespace(id="wf-1"))
+
+ generator_cls = mocker.patch("services.rag_pipeline.pipeline_generate_service.PipelineGenerator")
+ generator_instance = generator_cls.return_value
+ generator_instance.single_iteration_generate.return_value = "raw-iter"
+ generator_cls.convert_to_event_stream.return_value = "stream-iter"
+
+ pipeline = cast(Pipeline, SimpleNamespace(id="p1"))
+ user = cast(Account, SimpleNamespace(id="u1"))
+
+ result = PipelineGenerateService.generate_single_iteration(pipeline, user, "node-1", {"key": "val"})
+
+ assert result == "stream-iter"
+ generator_instance.single_iteration_generate.assert_called_once()
+
+
+# --- generate_single_loop ---
+
+
+def test_generate_single_loop_delegates(mocker) -> None:
+ mocker.patch.object(PipelineGenerateService, "_get_workflow", return_value=SimpleNamespace(id="wf-1"))
+
+ generator_cls = mocker.patch("services.rag_pipeline.pipeline_generate_service.PipelineGenerator")
+ generator_instance = generator_cls.return_value
+ generator_instance.single_loop_generate.return_value = "raw-loop"
+ generator_cls.convert_to_event_stream.return_value = "stream-loop"
+
+ pipeline = cast(Pipeline, SimpleNamespace(id="p1"))
+ user = cast(Account, SimpleNamespace(id="u1"))
+
+ result = PipelineGenerateService.generate_single_loop(pipeline, user, "node-1", {"key": "val"})
+
+ assert result == "stream-loop"
+ generator_instance.single_loop_generate.assert_called_once()
diff --git a/api/tests/unit_tests/services/rag_pipeline/test_pipeline_service_api_entities.py b/api/tests/unit_tests/services/rag_pipeline/test_pipeline_service_api_entities.py
new file mode 100644
index 0000000000..30dda6127a
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/test_pipeline_service_api_entities.py
@@ -0,0 +1,34 @@
+import pytest
+from pydantic import ValidationError
+
+from services.rag_pipeline.entity.pipeline_service_api_entities import (
+ DatasourceNodeRunApiEntity,
+ PipelineRunApiEntity,
+)
+
+
+def test_datasource_node_run_api_entity_valid_payload() -> None:
+ entity = DatasourceNodeRunApiEntity(
+ pipeline_id="pipeline-1",
+ node_id="node-1",
+ inputs={"q": "hello"},
+ datasource_type="local_file",
+ credential_id="cred-1",
+ is_published=True,
+ )
+
+ assert entity.pipeline_id == "pipeline-1"
+ assert entity.credential_id == "cred-1"
+
+
+def test_pipeline_run_api_entity_requires_start_node_id() -> None:
+ with pytest.raises(ValidationError):
+ PipelineRunApiEntity.model_validate(
+ {
+ "inputs": {"q": "hello"},
+ "datasource_type": "local_file",
+ "datasource_info_list": [{"id": "ds-1"}],
+ "is_published": True,
+ "response_mode": "streaming",
+ }
+ )
diff --git a/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_dsl_service.py b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_dsl_service.py
new file mode 100644
index 0000000000..f4fdac5f9f
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_dsl_service.py
@@ -0,0 +1,1325 @@
+from types import SimpleNamespace
+from typing import cast
+from unittest.mock import MagicMock, Mock
+
+import pytest
+import yaml
+from graphon.enums import BuiltinNodeTypes
+from sqlalchemy.orm import Session
+
+from core.workflow.nodes.knowledge_index import KNOWLEDGE_INDEX_NODE_TYPE
+from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, RagPipelineDatasetCreateEntity
+from services.rag_pipeline.rag_pipeline_dsl_service import (
+ ImportStatus,
+ RagPipelineDslService,
+ _check_version_compatibility,
+)
+
+
+@pytest.mark.parametrize(
+ ("imported_version", "expected_status"),
+ [
+ ("invalid", ImportStatus.FAILED),
+ ("1.0.0", ImportStatus.PENDING),
+ ("0.0.9", ImportStatus.COMPLETED_WITH_WARNINGS),
+ ("0.1.0", ImportStatus.COMPLETED),
+ ],
+)
+def test_check_version_compatibility(imported_version: str, expected_status: ImportStatus) -> None:
+ assert _check_version_compatibility(imported_version) == expected_status
+
+
+def test_encrypt_decrypt_dataset_id_roundtrip() -> None:
+ service = RagPipelineDslService(session=Mock())
+
+ encrypted = service.encrypt_dataset_id("dataset-1", "tenant-1")
+ decrypted = service.decrypt_dataset_id(encrypted, "tenant-1")
+
+ assert decrypted == "dataset-1"
+
+
+def test_decrypt_dataset_id_returns_none_for_invalid_payload() -> None:
+ service = RagPipelineDslService(session=Mock())
+
+ result = service.decrypt_dataset_id("not-base64", "tenant-1")
+
+ assert result is None
+
+
+def test_get_leaked_dependencies_returns_empty_list_for_empty_input() -> None:
+ result = RagPipelineDslService.get_leaked_dependencies("tenant-1", [])
+
+ assert result == []
+
+
+def test_get_leaked_dependencies_delegates_to_analysis_service(mocker) -> None:
+ expected = [Mock()]
+ get_leaked_mock = mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.get_leaked_dependencies",
+ return_value=expected,
+ )
+
+ dependency = Mock()
+ result = RagPipelineDslService.get_leaked_dependencies("tenant-1", [dependency])
+
+ assert result == expected
+ get_leaked_mock.assert_called_once_with(tenant_id="tenant-1", dependencies=[dependency])
+
+
+# --- check_dependencies ---
+
+
+def test_check_dependencies_returns_empty_when_no_redis_data(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.redis_client.get",
+ return_value=None,
+ )
+ service = RagPipelineDslService(session=Mock())
+ pipeline = Mock(id="p1", tenant_id="t1")
+
+ result = service.check_dependencies(pipeline=pipeline)
+
+ assert result.leaked_dependencies == []
+
+
+def test_check_dependencies_returns_leaked_deps_from_redis(mocker) -> None:
+ from core.plugin.entities.plugin import PluginDependency
+ from services.rag_pipeline.rag_pipeline_dsl_service import CheckDependenciesPendingData
+
+ dep = PluginDependency(
+ type=PluginDependency.Type.Marketplace,
+ value=PluginDependency.Marketplace(marketplace_plugin_unique_identifier="test/plugin:0.1.0"),
+ )
+ pending_data = CheckDependenciesPendingData(
+ dependencies=[dep],
+ pipeline_id="p1",
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.redis_client.get",
+ return_value=pending_data.model_dump_json(),
+ )
+ leaked = [dep]
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.get_leaked_dependencies",
+ return_value=leaked,
+ )
+ service = RagPipelineDslService(session=Mock())
+ pipeline = Mock(id="p1", tenant_id="t1")
+
+ result = service.check_dependencies(pipeline=pipeline)
+
+ assert result.leaked_dependencies == leaked
+
+
+# --- _extract_dependencies_from_model_config ---
+
+
+def test_extract_dependencies_from_model_config_extracts_model(mocker) -> None:
+ analyze_mock = mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ return_value="langgenius/openai",
+ )
+ config = {"model": {"provider": "openai"}}
+
+ result = RagPipelineDslService._extract_dependencies_from_model_config(config)
+
+ assert "langgenius/openai" in result
+ analyze_mock.assert_called_with("openai")
+
+
+def test_extract_dependencies_from_model_config_extracts_tools(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ return_value="x",
+ )
+ analyze_tool_mock = mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_tool_dependency",
+ return_value="langgenius/google",
+ )
+ config = {
+ "model": {"provider": "openai"},
+ "agent_mode": {"tools": [{"provider_id": "google"}]},
+ }
+
+ result = RagPipelineDslService._extract_dependencies_from_model_config(config)
+
+ assert "langgenius/google" in result
+ analyze_tool_mock.assert_called_with("google")
+
+
+def test_extract_dependencies_from_model_config_empty_config() -> None:
+ result = RagPipelineDslService._extract_dependencies_from_model_config({})
+
+ assert result == []
+
+
+# --- _extract_dependencies_from_workflow_graph ---
+
+
+def test_extract_dependencies_from_workflow_graph_ignores_unknown_types(mocker) -> None:
+ service = RagPipelineDslService(session=Mock())
+ graph = {"nodes": [{"data": {"type": "some-unknown-type"}}]}
+
+ result = service._extract_dependencies_from_workflow_graph(graph)
+
+ assert result == []
+
+
+def test_extract_dependencies_from_workflow_graph_handles_empty_graph() -> None:
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph({})
+
+ assert result == []
+
+
+def test_extract_dependencies_from_workflow_graph_handles_malformed_node(mocker) -> None:
+ service = RagPipelineDslService(session=Mock())
+ # Node with TOOL type but invalid data should be caught by exception handler
+ from graphon.enums import BuiltinNodeTypes
+
+ graph = {"nodes": [{"data": {"type": BuiltinNodeTypes.TOOL}}]}
+
+ result = service._extract_dependencies_from_workflow_graph(graph)
+
+ # Should not raise, error is caught internally
+ assert isinstance(result, list)
+
+
+# --- export_rag_pipeline_dsl ---
+
+
+def test_export_rag_pipeline_dsl_raises_when_dataset_missing() -> None:
+ pipeline = Mock()
+ pipeline.retrieve_dataset.return_value = None
+
+ service = RagPipelineDslService(session=Mock())
+
+ with pytest.raises(ValueError, match="Missing dataset"):
+ service.export_rag_pipeline_dsl(pipeline=pipeline)
+
+
+# --- import_rag_pipeline ---
+
+
+def test_import_rag_pipeline_url_fetch_error(mocker) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.ssrf_proxy.get", side_effect=Exception("fetch failed"))
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(
+ account=account, import_mode="yaml-url", yaml_url="https://example.com/dsl.yml"
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "fetch failed" in result.error
+
+
+def test_import_rag_pipeline_yaml_content_success(mocker) -> None:
+ yaml_content = """
+version: 0.1.0
+kind: rag_pipeline
+rag_pipeline:
+ name: Test Pipeline
+workflow:
+ graph:
+ nodes:
+ - data:
+ type: knowledge-index
+"""
+ pipeline = Mock()
+ pipeline.name = "Test Pipeline"
+ pipeline.description = "desc"
+ pipeline.id = "p1"
+ pipeline.is_published = False
+ mocker.patch.object(RagPipelineDslService, "_create_or_update_pipeline", return_value=pipeline)
+
+ config_mock = Mock()
+ config_mock.indexing_technique = "high_quality"
+ config_mock.embedding_model = "m"
+ config_mock.embedding_model_provider = "p"
+ config_mock.summary_index_setting = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate",
+ return_value=config_mock,
+ )
+
+ dataset_mock = Mock()
+ dataset_mock.id = "d1"
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Dataset", return_value=dataset_mock)
+
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ session.query.return_value.filter_by.return_value.all.return_value = []
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(account=account, import_mode="yaml-content", yaml_content=yaml_content)
+
+ if result.status == ImportStatus.FAILED:
+ print(f"DEBUG: {result.error}")
+ assert result.status == ImportStatus.COMPLETED
+
+
+def test_import_rag_pipeline_pending_version(mocker) -> None:
+ yaml_content = "version: 1.0.0\nkind: rag_pipeline\nrag_pipeline: {name: x}"
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.redis_client.setex")
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1", id="u1")
+
+ result = service.import_rag_pipeline(account=account, import_mode="yaml-content", yaml_content=yaml_content)
+
+ assert result.status == ImportStatus.PENDING
+ assert result.imported_dsl_version == "1.0.0"
+
+
+# --- confirm_import ---
+
+
+def test_confirm_import_success(mocker) -> None:
+ from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelinePendingData
+
+ yaml_content = """
+version: 0.1.0
+kind: rag_pipeline
+rag_pipeline:
+ name: Test Pipeline
+workflow:
+ graph:
+ nodes:
+ - data:
+ type: knowledge-index
+"""
+ pending = RagPipelinePendingData(import_mode="yaml-content", yaml_content=yaml_content, pipeline_id="p1")
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.redis_client.get",
+ return_value=pending.model_dump_json(),
+ )
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.redis_client.delete")
+
+ pipeline = Mock()
+ pipeline.id = "p1"
+ pipeline.name = "Test Pipeline"
+ pipeline.description = "desc"
+ pipeline.retrieve_dataset.return_value = None
+
+ mocker.patch.object(RagPipelineDslService, "_create_or_update_pipeline", return_value=pipeline)
+
+ config_mock = Mock()
+ config_mock.indexing_technique = "high_quality"
+ config_mock.embedding_model = "m"
+ config_mock.embedding_model_provider = "p"
+ config_mock.chunk_structure = "text_model"
+ config_mock.retrieval_model.model_dump.return_value = {}
+ config_mock.summary_index_setting = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate",
+ return_value=config_mock,
+ )
+
+ dataset_mock = Mock()
+ dataset_mock.id = "d1"
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Dataset", return_value=dataset_mock)
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.DatasetCollectionBinding", return_value=Mock(id="b1"))
+
+ service = RagPipelineDslService(session=Mock())
+ # Mocking self._session.scalar for the pipeline lookup
+ service._session.scalar.return_value = pipeline
+
+ account = Mock()
+ account.id = "u1"
+ account.current_tenant_id = "t1"
+
+ result = service.confirm_import(account=account, import_id="imp-1")
+
+ assert result.status == ImportStatus.COMPLETED
+ assert result.pipeline_id == "p1"
+ assert result.dataset_id == "d1"
+
+
+# --- _extract_dependencies_from_workflow_graph all types ---
+
+
+@pytest.mark.parametrize(
+ "node_type",
+ [
+ BuiltinNodeTypes.TOOL,
+ BuiltinNodeTypes.LLM,
+ BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL,
+ BuiltinNodeTypes.PARAMETER_EXTRACTOR,
+ BuiltinNodeTypes.QUESTION_CLASSIFIER,
+ ],
+)
+def test_extract_dependencies_from_workflow_graph_types(mocker, node_type) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_tool_dependency",
+ return_value="t1",
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ return_value="m1",
+ )
+
+ # Mock all potential node data classes
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.ToolNodeData.model_validate",
+ return_value=Mock(provider_id="p1"),
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.LLMNodeData.model_validate",
+ return_value=Mock(model=Mock(provider="p1")),
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeRetrievalNodeData.model_validate",
+ return_value=Mock(
+ retrieval_mode="single",
+ single_retrieval_config=Mock(model=Mock(provider="p1")),
+ ),
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.ParameterExtractorNodeData.model_validate",
+ return_value=Mock(model=Mock(provider="p1")),
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.QuestionClassifierNodeData.model_validate",
+ return_value=Mock(model=Mock(provider="p1")),
+ )
+
+ service = RagPipelineDslService(session=Mock())
+ graph = {"nodes": [{"data": {"type": node_type}}]}
+
+ result = service._extract_dependencies_from_workflow_graph(graph)
+
+ assert len(result) > 0
+
+
+# --- _create_or_update_pipeline ---
+
+
+def test_create_or_update_pipeline_create_new(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ account = Mock(current_tenant_id="t1", id="u1")
+ data = {
+ "rag_pipeline": {"name": "New", "description": "desc"},
+ "workflow": {"graph": {"nodes": []}},
+ }
+
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.current_user", SimpleNamespace(id="u1"))
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Workflow", return_value=Mock())
+ pipeline_cls = mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Pipeline")
+ pipeline_instance = pipeline_cls.return_value
+ pipeline_instance.tenant_id = "t1"
+ pipeline_instance.id = "p1"
+ pipeline_instance.name = "P"
+ pipeline_instance.is_published = False
+
+ result = service._create_or_update_pipeline(pipeline=None, data=data, account=account, dependencies=[])
+
+ assert result == pipeline_instance
+ session.add.assert_called()
+
+
+# --- export_rag_pipeline_dsl comprehensive ---
+
+
+def test_export_rag_pipeline_dsl_with_workflow(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ pipeline = Mock()
+ pipeline.id = "p1"
+ pipeline.tenant_id = "t1"
+ pipeline.name = "P"
+ pipeline.description = "d"
+
+ dataset = Mock()
+ dataset.id = "d1"
+ dataset.name = "D"
+ dataset.chunk_structure = "text_model"
+ dataset.doc_form = "text_model"
+ dataset.icon_info = {"icon": "i"}
+ pipeline.retrieve_dataset.return_value = dataset
+
+ workflow = Mock()
+ workflow.app_id = "p1"
+ workflow.graph_dict = {"nodes": []}
+ workflow.environment_variables = []
+ workflow.conversation_variables = []
+ workflow.rag_pipeline_variables = []
+ workflow.to_dict.return_value = {"graph": {"nodes": []}}
+
+ # Mocking single .where() call
+ session.query.return_value.where.return_value.first.return_value = workflow
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.generate_dependencies",
+ return_value=[],
+ )
+
+ result_yaml = service.export_rag_pipeline_dsl(pipeline=pipeline)
+ data = yaml.safe_load(result_yaml)
+
+ assert data["kind"] == "rag_pipeline"
+ assert data["rag_pipeline"]["name"] == "D"
+ assert "workflow" in data
+
+
+# --- _extract_dependencies_from_workflow_graph more types ---
+
+
+def test_extract_dependencies_from_workflow_graph_datasource(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DatasourceNodeData.model_validate",
+ return_value=Mock(provider_type="online", plugin_id="ds1"),
+ )
+ service = RagPipelineDslService(session=Mock())
+ graph = {"nodes": [{"data": {"type": BuiltinNodeTypes.DATASOURCE}}]}
+
+ result = service._extract_dependencies_from_workflow_graph(graph)
+
+ assert "ds1" in result
+
+
+def test_import_rag_pipeline_raises_for_invalid_mode() -> None:
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ with pytest.raises(ValueError, match="Invalid import_mode"):
+ service.import_rag_pipeline(account=account, import_mode="invalid-mode")
+
+
+def test_import_rag_pipeline_yaml_url_requires_url() -> None:
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(account=account, import_mode="yaml-url", yaml_url=None)
+
+ assert result.status == ImportStatus.FAILED
+ assert "yaml_url is required" in result.error
+
+
+def test_import_rag_pipeline_yaml_content_requires_content() -> None:
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(account=account, import_mode="yaml-content", yaml_content=None)
+
+ assert result.status == ImportStatus.FAILED
+ assert "yaml_content is required" in result.error
+
+
+def test_import_rag_pipeline_yaml_content_requires_mapping() -> None:
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(account=account, import_mode="yaml-content", yaml_content="- one\n- two")
+
+ assert result.status == ImportStatus.FAILED
+ assert "content must be a mapping" in result.error
+
+
+def test_confirm_import_returns_failed_when_pending_data_is_invalid_type(mocker) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.redis_client.get", return_value=object())
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.confirm_import(import_id="imp-1", account=account)
+
+ assert result.status == ImportStatus.FAILED
+ assert "Invalid import information" in result.error
+
+
+def test_append_workflow_export_data_filters_credentials(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ workflow = Mock()
+ workflow.graph_dict = {"nodes": []}
+ workflow.to_dict.return_value = {
+ "graph": {
+ "nodes": [
+ {
+ "data": {
+ "type": BuiltinNodeTypes.TOOL,
+ "credential_id": "secret",
+ }
+ },
+ {
+ "data": {
+ "type": BuiltinNodeTypes.AGENT,
+ "agent_parameters": {"tools": {"value": [{"credential_id": "secret-agent"}]}},
+ }
+ },
+ ]
+ }
+ }
+ session.query.return_value.where.return_value.first.return_value = workflow
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.generate_dependencies",
+ return_value=[],
+ )
+ export_data: dict = {}
+ pipeline = Mock(id="p1", tenant_id="t1")
+
+ service._append_workflow_export_data(export_data=export_data, pipeline=pipeline, include_secret=False)
+
+ nodes = export_data["workflow"]["graph"]["nodes"]
+ assert "credential_id" not in nodes[0]["data"]
+ assert "credential_id" not in nodes[1]["data"]["agent_parameters"]["tools"]["value"][0]
+
+
+def test_create_rag_pipeline_dataset_raises_when_name_conflicts(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ session.query.return_value.filter_by.return_value.first.return_value = Mock()
+ create_entity = RagPipelineDatasetCreateEntity(
+ name="Existing Name",
+ description="",
+ icon_info=IconInfo(icon="book"),
+ permission="only_me",
+ yaml_content="x",
+ )
+
+ with pytest.raises(ValueError, match="already exists"):
+ service.create_rag_pipeline_dataset("tenant-1", create_entity)
+
+
+def test_create_rag_pipeline_dataset_generates_name_when_missing(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ session.query.return_value.filter_by.return_value.first.return_value = None
+ session.query.return_value.filter_by.return_value.all.return_value = [Mock(name="Untitled")]
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.generate_incremental_name", return_value="Untitled 2")
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.current_user", Mock(id="u1", current_tenant_id="t1"))
+ mocker.patch.object(
+ service,
+ "import_rag_pipeline",
+ return_value=SimpleNamespace(
+ id="imp-1",
+ dataset_id="d1",
+ pipeline_id="p1",
+ status=ImportStatus.COMPLETED,
+ imported_dsl_version="0.1.0",
+ current_dsl_version="0.1.0",
+ error="",
+ ),
+ )
+ create_entity = RagPipelineDatasetCreateEntity(
+ name="",
+ description="",
+ icon_info=IconInfo(icon="book"),
+ permission="only_me",
+ yaml_content="x",
+ )
+
+ result = service.create_rag_pipeline_dataset("tenant-1", create_entity)
+
+ assert create_entity.name == "Untitled 2"
+ assert result["status"] == ImportStatus.COMPLETED
+
+
+def test_append_workflow_export_data_encrypts_knowledge_retrieval_dataset_ids(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ workflow = Mock()
+ workflow.graph_dict = {"nodes": []}
+ workflow.to_dict.return_value = {
+ "graph": {
+ "nodes": [
+ {
+ "data": {
+ "type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL,
+ "dataset_ids": ["d1", "d2"],
+ }
+ }
+ ]
+ }
+ }
+ session.query.return_value.where.return_value.first.return_value = workflow
+ mocker.patch.object(service, "encrypt_dataset_id", side_effect=lambda dataset_id, tenant_id: f"enc-{dataset_id}")
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.generate_dependencies",
+ return_value=[],
+ )
+ export_data: dict = {}
+ pipeline = Mock(id="p1", tenant_id="t1")
+
+ service._append_workflow_export_data(export_data=export_data, pipeline=pipeline, include_secret=False)
+
+ ids = export_data["workflow"]["graph"]["nodes"][0]["data"]["dataset_ids"]
+ assert ids == ["enc-d1", "enc-d2"]
+
+
+def test_confirm_import_updates_existing_dataset(mocker) -> None:
+ from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelinePendingData
+
+ yaml_content = (
+ "version: 0.1.0\n"
+ "kind: rag_pipeline\n"
+ "rag_pipeline: {name: x}\n"
+ "workflow: {graph: {nodes: [{data: {type: knowledge-index}}]}}"
+ )
+ pending = RagPipelinePendingData(import_mode="yaml-content", yaml_content=yaml_content, pipeline_id="p1")
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.redis_client.get",
+ return_value=pending.model_dump_json(),
+ )
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.redis_client.delete")
+ pipeline = Mock(id="p1", name="P", description="D")
+ dataset = Mock(id="d1")
+ pipeline.retrieve_dataset.return_value = dataset
+ mocker.patch.object(RagPipelineDslService, "_create_or_update_pipeline", return_value=pipeline)
+ config_mock = Mock()
+ config_mock.indexing_technique = "economy"
+ config_mock.keyword_number = 3
+ config_mock.retrieval_model.model_dump.return_value = {"top_k": 3}
+ config_mock.chunk_structure = "text_model"
+ config_mock.summary_index_setting = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate",
+ return_value=config_mock,
+ )
+ service = RagPipelineDslService(session=Mock())
+ service._session.scalar.return_value = pipeline
+ account = Mock(id="u1", current_tenant_id="t1")
+
+ result = service.confirm_import(import_id="imp-1", account=account)
+
+ assert result.status == ImportStatus.COMPLETED
+ assert dataset.indexing_technique == "economy"
+
+
+def test_import_rag_pipeline_yaml_url_handles_empty_content_after_github_rewrite(mocker) -> None:
+ response = Mock()
+ response.raise_for_status.return_value = None
+ response.content = b""
+ get_mock = mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.ssrf_proxy.get", return_value=response)
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(
+ account=account,
+ import_mode="yaml-url",
+ yaml_url="https://github.com/langgenius/dify/blob/main/pipeline.yml",
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "Empty content from url" in result.error
+ called_url = get_mock.call_args.args[0]
+ assert "raw.githubusercontent.com" in called_url
+
+
+def test_create_or_update_pipeline_decrypts_knowledge_retrieval_dataset_ids(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ account = Mock(id="u1", current_tenant_id="t1")
+ pipeline = Mock(id="p1", tenant_id="t1", name="N", description="D")
+ data = {
+ "rag_pipeline": {"name": "N2", "description": "D2"},
+ "workflow": {
+ "graph": {
+ "nodes": [
+ {
+ "data": {
+ "type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL,
+ "dataset_ids": ["enc-1", "enc-2"],
+ }
+ }
+ ]
+ }
+ },
+ }
+ draft_workflow = Mock(id="wf1")
+ session.query.return_value.where.return_value.first.return_value = draft_workflow
+ mocker.patch.object(service, "decrypt_dataset_id", side_effect=["d1", None])
+
+ result = service._create_or_update_pipeline(pipeline=pipeline, data=data, account=account)
+
+ assert result is pipeline
+ assert data["workflow"]["graph"]["nodes"][0]["data"]["dataset_ids"] == ["d1"]
+ assert draft_workflow.graph is not None
+
+
+def test_create_or_update_pipeline_creates_draft_when_missing(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ account = Mock(id="u1", current_tenant_id="t1")
+ pipeline = Mock(id="p1", tenant_id="t1", name="N", description="D")
+ data = {"rag_pipeline": {"name": "N2", "description": "D2"}, "workflow": {"graph": {"nodes": []}}}
+ session.query.return_value.where.return_value.first.return_value = None
+ workflow_cls = mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Workflow")
+ workflow_cls.return_value.id = "wf-new"
+
+ service._create_or_update_pipeline(pipeline=pipeline, data=data, account=account)
+
+ assert pipeline.workflow_id == "wf-new"
+
+
+def test_import_rag_pipeline_url_size_exceeds_limit(mocker) -> None:
+ response = Mock()
+ response.raise_for_status.return_value = None
+ response.content = b"x" * (10 * 1024 * 1024 + 1)
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.ssrf_proxy.get", return_value=response)
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(
+ account=account,
+ import_mode="yaml-url",
+ yaml_url="https://example.com/pipeline.yaml",
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "10MB" in result.error
+
+
+def test_import_rag_pipeline_fails_when_rag_pipeline_data_missing() -> None:
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+ result = service.import_rag_pipeline(
+ account=account,
+ import_mode="yaml-content",
+ yaml_content="version: 0.1.0\nkind: rag_pipeline\nworkflow: {}",
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "Missing rag_pipeline data" in result.error
+
+
+def test_import_rag_pipeline_fails_when_pipeline_id_not_found() -> None:
+ session = cast(MagicMock, Mock())
+ session.scalar.return_value = None
+ service = RagPipelineDslService(session=cast(Session, session))
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(
+ account=account,
+ import_mode="yaml-content",
+ yaml_content="version: 0.1.0\nkind: rag_pipeline\nrag_pipeline: {name: x}\nworkflow: {}",
+ pipeline_id="missing-pipeline",
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "Pipeline not found" in result.error
+
+
+def test_import_rag_pipeline_fails_for_non_string_version_type() -> None:
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1")
+
+ result = service.import_rag_pipeline(
+ account=account,
+ import_mode="yaml-content",
+ yaml_content="version: 1\nkind: rag_pipeline\nrag_pipeline: {name: x}\nworkflow: {}",
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "Invalid version type" in result.error
+
+
+def test_append_workflow_export_data_raises_when_draft_workflow_missing() -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ session.query.return_value.where.return_value.first.return_value = None
+
+ with pytest.raises(ValueError, match="Missing draft workflow configuration"):
+ service._append_workflow_export_data(export_data={}, pipeline=Mock(tenant_id="t1"), include_secret=False)
+
+
+def test_append_workflow_export_data_keeps_secret_fields_when_include_secret_true(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ workflow = Mock()
+ workflow.graph_dict = {"nodes": []}
+ workflow.to_dict.return_value = {
+ "graph": {
+ "nodes": [
+ {"data": {"type": BuiltinNodeTypes.TOOL, "credential_id": "tool-secret"}},
+ {
+ "data": {
+ "type": BuiltinNodeTypes.AGENT,
+ "agent_parameters": {"tools": {"value": [{"credential_id": "agent-secret"}]}},
+ }
+ },
+ ]
+ }
+ }
+ session.query.return_value.where.return_value.first.return_value = workflow
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.generate_dependencies",
+ return_value=[],
+ )
+
+ export_data: dict[str, object] = {}
+ service._append_workflow_export_data(export_data=export_data, pipeline=Mock(tenant_id="t1"), include_secret=True)
+
+ workflow_data = cast(dict[str, object], export_data["workflow"])
+ graph = cast(dict[str, object], workflow_data["graph"])
+ nodes = cast(list[dict[str, object]], graph["nodes"])
+ node0_data = cast(dict[str, object], nodes[0]["data"])
+ node1_data = cast(dict[str, object], nodes[1]["data"])
+ agent_parameters = cast(dict[str, object], node1_data["agent_parameters"])
+ tools = cast(dict[str, object], agent_parameters["tools"])
+ tool_values = cast(list[dict[str, object]], tools["value"])
+ assert node0_data["credential_id"] == "tool-secret"
+ assert tool_values[0]["credential_id"] == "agent-secret"
+
+
+def test_extract_dependencies_from_workflow_graph_skips_local_file_datasource(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DatasourceNodeData.model_validate",
+ return_value=Mock(provider_type="local_file", plugin_id="plugin-x"),
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": BuiltinNodeTypes.DATASOURCE}}]}
+ )
+
+ assert result == []
+
+
+def test_extract_dependencies_from_workflow_graph_knowledge_index_reranking(mocker) -> None:
+ analyze = mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ side_effect=lambda provider: f"dep:{provider}",
+ )
+ knowledge = Mock()
+ knowledge.indexing_technique = "high_quality"
+ knowledge.embedding_model_provider = "embed-provider"
+ knowledge.retrieval_model.reranking_mode = "reranking_model"
+ knowledge.retrieval_model.reranking_enable = True
+ knowledge.retrieval_model.reranking_model.reranking_provider_name = "rerank-provider"
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate",
+ return_value=knowledge,
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": KNOWLEDGE_INDEX_NODE_TYPE}}]}
+ )
+
+ assert result == ["dep:embed-provider", "dep:rerank-provider"]
+ assert analyze.call_count == 2
+
+
+def test_extract_dependencies_from_workflow_graph_multiple_retrieval_weighted_score(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ return_value="dep:weighted",
+ )
+ retrieval = Mock()
+ retrieval.retrieval_mode = "multiple"
+ retrieval.multiple_retrieval_config.reranking_mode = "weighted_score"
+ retrieval.multiple_retrieval_config.weights.vector_setting.embedding_provider_name = "emb-provider"
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeRetrievalNodeData.model_validate",
+ return_value=retrieval,
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL}}]}
+ )
+
+ assert result == ["dep:weighted"]
+
+
+def test_extract_dependencies_from_workflow_graph_multiple_retrieval_reranking_model(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ return_value="dep:rerank",
+ )
+ retrieval = Mock()
+ retrieval.retrieval_mode = "multiple"
+ retrieval.multiple_retrieval_config.reranking_mode = "reranking_model"
+ retrieval.multiple_retrieval_config.reranking_model.provider = "rerank-provider"
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeRetrievalNodeData.model_validate",
+ return_value=retrieval,
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL}}]}
+ )
+
+ assert result == ["dep:rerank"]
+
+
+def test_extract_dependencies_from_model_config_includes_dataset_reranking_and_tools(mocker) -> None:
+ model_analyze = mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ side_effect=["dep:model", "dep:rerank"],
+ )
+ tool_analyze = mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_tool_dependency",
+ return_value="dep:tool",
+ )
+ config = {
+ "model": {"provider": "openai"},
+ "dataset_configs": {
+ "datasets": {
+ "datasets": [
+ {
+ "reranking_model": {
+ "reranking_provider_name": {"provider": "cohere"},
+ }
+ }
+ ]
+ }
+ },
+ "agent_mode": {"tools": [{"provider_id": "google"}]},
+ }
+
+ deps = RagPipelineDslService._extract_dependencies_from_model_config(config)
+
+ assert deps == ["dep:model", "dep:rerank", "dep:tool"]
+ assert model_analyze.call_count == 2
+ tool_analyze.assert_called_once_with("google")
+
+
+def test_check_version_compatibility_hits_major_older_branch(mocker) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.CURRENT_DSL_VERSION", "1.0.0")
+
+ status = _check_version_compatibility("0.9.0")
+
+ assert status == ImportStatus.PENDING
+
+
+def test_import_rag_pipeline_sets_default_version_and_kind(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ account = Mock(current_tenant_id="t1")
+ pipeline = Mock(id="p1", name="P", description="D", is_published=False)
+ mocker.patch.object(service, "_create_or_update_pipeline", return_value=pipeline)
+ config = Mock()
+ config.indexing_technique = "economy"
+ config.keyword_number = 2
+ config.retrieval_model.model_dump.return_value = {}
+ config.summary_index_setting = None
+ config.chunk_structure = "text_model"
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate",
+ return_value=config,
+ )
+ dataset = Mock(id="d1")
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Dataset", return_value=dataset)
+ session.query.return_value.filter_by.return_value.all.return_value = []
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.generate_incremental_name", return_value="P")
+
+ result = service.import_rag_pipeline(
+ account=account,
+ import_mode="yaml-content",
+ yaml_content="rag_pipeline: {name: x}\nworkflow: {graph: {nodes: [{data: {type: knowledge-index}}]}}",
+ )
+
+ assert result.status == ImportStatus.COMPLETED
+ assert result.imported_dsl_version == "0.1.0"
+
+
+def test_import_rag_pipeline_creates_pending_for_dependencies(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ account = Mock(current_tenant_id="t1")
+ setex = mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.redis_client.setex")
+ yaml_content = """
+version: 1.0.0
+kind: rag_pipeline
+rag_pipeline: {name: x}
+dependencies:
+ - type: marketplace
+ value:
+ marketplace_plugin_unique_identifier: langgenius/example:0.1.0
+workflow: {graph: {nodes: []}}
+"""
+
+ result = service.import_rag_pipeline(account=account, import_mode="yaml-content", yaml_content=yaml_content)
+
+ assert result.status == ImportStatus.PENDING
+ setex.assert_called_once()
+
+
+def test_confirm_import_returns_failed_when_pending_pipeline_missing(mocker) -> None:
+ from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelinePendingData
+
+ pending = RagPipelinePendingData(import_mode="yaml-content", yaml_content="version: 0.1.0", pipeline_id="p1")
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.redis_client.get", return_value=pending.model_dump_json()
+ )
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ session.scalar.return_value = None
+ mocker.patch.object(RagPipelineDslService, "_create_or_update_pipeline", side_effect=ValueError("pipeline missing"))
+
+ result = service.confirm_import(import_id="imp-1", account=Mock(current_tenant_id="t1"))
+
+ assert result.status == ImportStatus.FAILED
+
+
+def test_append_workflow_export_data_skips_empty_node_data(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ workflow = Mock()
+ workflow.graph_dict = {"nodes": []}
+ workflow.to_dict.return_value = {"graph": {"nodes": [{"data": {}}, {}]}}
+ session.query.return_value.where.return_value.first.return_value = workflow
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.generate_dependencies",
+ return_value=[],
+ )
+ export_data = {}
+
+ service._append_workflow_export_data(export_data=export_data, pipeline=Mock(tenant_id="t1"), include_secret=False)
+
+ assert "workflow" in export_data
+
+
+def test_extract_dependencies_from_workflow_graph_multiple_config_none(mocker) -> None:
+ retrieval = Mock()
+ retrieval.retrieval_mode = "multiple"
+ retrieval.multiple_retrieval_config = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeRetrievalNodeData.model_validate",
+ return_value=retrieval,
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL}}]}
+ )
+
+ assert result == []
+
+
+def test_extract_dependencies_from_workflow_graph_single_config_none(mocker) -> None:
+ retrieval = Mock()
+ retrieval.retrieval_mode = "single"
+ retrieval.single_retrieval_config = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeRetrievalNodeData.model_validate",
+ return_value=retrieval,
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL}}]}
+ )
+
+ assert result == []
+
+
+def test_create_or_update_pipeline_raises_when_workflow_missing() -> None:
+ service = RagPipelineDslService(session=Mock())
+ account = Mock(current_tenant_id="t1", id="u1")
+
+ with pytest.raises(ValueError, match="Missing workflow data for rag pipeline"):
+ service._create_or_update_pipeline(pipeline=None, data={"rag_pipeline": {"name": "x"}}, account=account)
+
+
+def test_import_rag_pipeline_with_pipeline_id_uses_existing_dataset(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ existing_dataset = Mock(id="d1", chunk_structure="text_model")
+ existing_pipeline = Mock(id="p1", name="P", description="D", is_published=False)
+ existing_pipeline.retrieve_dataset.return_value = existing_dataset
+ session.scalar.return_value = existing_pipeline
+ mocker.patch.object(service, "_create_or_update_pipeline", return_value=existing_pipeline)
+ config = Mock()
+ config.indexing_technique = "economy"
+ config.keyword_number = 3
+ config.chunk_structure = "text_model"
+ config.summary_index_setting = {"enabled": True}
+ config.retrieval_model.model_dump.return_value = {"top_k": 3}
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate", return_value=config
+ )
+
+ yaml_content = (
+ "version: 0.1.0\n"
+ "kind: rag_pipeline\n"
+ "rag_pipeline: {name: x}\n"
+ "workflow: {graph: {nodes: [{data: {type: knowledge-index}}]}}"
+ )
+
+ result = service.import_rag_pipeline(
+ account=Mock(id="u1", current_tenant_id="t1"),
+ import_mode="yaml-content",
+ yaml_content=yaml_content,
+ pipeline_id="p1",
+ )
+
+ assert result.status == ImportStatus.COMPLETED
+ assert result.dataset_id == "d1"
+
+
+def test_import_rag_pipeline_raises_for_chunk_structure_mismatch_on_published(mocker) -> None:
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ existing_dataset = Mock(id="d1", chunk_structure="hierarchical_model")
+ existing_pipeline = Mock(id="p1", name="P", description="D", is_published=True)
+ existing_pipeline.retrieve_dataset.return_value = existing_dataset
+ session.scalar.return_value = existing_pipeline
+ mocker.patch.object(service, "_create_or_update_pipeline", return_value=existing_pipeline)
+ config = Mock()
+ config.chunk_structure = "text_model"
+ config.indexing_technique = "economy"
+ config.keyword_number = 3
+ config.summary_index_setting = None
+ config.retrieval_model.model_dump.return_value = {}
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate", return_value=config
+ )
+
+ yaml_content = (
+ "version: 0.1.0\n"
+ "kind: rag_pipeline\n"
+ "rag_pipeline: {name: x}\n"
+ "workflow: {graph: {nodes: [{data: {type: knowledge-index}}]}}"
+ )
+
+ result = service.import_rag_pipeline(
+ account=Mock(id="u1", current_tenant_id="t1"),
+ import_mode="yaml-content",
+ yaml_content=yaml_content,
+ pipeline_id="p1",
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "Chunk structure is not compatible" in result.error
+
+
+def test_import_rag_pipeline_fails_when_no_knowledge_index_node(mocker) -> None:
+ service = RagPipelineDslService(session=Mock())
+ pipeline = Mock(id="p1", name="P", description="D", is_published=False)
+ mocker.patch.object(service, "_create_or_update_pipeline", return_value=pipeline)
+
+ yaml_content = (
+ "version: 0.1.0\n"
+ "kind: rag_pipeline\n"
+ "rag_pipeline: {name: x}\n"
+ "workflow: {graph: {nodes: [{data: {type: start}}]}}"
+ )
+
+ result = service.import_rag_pipeline(
+ account=Mock(id="u1", current_tenant_id="t1"),
+ import_mode="yaml-content",
+ yaml_content=yaml_content,
+ )
+
+ assert result.status == ImportStatus.FAILED
+ assert "Knowledge Index node" in result.error
+
+
+def test_confirm_import_fails_when_no_knowledge_index_node(mocker) -> None:
+ from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelinePendingData
+
+ yaml_content = (
+ "version: 0.1.0\n"
+ "kind: rag_pipeline\n"
+ "rag_pipeline: {name: x}\n"
+ "workflow: {graph: {nodes: [{data: {type: start}}]}}"
+ )
+
+ pending = RagPipelinePendingData(
+ import_mode="yaml-content",
+ yaml_content=yaml_content,
+ pipeline_id=None,
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.redis_client.get", return_value=pending.model_dump_json()
+ )
+ service = RagPipelineDslService(session=Mock())
+ pipeline = Mock(id="p1", name="P", description="D")
+ pipeline.retrieve_dataset.return_value = None
+ mocker.patch.object(service, "_create_or_update_pipeline", return_value=pipeline)
+
+ result = service.confirm_import(import_id="imp-1", account=Mock(id="u1", current_tenant_id="t1"))
+
+ assert result.status == ImportStatus.FAILED
+ assert "Knowledge Index node" in result.error
+
+
+def test_create_or_update_pipeline_saves_dependencies_to_redis(mocker) -> None:
+ from core.plugin.entities.plugin import PluginDependency
+
+ session = cast(MagicMock, Mock())
+ service = RagPipelineDslService(session=cast(Session, session))
+ account = Mock(id="u1", current_tenant_id="t1")
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.current_user", SimpleNamespace(id="u1"))
+ mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Workflow", return_value=Mock(id="wf-1"))
+ pipeline_cls = mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.Pipeline")
+ pipeline = pipeline_cls.return_value
+ pipeline.tenant_id = "t1"
+ pipeline.id = "p1"
+ session.query.return_value.where.return_value.first.return_value = None
+ setex = mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.redis_client.setex")
+ dependency = PluginDependency(
+ type=PluginDependency.Type.Marketplace,
+ value=PluginDependency.Marketplace(marketplace_plugin_unique_identifier="langgenius/example:0.1.0"),
+ )
+
+ service._create_or_update_pipeline(
+ pipeline=None,
+ data={"rag_pipeline": {"name": "x"}, "workflow": {"graph": {"nodes": []}}},
+ account=account,
+ dependencies=[dependency],
+ )
+
+ setex.assert_called_once()
+
+
+def test_extract_dependencies_from_workflow_graph_knowledge_index_without_embedding_provider(mocker) -> None:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.DependenciesAnalysisService.analyze_model_provider_dependency",
+ return_value="dep",
+ )
+ knowledge = Mock()
+ knowledge.indexing_technique = "high_quality"
+ knowledge.embedding_model_provider = None
+ knowledge.retrieval_model.reranking_mode = "reranking_model"
+ knowledge.retrieval_model.reranking_enable = False
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeConfiguration.model_validate", return_value=knowledge
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": KNOWLEDGE_INDEX_NODE_TYPE}}]}
+ )
+
+ assert result == []
+
+
+def test_extract_dependencies_from_workflow_graph_multiple_reranking_without_model(mocker) -> None:
+ retrieval = Mock()
+ retrieval.retrieval_mode = "multiple"
+ retrieval.multiple_retrieval_config.reranking_mode = "reranking_model"
+ retrieval.multiple_retrieval_config.reranking_model = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeRetrievalNodeData.model_validate",
+ return_value=retrieval,
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL}}]}
+ )
+
+ assert result == []
+
+
+def test_extract_dependencies_from_workflow_graph_multiple_weighted_without_weights(mocker) -> None:
+ retrieval = Mock()
+ retrieval.retrieval_mode = "multiple"
+ retrieval.multiple_retrieval_config.reranking_mode = "weighted_score"
+ retrieval.multiple_retrieval_config.weights = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_dsl_service.KnowledgeRetrievalNodeData.model_validate",
+ return_value=retrieval,
+ )
+ service = RagPipelineDslService(session=Mock())
+
+ result = service._extract_dependencies_from_workflow_graph(
+ {"nodes": [{"data": {"type": BuiltinNodeTypes.KNOWLEDGE_RETRIEVAL}}]}
+ )
+
+ assert result == []
diff --git a/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_manage_service.py b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_manage_service.py
new file mode 100644
index 0000000000..bd75e699dc
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_manage_service.py
@@ -0,0 +1,24 @@
+from types import SimpleNamespace
+
+from services.rag_pipeline.rag_pipeline_manage_service import RagPipelineManageService
+
+
+def test_list_rag_pipeline_datasources_marks_authorized(mocker) -> None:
+ datasource_1 = SimpleNamespace(provider="notion", plugin_id="plugin-1", is_authorized=False)
+ datasource_2 = SimpleNamespace(provider="jina", plugin_id="plugin-2", is_authorized=False)
+
+ manager_cls = mocker.patch("services.rag_pipeline.rag_pipeline_manage_service.PluginDatasourceManager")
+ manager_cls.return_value.fetch_datasource_providers.return_value = [datasource_1, datasource_2]
+
+ provider_cls = mocker.patch("services.rag_pipeline.rag_pipeline_manage_service.DatasourceProviderService")
+ provider_instance = provider_cls.return_value
+ provider_instance.get_datasource_credentials.side_effect = [
+ {"access_token": "token"},
+ None,
+ ]
+
+ result = RagPipelineManageService.list_rag_pipeline_datasources("tenant-1")
+
+ assert result == [datasource_1, datasource_2]
+ assert datasource_1.is_authorized is True
+ assert datasource_2.is_authorized is False
diff --git a/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_service.py b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_service.py
new file mode 100644
index 0000000000..cb3c2d742d
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_service.py
@@ -0,0 +1,2318 @@
+import time
+from types import SimpleNamespace
+
+import pytest
+from sqlalchemy.orm import sessionmaker
+
+from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, PipelineTemplateInfoEntity
+from services.rag_pipeline.rag_pipeline import RagPipelineService
+
+
+@pytest.fixture
+def rag_pipeline_service(mocker) -> RagPipelineService:
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository",
+ return_value=MockRepo(),
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DifyAPIRepositoryFactory.create_api_workflow_run_repository",
+ return_value=MockRepo(),
+ )
+ return RagPipelineService(session_maker=sessionmaker())
+
+
+class MockRepo:
+ pass
+
+
+def test_get_pipeline_templates_fallbacks_to_builtin_for_non_english_empty_result(mocker) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline.dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_MODE", "remote")
+
+ remote_retrieval = mocker.Mock()
+ remote_retrieval.get_pipeline_templates.return_value = {"pipeline_templates": []}
+
+ factory_mock = mocker.patch("services.rag_pipeline.rag_pipeline.PipelineTemplateRetrievalFactory")
+ factory_mock.get_pipeline_template_factory.return_value.return_value = remote_retrieval
+
+ builtin_retrieval = mocker.Mock()
+ builtin_retrieval.fetch_pipeline_templates_from_builtin.return_value = {"pipeline_templates": [{"id": "builtin-1"}]}
+ factory_mock.get_built_in_pipeline_template_retrieval.return_value = builtin_retrieval
+
+ result = RagPipelineService.get_pipeline_templates(type="built-in", language="ja-JP")
+
+ assert result == {"pipeline_templates": [{"id": "builtin-1"}]}
+ builtin_retrieval.fetch_pipeline_templates_from_builtin.assert_called_once_with("en-US")
+
+
+def test_get_pipeline_templates_customized_mode_uses_customized_factory(mocker) -> None:
+ retrieval = mocker.Mock()
+ retrieval.get_pipeline_templates.return_value = {"pipeline_templates": [{"id": "custom-1"}]}
+
+ factory_mock = mocker.patch("services.rag_pipeline.rag_pipeline.PipelineTemplateRetrievalFactory")
+ factory_mock.get_pipeline_template_factory.return_value.return_value = retrieval
+
+ result = RagPipelineService.get_pipeline_templates(type="customized", language="en-US")
+
+ assert result == {"pipeline_templates": [{"id": "custom-1"}]}
+ factory_mock.get_pipeline_template_factory.assert_called_with("customized")
+
+
+@pytest.mark.parametrize("template_type", ["built-in", "customized"])
+def test_get_pipeline_template_detail_uses_expected_mode(mocker, template_type: str) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline.dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_MODE", "remote")
+ retrieval = mocker.Mock()
+ retrieval.get_pipeline_template_detail.return_value = {"id": "tpl-1"}
+
+ factory_mock = mocker.patch("services.rag_pipeline.rag_pipeline.PipelineTemplateRetrievalFactory")
+ factory_mock.get_pipeline_template_factory.return_value.return_value = retrieval
+
+ result = RagPipelineService.get_pipeline_template_detail("tpl-1", type=template_type)
+
+ assert result == {"id": "tpl-1"}
+ expected_mode = "remote" if template_type == "built-in" else "customized"
+ factory_mock.get_pipeline_template_factory.assert_called_with(expected_mode)
+
+
+def test_get_published_workflow_returns_none_when_pipeline_has_no_workflow_id(rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(workflow_id=None)
+
+ result = rag_pipeline_service.get_published_workflow(pipeline)
+
+ assert result is None
+
+
+def test_get_all_published_workflow_returns_empty_for_unpublished_pipeline(rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(workflow_id=None)
+ session = SimpleNamespace()
+
+ workflows, has_more = rag_pipeline_service.get_all_published_workflow(
+ session=session,
+ pipeline=pipeline,
+ page=1,
+ limit=20,
+ user_id=None,
+ named_only=False,
+ )
+
+ assert workflows == []
+ assert has_more is False
+
+
+def test_get_all_published_workflow_applies_limit_and_has_more(rag_pipeline_service) -> None:
+ scalars_result = SimpleNamespace(all=lambda: ["wf1", "wf2", "wf3"])
+ session = SimpleNamespace(scalars=lambda stmt: scalars_result)
+ pipeline = SimpleNamespace(id="pipeline-1", workflow_id="wf-live")
+
+ workflows, has_more = rag_pipeline_service.get_all_published_workflow(
+ session=session,
+ pipeline=pipeline,
+ page=1,
+ limit=2,
+ user_id="user-1",
+ named_only=True,
+ )
+
+ assert workflows == ["wf1", "wf2"]
+ assert has_more is True
+
+
+def test_get_pipeline_raises_when_dataset_not_found(mocker, rag_pipeline_service) -> None:
+ first_query = mocker.Mock()
+ first_query.where.return_value.first.return_value = None
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=first_query)
+
+ with pytest.raises(ValueError, match="Dataset not found"):
+ rag_pipeline_service.get_pipeline("tenant-1", "dataset-1")
+
+
+# --- update_customized_pipeline_template ---
+
+
+def test_update_customized_pipeline_template_success(mocker) -> None:
+ template = SimpleNamespace(name="old", description="old", icon={}, updated_by=None)
+
+ # First query finds the template, second query (duplicate check) returns None
+ query_mock_1 = mocker.Mock()
+ query_mock_1.where.return_value.first.return_value = template
+ query_mock_2 = mocker.Mock()
+ query_mock_2.where.return_value.first.return_value = None
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", side_effect=[query_mock_1, query_mock_2])
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.commit")
+ mocker.patch("services.rag_pipeline.rag_pipeline.current_user", SimpleNamespace(id="u1", current_tenant_id="t1"))
+
+ info = PipelineTemplateInfoEntity(
+ name="new",
+ description="new desc",
+ icon_info=IconInfo(icon="🔥"),
+ )
+ result = RagPipelineService.update_customized_pipeline_template("tpl-1", info)
+
+ assert result.name == "new"
+ assert result.description == "new desc"
+
+
+def test_update_customized_pipeline_template_not_found(mocker) -> None:
+ query_mock = mocker.Mock()
+ query_mock.where.return_value.first.return_value = None
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query_mock)
+ mocker.patch("services.rag_pipeline.rag_pipeline.current_user", SimpleNamespace(id="u1", current_tenant_id="t1"))
+
+ info = PipelineTemplateInfoEntity(name="x", description="d", icon_info=IconInfo(icon="i"))
+ with pytest.raises(ValueError, match="Customized pipeline template not found"):
+ RagPipelineService.update_customized_pipeline_template("tpl-missing", info)
+
+
+def test_update_customized_pipeline_template_duplicate_name(mocker) -> None:
+ template = SimpleNamespace(name="old", description="old", icon={}, updated_by=None)
+ duplicate = SimpleNamespace(name="dup")
+
+ query_mock = mocker.Mock()
+ query_mock.where.return_value.first.side_effect = [template, duplicate]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query_mock)
+ mocker.patch("services.rag_pipeline.rag_pipeline.current_user", SimpleNamespace(id="u1", current_tenant_id="t1"))
+
+ info = PipelineTemplateInfoEntity(name="dup", description="d", icon_info=IconInfo(icon="i"))
+ with pytest.raises(ValueError, match="Template name is already exists"):
+ RagPipelineService.update_customized_pipeline_template("tpl-1", info)
+
+
+# --- delete_customized_pipeline_template ---
+
+
def test_delete_customized_pipeline_template_success(mocker) -> None:
    """A found template should be deleted and the session committed."""
    stored_template = SimpleNamespace(id="tpl-1")

    lookup = mocker.Mock()
    lookup.where.return_value.first.return_value = stored_template
    mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=lookup)
    mock_delete = mocker.patch("services.rag_pipeline.rag_pipeline.db.session.delete")
    mock_commit = mocker.patch("services.rag_pipeline.rag_pipeline.db.session.commit")
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.current_user",
        SimpleNamespace(id="u1", current_tenant_id="t1"),
    )

    RagPipelineService.delete_customized_pipeline_template("tpl-1")

    mock_delete.assert_called_once_with(stored_template)
    mock_commit.assert_called_once()
+
+
def test_delete_customized_pipeline_template_not_found(mocker) -> None:
    """Deleting a template id that does not exist must raise ValueError."""
    missing_lookup = mocker.Mock()
    missing_lookup.where.return_value.first.return_value = None
    mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=missing_lookup)
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.current_user",
        SimpleNamespace(id="u1", current_tenant_id="t1"),
    )

    with pytest.raises(ValueError, match="Customized pipeline template not found"):
        RagPipelineService.delete_customized_pipeline_template("tpl-missing")
+
+
+# --- sync_draft_workflow ---
+
+
def test_sync_draft_workflow_creates_new_when_none_exists(mocker, rag_pipeline_service) -> None:
    """When no draft exists, sync should create a workflow and link it to the pipeline."""
    mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=None)

    class StubWorkflow:
        # Accepts any constructor kwargs and exposes a fixed id, mimicking Workflow.
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)
            self.id = "wf-new"

    mocker.patch("services.rag_pipeline.rag_pipeline.Workflow", StubWorkflow)
    for session_method in ("add", "flush", "commit"):
        mocker.patch(f"services.rag_pipeline.rag_pipeline.db.session.{session_method}")

    pipeline = SimpleNamespace(tenant_id="t1", id="p1", workflow_id=None)

    created = rag_pipeline_service.sync_draft_workflow(
        pipeline=pipeline,
        graph={"nodes": []},
        unique_hash=None,
        account=SimpleNamespace(id="u1"),
        environment_variables=[],
        conversation_variables=[],
        rag_pipeline_variables=[],
    )

    assert created.id == "wf-new"
    assert pipeline.workflow_id == "wf-new"
+
+
def test_sync_draft_workflow_raises_on_hash_mismatch(mocker, rag_pipeline_service) -> None:
    """A stale unique_hash must be rejected with WorkflowHashNotEqualError."""
    from services.errors.app import WorkflowHashNotEqualError

    mocker.patch.object(
        rag_pipeline_service,
        "get_draft_workflow",
        return_value=SimpleNamespace(unique_hash="hash-old"),
    )

    with pytest.raises(WorkflowHashNotEqualError):
        rag_pipeline_service.sync_draft_workflow(
            pipeline=SimpleNamespace(tenant_id="t1", id="p1"),
            graph={"nodes": []},
            unique_hash="hash-different",
            account=SimpleNamespace(id="u1"),
            environment_variables=[],
            conversation_variables=[],
            rag_pipeline_variables=[],
        )
+
+
def test_sync_draft_workflow_updates_existing(mocker, rag_pipeline_service) -> None:
    """With a matching hash, sync should mutate and return the existing draft."""
    draft = SimpleNamespace(
        unique_hash="hash-1",
        graph=None,
        updated_by=None,
        updated_at=None,
        environment_variables=None,
        conversation_variables=None,
        rag_pipeline_variables=None,
    )
    mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=draft)
    mocker.patch("services.rag_pipeline.rag_pipeline.db.session.commit")

    returned = rag_pipeline_service.sync_draft_workflow(
        pipeline=SimpleNamespace(tenant_id="t1", id="p1"),
        graph={"nodes": [{"id": "n1"}]},
        unique_hash="hash-1",
        account=SimpleNamespace(id="u1"),
        environment_variables=["env1"],
        conversation_variables=["conv1"],
        rag_pipeline_variables=["rp1"],
    )

    assert returned is draft
    assert returned.updated_by == "u1"
    assert returned.environment_variables == ["env1"]
+
+
+# --- get_default_block_config ---
+
+
def test_get_default_block_config_returns_config_for_valid_type(mocker, rag_pipeline_service) -> None:
    """A mapped node type should yield the node class's default config."""
    from graphon.enums import BuiltinNodeTypes

    node_cls = mocker.Mock()
    node_cls.get_default_config.return_value = {"type": "start", "config": {}}
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.get_node_type_classes_mapping",
        return_value={BuiltinNodeTypes.START: {"1": node_cls}},
    )
    mocker.patch("services.rag_pipeline.rag_pipeline.LATEST_VERSION", "1")

    config = rag_pipeline_service.get_default_block_config("start")

    assert config == {"type": "start", "config": {}}
+
+
def test_get_default_block_config_returns_none_for_unmapped_type(rag_pipeline_service) -> None:
    """An unknown node type string should simply yield None."""
    result = rag_pipeline_service.get_default_block_config("nonexistent-type")
    assert result is None
+
+
+# --- update_workflow ---
+
+
def test_update_workflow_updates_allowed_fields(mocker, rag_pipeline_service) -> None:
    """Only whitelisted fields may be written; audit fields are stamped."""
    workflow = SimpleNamespace(
        id="wf-1", marked_name="", marked_comment="", updated_by=None, updated_at=None, disallowed="original"
    )
    mock_session = mocker.Mock()
    mock_session.scalar.return_value = workflow

    updated = rag_pipeline_service.update_workflow(
        session=mock_session,
        workflow_id="wf-1",
        tenant_id="t1",
        account_id="u1",
        data={"marked_name": "v1", "marked_comment": "release", "disallowed": "hacked"},
    )

    assert updated.marked_name == "v1"
    assert updated.marked_comment == "release"
    # The attribute outside the whitelist must be left untouched.
    assert updated.disallowed == "original"
    assert updated.updated_by == "u1"
+
+
def test_update_workflow_returns_none_when_not_found(mocker, rag_pipeline_service) -> None:
    """A missing workflow id should make update_workflow return None."""
    mock_session = mocker.Mock()
    mock_session.scalar.return_value = None

    updated = rag_pipeline_service.update_workflow(
        session=mock_session,
        workflow_id="wf-missing",
        tenant_id="t1",
        account_id="u1",
        data={"marked_name": "v1"},
    )

    assert updated is None
+
+
+# --- get_rag_pipeline_paginate_workflow_runs ---
+
+
def test_get_rag_pipeline_paginate_workflow_runs_delegates(mocker, rag_pipeline_service) -> None:
    """Pagination args should be forwarded verbatim to the workflow-run repository."""
    page = mocker.Mock()
    runs_repo = mocker.Mock()
    runs_repo.get_paginated_workflow_runs.return_value = page
    rag_pipeline_service._workflow_run_repo = runs_repo

    result = rag_pipeline_service.get_rag_pipeline_paginate_workflow_runs(
        SimpleNamespace(tenant_id="t1", id="p1"), {"limit": 10, "last_id": "abc"}
    )

    assert result is page
    runs_repo.get_paginated_workflow_runs.assert_called_once_with(
        tenant_id="t1",
        app_id="p1",
        triggered_from=mocker.ANY,
        limit=10,
        last_id="abc",
    )
+
+
+# --- get_rag_pipeline_workflow_run ---
+
+
def test_get_rag_pipeline_workflow_run_delegates(mocker, rag_pipeline_service) -> None:
    """Single-run lookup should be forwarded verbatim to the repository."""
    run = mocker.Mock()
    runs_repo = mocker.Mock()
    runs_repo.get_workflow_run_by_id.return_value = run
    rag_pipeline_service._workflow_run_repo = runs_repo

    result = rag_pipeline_service.get_rag_pipeline_workflow_run(SimpleNamespace(tenant_id="t1", id="p1"), "run-1")

    assert result is run
    runs_repo.get_workflow_run_by_id.assert_called_once_with(tenant_id="t1", app_id="p1", run_id="run-1")
+
+
+# --- is_workflow_exist ---
+
+
def test_is_workflow_exist_returns_true_when_draft_exists(mocker, rag_pipeline_service) -> None:
    """A non-zero draft count means the workflow exists."""
    counting_query = mocker.Mock()
    counting_query.where.return_value.count.return_value = 1
    mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=counting_query)

    assert rag_pipeline_service.is_workflow_exist(SimpleNamespace(tenant_id="t1", id="p1")) is True
+
+
def test_is_workflow_exist_returns_false_when_no_draft(mocker, rag_pipeline_service) -> None:
    """A zero draft count means the workflow does not exist."""
    counting_query = mocker.Mock()
    counting_query.where.return_value.count.return_value = 0
    mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=counting_query)

    assert rag_pipeline_service.is_workflow_exist(SimpleNamespace(tenant_id="t1", id="p1")) is False
+
+
+# --- publish_workflow ---
+
+
def test_publish_workflow_success(mocker, rag_pipeline_service) -> None:
    """Publishing should copy the draft into a new Workflow, update dataset
    settings via DatasetService, and return the newly published workflow.

    Fix: drop the unused locals ``mock_select`` and ``mock_dataset_service``
    (assigned but never referenced); the patches themselves are kept.
    """
    # Bypass SQLAlchemy statement construction; the session below is a Mock.
    mocker.patch("services.rag_pipeline.rag_pipeline.select")

    # Draft workflow containing a single knowledge-index node.
    draft_wf = mocker.Mock()
    draft_wf.id = "wf-draft"
    draft_wf.unique_hash = "hash-1"
    draft_wf.graph = {
        "nodes": [
            {
                "data": {
                    "type": "knowledge-index",
                    "dataset_id": "d1",
                    "chunk_structure": "paragraph",
                    "indexing_technique": "high_quality",
                    "process_rule": {"mode": "automatic"},
                    "retrieval_model": {"search_method": "hybrid_search", "top_k": 3},
                }
            }
        ]
    }
    draft_wf.environment_variables = []
    draft_wf.conversation_variables = []
    draft_wf.rag_pipeline_variables = []
    draft_wf.type = "workflow"
    draft_wf.features = {}

    # Pipeline currently pointing at an older published workflow.
    pipeline = mocker.Mock()
    pipeline.id = "p1"
    pipeline.tenant_id = "t1"
    pipeline.workflow_id = "wf-old-published"

    account = mocker.Mock()
    account.id = "u1"

    # Workflow.new() produces the published copy.
    mock_workflow_class = mocker.patch("services.rag_pipeline.rag_pipeline.Workflow")
    new_wf = mocker.Mock()
    new_wf.id = "wf-published-new"
    new_wf.graph_dict = draft_wf.graph
    mock_workflow_class.new.return_value = new_wf

    # Replace the whole db handle (avoids app-context errors) and DatasetService.
    mocker.patch("services.rag_pipeline.rag_pipeline.db", mocker.Mock())
    mock_dataset_service_class = mocker.patch("services.dataset_service.DatasetService")

    # Session: scalar() fetches the draft workflow.
    mock_session = mocker.Mock()
    mock_session.scalar.return_value = draft_wf

    # Dataset lookup (the service fetches the dataset before updating settings).
    dataset = mocker.Mock()
    dataset.retrieval_model_dict = {}
    dataset_query = mocker.Mock()
    dataset_query.where.return_value.first.return_value = dataset

    # Node-execution copy query returns nothing to copy.
    node_exec_query = mocker.Mock()
    node_exec_query.where.return_value.all.return_value = []

    # session.query is consulted first for node executions, then for the dataset.
    mock_session.query.side_effect = [node_exec_query, dataset_query]

    result = rag_pipeline_service.publish_workflow(session=mock_session, pipeline=pipeline, account=account)

    assert result == new_wf
    # Dataset settings are routed through DatasetService.
    mock_dataset_service_class.update_rag_pipeline_dataset_settings.assert_called_once()
+
+
+# --- run_datasource_workflow_node ---
+
+
def test_run_datasource_workflow_node_website_crawl(mocker, rag_pipeline_service) -> None:
    """Website-crawl datasource runs should stream a progress event then results."""
    from core.datasource.entities.datasource_entities import DatasourceProviderType

    pipeline = mocker.Mock()
    pipeline.id = "p1"
    pipeline.tenant_id = "t1"

    workflow = mocker.Mock()
    workflow.graph_dict = {
        "nodes": [
            {
                "id": "node-1",
                "data": {
                    "type": "datasource",
                    "plugin_id": "p-1",
                    "provider_name": "firecrawl",
                    "datasource_name": "website_crawl",
                    "datasource_parameters": {"url": {"value": "{{#start.url#}}"}},
                },
            }
        ]
    }
    mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)

    runtime_mock = mocker.Mock()
    runtime_mock.datasource_provider_type.return_value = DatasourceProviderType.WEBSITE_CRAWL

    def crawl_events(**kwargs):
        # First an in-progress update, then the completed page list.
        yield mocker.Mock(result=mocker.Mock(status="processing", total=10, completed=2))
        yield mocker.Mock(
            result=mocker.Mock(status="completed", total=10, completed=10, web_info_list=[{"title": "test"}])
        )

    runtime_mock.get_website_crawl.side_effect = crawl_events

    mocker.patch(
        "core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime",
        return_value=runtime_mock,
    )
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials",
        return_value={"api_key": "sk-123"},
    )
    # Keep the enum consistent with the runtime's reported provider type.
    mocker.patch("services.rag_pipeline.rag_pipeline.DatasourceProviderType", DatasourceProviderType)

    emitted = list(
        rag_pipeline_service.run_datasource_workflow_node(
            pipeline=pipeline,
            node_id="node-1",
            user_inputs={"url": "https://example.com"},
            account=mocker.Mock(id="u1"),
            datasource_type="website_crawl",
            is_published=True,
        )
    )

    assert len(emitted) == 2
    assert emitted[0]["total"] == 10
    assert emitted[0]["completed"] == 2
    assert emitted[1]["data"] == [{"title": "test"}]
    assert emitted[1]["total"] == 10
    assert emitted[1]["completed"] == 10
+
+
+# --- run_datasource_node_preview ---
+
+
def test_run_datasource_node_preview_online_document(mocker, rag_pipeline_service) -> None:
    """Preview should concatenate streamed variable chunks into one content string."""
    from core.datasource.entities.datasource_entities import DatasourceMessage, DatasourceProviderType

    pipeline = mocker.Mock()
    pipeline.id = "p1"
    pipeline.tenant_id = "t1"

    workflow = mocker.Mock()
    workflow.graph_dict = {
        "nodes": [
            {
                "id": "node-1",
                "data": {
                    "type": "datasource",
                    "plugin_id": "p-1",
                    "provider_name": "notion",
                    "datasource_name": "online_document",
                    "datasource_parameters": {
                        "workspace_id": {"value": "ws-1"},
                        "page_id": {"value": "pg-1"},
                        "type": {"value": "page"},
                    },
                },
            }
        ]
    }
    mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)

    def streamed_chunks(**kwargs):
        # Two streamed chunks of the same variable; the service should join them.
        for chunk in ("Hello ", "World"):
            yield DatasourceMessage(
                type=DatasourceMessage.MessageType.VARIABLE,
                message=DatasourceMessage.VariableMessage(
                    variable_name="content", variable_value=chunk, stream=True
                ),
            )

    doc_runtime = mocker.Mock()
    doc_runtime.get_online_document_page_content.side_effect = streamed_chunks
    mocker.patch(
        "core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime",
        return_value=doc_runtime,
    )
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials",
        return_value={"token": "abc"},
    )
    mocker.patch("services.rag_pipeline.rag_pipeline.DatasourceProviderType", DatasourceProviderType)

    preview = rag_pipeline_service.run_datasource_node_preview(
        pipeline=pipeline,
        node_id="node-1",
        user_inputs={},
        account=mocker.Mock(id="u1"),
        datasource_type="online_document",
        is_published=True,
    )

    assert preview == {"content": "Hello World"}
+
+
+# --- _handle_node_run_result ---
+
+
def test_handle_node_run_result_success(mocker, rag_pipeline_service) -> None:
    """A succeeded node event should yield a succeeded execution record."""
    from graphon.enums import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
    from graphon.graph_events import NodeRunSucceededEvent
    from graphon.node_events.base import NodeRunResult

    node = mocker.Mock()
    node.workflow_id = "wf-1"
    node.node_type = "start"
    node.title = "Start"

    run_result = NodeRunResult(
        status=WorkflowNodeExecutionStatus.SUCCEEDED,
        inputs={"q": "hi"},
        outputs={"ans": "hello"},
        metadata={WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 10},
    )

    def event_stream():
        # Single success event carrying the node-run result above.
        yield NodeRunSucceededEvent(
            id="event-1",
            start_at=time.time(),
            node_id="node-1",
            node_type="start",
            node_run_result=run_result,
            route_node_id=None,
        )

    execution = rag_pipeline_service._handle_node_run_result(
        getter=lambda: (node, event_stream()), start_at=time.perf_counter(), tenant_id="t1", node_id="node-1"
    )

    assert execution.status == WorkflowNodeExecutionStatus.SUCCEEDED
    assert execution.inputs == {"q": "hi"}
    assert execution.outputs == {"ans": "hello"}
    assert execution.metadata == {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 10}
+
+
+# --- get_first_step_parameters / get_second_step_parameters ---
+
+
def test_get_first_step_parameters_success(mocker, rag_pipeline_service) -> None:
    """First-step parameters should be derived from the declared pipeline variables."""
    workflow = mocker.Mock()
    workflow.graph_dict = {
        "nodes": [{"id": "node-1", "data": {"datasource_parameters": {"url": {"value": "{{#start.url#}}"}}}}]
    }
    workflow.rag_pipeline_variables = [{"variable": "url", "label": "URL", "type": "string"}]
    mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)

    params = rag_pipeline_service.get_first_step_parameters(
        pipeline=mocker.Mock(), node_id="node-1", is_draft=False
    )

    assert len(params) == 1
    assert params[0]["variable"] == "url"
+
+
def test_get_second_step_parameters_success(mocker, rag_pipeline_service) -> None:
    """Second-step parameters: a node without variable references yields none."""
    # 1. Setup mock workflow
    pipeline = mocker.Mock()
    workflow = mocker.Mock()
    workflow.graph_dict = {
        "nodes": [
            {
                "id": "node-1",
                "data": {},  # no datasource_parameters -> nothing references the declared variable
            }
        ]
    }
    workflow.rag_pipeline_variables = [{"variable": "var1", "label": "Var 1"}]
    mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)

    # 2. Run test
    result = rag_pipeline_service.get_second_step_parameters(pipeline=pipeline, node_id="node-1", is_draft=False)

    # 3. Assertions
    # NOTE(review): presumably the service filters the declared variables by
    # references found in the node data, so an empty node yields an empty
    # list — confirm against the implementation of get_second_step_parameters.
    assert len(result) == 0
+
+
+# --- publish_customized_pipeline_template ---
+
+
def test_publish_customized_pipeline_template_success(mocker, rag_pipeline_service) -> None:
    """Publishing a customized template should export the pipeline DSL without error.

    Fix: removed a dead patch of
    ``"…db.session.query.return_value.where.return_value.scalar"`` —
    ``session.query`` is given a ``side_effect`` below, so its ``return_value``
    chain is never consulted; the patch could only mask real failures.
    """
    from models.dataset import Dataset, Pipeline, PipelineCustomizedTemplate
    from models.workflow import Workflow

    pipeline = mocker.Mock(spec=Pipeline)
    pipeline.id = "p1"
    pipeline.tenant_id = "t1"
    pipeline.workflow_id = "wf-1"
    pipeline.is_published = True

    workflow = mocker.Mock()
    workflow.id = "wf-1"

    # Replace the whole db handle to avoid requiring an app context.
    mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")

    def query_for(model):
        """Dispatch a per-model query mock for session.query(model)."""
        m = mocker.Mock()
        if model == Pipeline:
            m.where.return_value.first.return_value = pipeline
        elif model == Workflow:
            m.where.return_value.first.return_value = workflow
        elif model == PipelineCustomizedTemplate:
            # No existing template with the same name.
            m.where.return_value.first.return_value = None
        elif model == Dataset:
            m.where.return_value.first.return_value = mocker.Mock()
        else:
            # func.max(...) position queries and any other lookups.
            m.where.return_value.scalar.return_value = 5
            m.where.return_value.first.return_value = mocker.Mock()
        return m

    mock_db.session.query.side_effect = query_for

    # Dataset retrieval off the pipeline mock.
    pipeline.retrieve_dataset.return_value = mocker.Mock()

    # Position calculation: func.max is replaced so no real SQL is built.
    mocker.patch("services.rag_pipeline.rag_pipeline.func.max", return_value=1)

    # DSL export service.
    mock_dsl_service = mocker.Mock()
    mock_dsl_service.export_rag_pipeline_dsl.return_value = {"dsl": "content"}
    mocker.patch("services.rag_pipeline.rag_pipeline_dsl_service.RagPipelineDslService", return_value=mock_dsl_service)

    # Session context manager and current user.
    mocker.patch("services.rag_pipeline.rag_pipeline.Session", return_value=mocker.MagicMock())
    mock_user = mocker.Mock()
    mock_user.id = "user-123"
    mocker.patch("services.rag_pipeline.rag_pipeline.current_user", mock_user)

    args = {"name": "New Template", "description": "Desc", "icon_info": {"icon": "star"}, "tags": ["tag1"]}
    rag_pipeline_service.publish_customized_pipeline_template("p1", args)

    # The session inside the context manager is a Mock, so assert the
    # observable side effect: the DSL export was performed exactly once.
    mock_dsl_service.export_rag_pipeline_dsl.assert_called_once()
+
+
+# --- get_datasource_plugins ---
+
+
def test_get_datasource_plugins_success(mocker, rag_pipeline_service) -> None:
    """Datasource nodes should be listed together with their available credentials."""
    from models.dataset import Dataset, Pipeline

    dataset = mocker.Mock(spec=Dataset)
    dataset.pipeline_id = "p1"

    pipeline = mocker.Mock(spec=Pipeline)
    pipeline.id = "p1"

    workflow = mocker.Mock()
    workflow.graph_dict = {
        "nodes": [
            {
                "id": "node-1",
                "data": {
                    "type": "datasource",
                    "plugin_id": "p-1",
                    "provider_name": "notion",
                    "provider_type": "online_document",
                    "title": "Notion",
                },
            }
        ]
    }
    workflow.rag_pipeline_variables = []

    # Dataset lookup first, then the pipeline lookup.
    lookup = mocker.Mock()
    lookup.where.return_value.first.side_effect = [dataset, pipeline]
    mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=lookup)
    mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)

    provider_service = mocker.Mock()
    provider_service.list_datasource_credentials.return_value = [
        {"id": "c1", "name": "Cred 1", "type": "token", "is_default": True}
    ]
    mocker.patch("services.rag_pipeline.rag_pipeline.DatasourceProviderService", return_value=provider_service)

    plugins = rag_pipeline_service.get_datasource_plugins("t1", "d1", True)

    assert len(plugins) == 1
    assert plugins[0]["node_id"] == "node-1"
    assert plugins[0]["credentials"][0]["id"] == "c1"
+
+
+# --- retry_error_document ---
+
+
def test_retry_error_document_success(mocker, rag_pipeline_service) -> None:
    """Retrying a failed document should re-trigger pipeline generation."""
    from models.dataset import Document, DocumentPipelineExecutionLog, Pipeline

    dataset = mocker.Mock()
    document = mocker.Mock(spec=Document)
    document.id = "doc-1"

    execution_log = mocker.Mock(spec=DocumentPipelineExecutionLog)
    execution_log.pipeline_id = "p-1"
    execution_log.datasource_info = "{}"  # stored as a JSON string

    pipeline = mocker.Mock(spec=Pipeline)
    pipeline.id = "p-1"

    # Lookups resolve the execution log first, then the pipeline.
    lookup = mocker.Mock()
    lookup.where.return_value.first.side_effect = [execution_log, pipeline]
    mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=lookup)
    mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=mocker.Mock())

    generator = mocker.Mock()
    mocker.patch("services.rag_pipeline.rag_pipeline.PipelineGenerator", return_value=generator)

    rag_pipeline_service.retry_error_document(dataset, document, mocker.Mock())

    generator.generate.assert_called_once()
+
+
+# --- set_datasource_variables ---
+
+
def test_set_datasource_variables_success(mocker, rag_pipeline_service) -> None:
    """Setting datasource variables should persist the execution through the
    node-execution repository and save variables via DraftVariableSaver."""
    from graphon.entities.workflow_node_execution import WorkflowNodeExecution

    from models.dataset import Pipeline

    # 1. Setup mocks
    # Replace the whole db handle so no app context or real engine is required.
    mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")
    mock_db.engine = mocker.Mock()
    mock_db.session.query.return_value.where.return_value.first.return_value = mocker.Mock()

    pipeline = mocker.Mock(spec=Pipeline)
    pipeline.id = "p-1"
    pipeline.tenant_id = "t1"

    draft_wf = mocker.Mock()
    draft_wf.id = "wf-1"
    draft_wf.get_enclosing_node_type_and_id.return_value = None  # None avoids tuple-unpacking a Mock
    mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=draft_wf)

    # The node-run result arrives pre-converted into a WorkflowNodeExecution.
    execution = mocker.Mock(spec=WorkflowNodeExecution)
    execution.id = "exec-1"
    execution.process_data = {}
    execution.inputs = {}
    execution.outputs = {}
    mocker.patch.object(rag_pipeline_service, "_handle_node_run_result", return_value=execution)

    # Mock the execution repository instance returned by the constructor.
    mock_repo_instance = mocker.Mock()
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.SQLAlchemyWorkflowNodeExecutionRepository",
        return_value=mock_repo_instance,
    )
    # The service also converts the execution to a DB model via _to_db_model.
    mock_db_exec = mocker.Mock()
    mock_db_exec.node_id = "node-1"
    mock_db_exec.node_type = "datasource"
    mock_repo_instance._to_db_model.return_value = mock_db_exec

    # Session() is used as a context manager; MagicMock provides __enter__/__exit__.
    mocker.patch("services.rag_pipeline.rag_pipeline.Session", return_value=mocker.MagicMock())

    # DraftVariableSaver persists the resolved variables.
    mock_saver_instance = mocker.Mock()
    mocker.patch("services.rag_pipeline.rag_pipeline.DraftVariableSaver", return_value=mock_saver_instance)

    # 2. Run test
    args = {"start_node_id": "node-1"}
    user = mocker.Mock()
    user.id = "user-1"
    rag_pipeline_service.set_datasource_variables(pipeline, args, user)

    # 3. Assertions
    mock_repo_instance.save.assert_called_once()
    mock_saver_instance.save.assert_called_once()
+
+
+# --- Utility Methods ---
+
+
def test_get_draft_workflow_success(mocker, rag_pipeline_service) -> None:
    """get_draft_workflow should return the workflow the query finds."""
    from models.dataset import Pipeline
    from models.workflow import Workflow

    pipeline = mocker.Mock(spec=Pipeline)
    pipeline.id = "p1"
    pipeline.tenant_id = "t1"

    draft = mocker.Mock(spec=Workflow)
    mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")
    mock_db.session.query.return_value.where.return_value.first.return_value = draft

    assert rag_pipeline_service.get_draft_workflow(pipeline) == draft
+
+
def test_get_published_workflow_success(mocker, rag_pipeline_service) -> None:
    """get_published_workflow should return the workflow referenced by workflow_id."""
    from models.dataset import Pipeline
    from models.workflow import Workflow

    pipeline = mocker.Mock(spec=Pipeline)
    pipeline.id = "p1"
    pipeline.tenant_id = "t1"
    pipeline.workflow_id = "wf-pub"

    published = mocker.Mock(spec=Workflow)
    mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")
    mock_db.session.query.return_value.where.return_value.first.return_value = published

    assert rag_pipeline_service.get_published_workflow(pipeline) == published
+
+
def test_get_default_block_configs_success(rag_pipeline_service) -> None:
    """The default block configs should be a non-empty list of node configs."""
    configs = rag_pipeline_service.get_default_block_configs()

    assert isinstance(configs, list)
    assert configs  # at least one built-in node type must be present
+
+
def test_get_default_block_config_success(rag_pipeline_service) -> None:
    """The LLM builtin node type must resolve to an 'llm' default config."""
    from graphon.enums import BuiltinNodeTypes

    config = rag_pipeline_service.get_default_block_config(BuiltinNodeTypes.LLM)

    assert config is not None
    assert config["type"] == "llm"
+
+
def test_publish_workflow_raises_when_draft_workflow_missing(mocker, rag_pipeline_service) -> None:
    """Publishing without a draft workflow should raise a ValueError."""
    mock_session = mocker.Mock()
    mock_session.scalar.return_value = None

    with pytest.raises(ValueError, match="No valid workflow found"):
        rag_pipeline_service.publish_workflow(
            session=mock_session,
            pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
            account=SimpleNamespace(id="u1"),
        )
+
+
def test_get_default_block_config_returns_none_when_mapped_type_missing(mocker, rag_pipeline_service) -> None:
    """A valid node type absent from the class mapping should yield None."""
    from graphon.enums import BuiltinNodeTypes

    mocker.patch("services.rag_pipeline.rag_pipeline.get_node_type_classes_mapping", return_value={})

    result = rag_pipeline_service.get_default_block_config(BuiltinNodeTypes.START)
    assert result is None
+
+
def test_get_default_block_config_injects_http_request_filter(mocker, rag_pipeline_service) -> None:
    """HTTP-request blocks should receive an http_request_config filter."""
    from graphon.enums import BuiltinNodeTypes

    http_node_cls = mocker.Mock()
    http_node_cls.get_default_config.return_value = {"type": "http-request"}
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.get_node_type_classes_mapping",
        return_value={BuiltinNodeTypes.HTTP_REQUEST: {"1": http_node_cls}},
    )
    mocker.patch("services.rag_pipeline.rag_pipeline.LATEST_VERSION", "1")

    rag_pipeline_service.get_default_block_config(BuiltinNodeTypes.HTTP_REQUEST)

    filters = http_node_cls.get_default_config.call_args.kwargs["filters"]
    assert "http_request_config" in filters
+
+
def test_run_draft_workflow_node_raises_when_workflow_missing(mocker, rag_pipeline_service) -> None:
    """Running a draft node without an initialized workflow should raise."""
    mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=None)

    with pytest.raises(ValueError, match="Workflow not initialized"):
        rag_pipeline_service.run_draft_workflow_node(
            SimpleNamespace(id="p1", tenant_id="t1"), "node-1", {}, SimpleNamespace(id="u1")
        )
+
+
def test_run_draft_workflow_node_saves_execution_and_variables(mocker, rag_pipeline_service) -> None:
    """Running a draft node should persist the execution via the repository,
    save draft variables via DraftVariableSaver, and return the DB record."""
    mocker.patch("services.rag_pipeline.rag_pipeline.db", mocker.Mock(engine=mocker.Mock()))
    pipeline = SimpleNamespace(id="p1", tenant_id="t1")
    account = SimpleNamespace(id="u1")
    draft_workflow = mocker.Mock(id="wf-1")
    draft_workflow.get_node_config_by_id.return_value = {"id": "node-1"}
    # Enclosing node info is consumed when saving draft variables.
    draft_workflow.get_enclosing_node_type_and_id.return_value = ("loop", "enclosing-node")
    mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=draft_workflow)

    # The node-run result arrives pre-converted into an execution record.
    execution = SimpleNamespace(id="exec-1", node_id="node-1", node_type="llm", process_data={}, outputs={})
    mocker.patch.object(rag_pipeline_service, "_handle_node_run_result", return_value=execution)

    repo = mocker.Mock()
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.DifyCoreRepositoryFactory.create_workflow_node_execution_repository",
        return_value=repo,
    )
    # The service re-reads the persisted execution through its own repository.
    rag_pipeline_service._node_execution_service_repo = mocker.Mock(get_execution_by_id=mocker.Mock(return_value="db"))
    saver = mocker.Mock()
    mocker.patch("services.rag_pipeline.rag_pipeline.DraftVariableSaver", return_value=saver)

    # Session() and session.begin() are both used as context managers.
    session_ctx = mocker.MagicMock()
    begin_ctx = mocker.MagicMock()
    session_ctx.begin.return_value = begin_ctx
    mocker.patch("services.rag_pipeline.rag_pipeline.Session", return_value=session_ctx)

    result = rag_pipeline_service.run_draft_workflow_node(pipeline, "node-1", {"q": "x"}, account)

    assert result == "db"
    # The execution is stamped with the draft workflow id before saving.
    assert execution.workflow_id == "wf-1"
    repo.save.assert_called_once_with(execution)
    saver.save.assert_called_once()
+
+
def test_run_datasource_workflow_node_returns_error_when_workflow_missing(mocker, rag_pipeline_service) -> None:
    """With no draft workflow, the generator should emit a datasource_error event."""
    mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=None)

    event_stream = rag_pipeline_service.run_datasource_workflow_node(
        pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
        node_id="node-1",
        user_inputs={},
        account=SimpleNamespace(id="u1"),
        datasource_type="online_document",
        is_published=False,
    )
    emitted = list(event_stream)

    assert emitted[0]["event"] == "datasource_error"
+
+
def test_run_datasource_workflow_node_online_document_success(mocker, rag_pipeline_service) -> None:
    """Online-document datasource runs should emit processing then completed events."""
    from core.datasource.entities.datasource_entities import DatasourceProviderType

    pipeline = SimpleNamespace(id="p1", tenant_id="t1")
    workflow = mocker.Mock()
    workflow.graph_dict = {
        "nodes": [
            {
                "id": "node-1",
                "data": {
                    "type": "datasource",
                    "plugin_id": "pid",
                    "provider_name": "notion",
                    "datasource_name": "online_document",
                    "datasource_parameters": {"workspace_id": {"value": None}, "page_id": {"value": "fixed"}},
                },
            }
        ]
    }
    mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)

    doc_runtime = mocker.Mock()
    doc_runtime.runtime = SimpleNamespace(credentials=None)
    doc_runtime.datasource_provider_type.return_value = DatasourceProviderType.ONLINE_DOCUMENT
    doc_runtime.get_online_document_pages.return_value = [SimpleNamespace(result=[{"id": "pg-1"}])]
    mocker.patch(
        "core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=doc_runtime
    )
    mocker.patch(
        "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials",
        return_value={"token": "x"},
    )

    emitted = list(
        rag_pipeline_service.run_datasource_workflow_node(
            pipeline=pipeline,
            node_id="node-1",
            user_inputs={},
            account=SimpleNamespace(id="u1"),
            datasource_type=DatasourceProviderType.ONLINE_DOCUMENT,
            is_published=True,
        )
    )

    assert emitted[0]["event"] == "datasource_processing"
    assert emitted[1]["event"] == "datasource_completed"
+
+
+def test_run_datasource_workflow_node_online_drive_success(mocker, rag_pipeline_service) -> None:
+ from core.datasource.entities.datasource_entities import DatasourceProviderType
+
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = mocker.Mock()
+ workflow.graph_dict = {
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "type": "datasource",
+ "plugin_id": "pid",
+ "provider_name": "drive",
+ "datasource_name": "online_drive",
+ "datasource_parameters": {"bucket": {"value": "bucket-1"}, "next_page_parameters": {"value": []}},
+ },
+ }
+ ]
+ }
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ runtime = mocker.Mock()
+ runtime.runtime = SimpleNamespace(credentials=None)
+ runtime.datasource_provider_type.return_value = DatasourceProviderType.ONLINE_DRIVE
+ runtime.online_drive_browse_files.return_value = [SimpleNamespace(result=[{"name": "f1"}])]
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials",
+ return_value={},
+ )
+
+ events = list(
+ rag_pipeline_service.run_datasource_workflow_node(
+ pipeline=pipeline,
+ node_id="node-1",
+ user_inputs={"bucket": "bucket-1"},
+ account=SimpleNamespace(id="u1"),
+ datasource_type=DatasourceProviderType.ONLINE_DRIVE,
+ is_published=True,
+ )
+ )
+
+ assert events[0]["event"] == "datasource_processing"
+ assert events[1]["event"] == "datasource_completed"
+
+
+def test_handle_node_run_result_default_value_strategy(mocker, rag_pipeline_service) -> None:
+ from datetime import datetime
+
+ from graphon.enums import BuiltinNodeTypes, ErrorStrategy, WorkflowNodeExecutionStatus
+ from graphon.graph_events import NodeRunFailedEvent
+ from graphon.node_events.base import NodeRunResult
+
+ node_instance = SimpleNamespace(
+ workflow_id="wf-1",
+ node_type=BuiltinNodeTypes.START,
+ title="Start",
+ error_strategy=ErrorStrategy.DEFAULT_VALUE,
+ default_value_dict={"fallback": "ok"},
+ graph_runtime_state=SimpleNamespace(variable_pool=mocker.Mock()),
+ )
+
+ failed_result = NodeRunResult(
+ status=WorkflowNodeExecutionStatus.FAILED,
+ error="boom",
+ error_type="runtime_error",
+ inputs={"x": 1},
+ )
+
+ def _events():
+ yield NodeRunFailedEvent(
+ id="e-1",
+ node_id="node-1",
+ node_type=BuiltinNodeTypes.START,
+ start_at=datetime.now(),
+ error="boom",
+ node_run_result=failed_result,
+ )
+
+ result = rag_pipeline_service._handle_node_run_result(
+ getter=lambda: (node_instance, _events()),
+ start_at=time.perf_counter(),
+ tenant_id="t1",
+ node_id="node-1",
+ )
+
+ assert result.status == WorkflowNodeExecutionStatus.EXCEPTION
+ assert result.outputs
+ assert result.outputs["fallback"] == "ok"
+
+
+def test_get_first_step_parameters_raises_when_datasource_node_missing(mocker, rag_pipeline_service) -> None:
+ workflow = SimpleNamespace(graph_dict={"nodes": []}, rag_pipeline_variables=[{"variable": "url"}])
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ with pytest.raises(ValueError, match="Datasource node data not found"):
+ rag_pipeline_service.get_first_step_parameters(SimpleNamespace(), "missing-node")
+
+
+def test_get_second_step_parameters_handles_string_and_list_variable_references(mocker, rag_pipeline_service) -> None:
+ workflow = SimpleNamespace(
+ rag_pipeline_variables=[
+ {"variable": "url", "belong_to_node_id": "node-1"},
+ {"variable": "bucket", "belong_to_node_id": "shared"},
+ {"variable": "keep", "belong_to_node_id": "node-1"},
+ ],
+ graph_dict={
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "datasource_parameters": {
+ "u": {"value": "{{#start.url#}}"},
+ "b": {"value": ["start", "bucket"]},
+ }
+ },
+ }
+ ]
+ },
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ result = rag_pipeline_service.get_second_step_parameters(SimpleNamespace(), "node-1")
+
+ assert result == [{"variable": "keep", "belong_to_node_id": "node-1"}]
+
+
+def test_get_rag_pipeline_workflow_run_node_executions_empty_when_run_missing(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ mocker.patch.object(rag_pipeline_service, "get_rag_pipeline_workflow_run", return_value=None)
+
+ result = rag_pipeline_service.get_rag_pipeline_workflow_run_node_executions(
+ pipeline=pipeline, run_id="run-1", user=SimpleNamespace(id="u1")
+ )
+
+ assert result == []
+
+
+def test_get_rag_pipeline_workflow_run_node_executions_returns_sorted_executions(mocker, rag_pipeline_service) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline.db", mocker.Mock(engine=mocker.Mock()))
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ mocker.patch.object(rag_pipeline_service, "get_rag_pipeline_workflow_run", return_value=SimpleNamespace(id="run-1"))
+ repo = mocker.Mock()
+ repo.get_db_models_by_workflow_run.return_value = ["n1", "n2"]
+ mocker.patch("services.rag_pipeline.rag_pipeline.SQLAlchemyWorkflowNodeExecutionRepository", return_value=repo)
+
+ result = rag_pipeline_service.get_rag_pipeline_workflow_run_node_executions(
+ pipeline=pipeline, run_id="run-1", user=SimpleNamespace(id="u1")
+ )
+
+ assert result == ["n1", "n2"]
+
+
+def test_get_recommended_plugins_returns_empty_when_no_active_plugins(mocker, rag_pipeline_service) -> None:
+ query = mocker.Mock()
+ query.where.return_value = query
+ query.order_by.return_value.all.return_value = []
+ mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")
+ mock_db.session.query.return_value = query
+
+ result = rag_pipeline_service.get_recommended_plugins("all")
+
+ assert result == {
+ "installed_recommended_plugins": [],
+ "uninstalled_recommended_plugins": [],
+ }
+
+
+def test_get_recommended_plugins_returns_installed_and_uninstalled(mocker, rag_pipeline_service) -> None:
+ plugin_a = SimpleNamespace(plugin_id="plugin-a")
+ plugin_b = SimpleNamespace(plugin_id="plugin-b")
+ query = mocker.Mock()
+ query.where.return_value = query
+ query.order_by.return_value.all.return_value = [plugin_a, plugin_b]
+ mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")
+ mock_db.session.query.return_value = query
+ mocker.patch("services.rag_pipeline.rag_pipeline.current_user", SimpleNamespace(id="u1", current_tenant_id="t1"))
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.BuiltinToolManageService.list_builtin_tools",
+ return_value=[SimpleNamespace(plugin_id="plugin-a", to_dict=lambda: {"plugin_id": "plugin-a"})],
+ )
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.marketplace.batch_fetch_plugin_by_ids",
+ return_value=[{"plugin_id": "plugin-b", "name": "Plugin B"}],
+ )
+
+ result = rag_pipeline_service.get_recommended_plugins("custom")
+
+ assert result["installed_recommended_plugins"] == [{"plugin_id": "plugin-a"}]
+ assert result["uninstalled_recommended_plugins"] == [{"plugin_id": "plugin-b", "name": "Plugin B"}]
+
+
+def test_get_node_last_run_delegates_to_repository(mocker, rag_pipeline_service) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline.db", mocker.Mock(engine=mocker.Mock()))
+ repo = mocker.Mock()
+ repo.get_node_last_execution.return_value = "node-exec"
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository",
+ return_value=repo,
+ )
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(id="wf1")
+
+ result = rag_pipeline_service.get_node_last_run(pipeline, workflow, "node-1")
+
+ assert result == "node-exec"
+
+
+def test_set_datasource_variables_raises_when_node_id_missing(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = mocker.Mock()
+ mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=workflow)
+
+ with pytest.raises(ValueError, match="Node id is required"):
+ rag_pipeline_service.set_datasource_variables(pipeline, {"start_node_id": ""}, SimpleNamespace(id="u1"))
+
+
+def test_get_default_block_configs_skips_empty_configs(mocker, rag_pipeline_service) -> None:
+ from graphon.enums import BuiltinNodeTypes
+
+ http_node = mocker.Mock()
+ http_node.get_default_config.return_value = {"type": "http-request"}
+ empty_node = mocker.Mock()
+ empty_node.get_default_config.return_value = None
+
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.get_node_type_classes_mapping",
+ return_value={
+ BuiltinNodeTypes.HTTP_REQUEST: {"1": http_node},
+ BuiltinNodeTypes.START: {"1": empty_node},
+ },
+ )
+ mocker.patch("services.rag_pipeline.rag_pipeline.LATEST_VERSION", "1")
+
+ result = rag_pipeline_service.get_default_block_configs()
+
+ assert result == [{"type": "http-request"}]
+ http_node.get_default_config.assert_called_once()
+ empty_node.get_default_config.assert_called_once()
+
+
+def test_run_datasource_workflow_node_returns_error_when_node_missing(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(graph_dict={"nodes": []})
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ events = list(
+ rag_pipeline_service.run_datasource_workflow_node(
+ pipeline=pipeline,
+ node_id="missing-node",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=True,
+ )
+ )
+
+ assert len(events) == 1
+ assert "Datasource node data not found" in events[0]["error"]
+
+
+def test_run_datasource_workflow_node_online_document_exception(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "plugin_id": "plugin-1",
+ "provider_name": "provider-1",
+ "datasource_name": "doc",
+ "datasource_parameters": {},
+ },
+ }
+ ]
+ }
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ runtime = mocker.Mock()
+
+ class _FailingIterator:
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ raise RuntimeError("doc failed")
+
+ runtime.get_online_document_pages.return_value = _FailingIterator()
+ runtime.datasource_provider_type.return_value = "online_document"
+
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ events = list(
+ rag_pipeline_service.run_datasource_workflow_node(
+ pipeline=pipeline,
+ node_id="node-1",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=True,
+ )
+ )
+
+ assert len(events) == 2
+ assert events[0]["event"] == "datasource_processing"
+ assert "doc failed" in events[1]["error"]
+
+
+def test_run_datasource_node_preview_raises_for_stream_non_string(mocker, rag_pipeline_service) -> None:
+ from core.datasource.entities.datasource_entities import DatasourceMessage
+
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "plugin_id": "plugin-1",
+ "provider_name": "provider-1",
+ "datasource_name": "doc",
+ "datasource_parameters": {},
+ },
+ }
+ ]
+ }
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ runtime = mocker.Mock()
+
+ def _bad_stream_generator(*args, **kwargs):
+ yield DatasourceMessage(
+ type=DatasourceMessage.MessageType.VARIABLE,
+ message=DatasourceMessage.VariableMessage(variable_name="content", variable_value=1, stream=True),
+ )
+
+ runtime.get_online_document_page_content.side_effect = _bad_stream_generator
+ runtime.datasource_provider_type.return_value = "online_document"
+
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ with pytest.raises(RuntimeError, match="must be a string"):
+ rag_pipeline_service.run_datasource_node_preview(
+ pipeline=pipeline,
+ node_id="node-1",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=True,
+ )
+
+
+def test_get_first_step_parameters_returns_empty_when_no_rag_variables(mocker, rag_pipeline_service) -> None:
+ workflow = SimpleNamespace(
+ graph_dict={"nodes": [{"id": "node-1", "data": {"datasource_parameters": {"url": {"value": "literal"}}}}]},
+ rag_pipeline_variables=[],
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ result = rag_pipeline_service.get_first_step_parameters(SimpleNamespace(), "node-1")
+
+ assert result == []
+
+
+def test_get_second_step_parameters_filters_first_step_variables(mocker, rag_pipeline_service) -> None:
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "datasource_parameters": {
+ "workspace": {"value": "{{#start.workspace#}}"},
+ "bucket": {"value": ["input", "bucket"]},
+ }
+ },
+ }
+ ]
+ },
+ rag_pipeline_variables=[
+ {"variable": "workspace", "belong_to_node_id": "shared"},
+ {"variable": "bucket", "belong_to_node_id": "shared"},
+ {"variable": "keep", "belong_to_node_id": "shared"},
+ {"variable": "other-node", "belong_to_node_id": "node-x"},
+ ],
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ result = rag_pipeline_service.get_second_step_parameters(SimpleNamespace(), "node-1")
+
+ assert result == [{"variable": "keep", "belong_to_node_id": "shared"}]
+
+
+def test_retry_error_document_raises_when_execution_log_not_found(mocker, rag_pipeline_service) -> None:
+ query = mocker.Mock()
+ query.where.return_value.first.return_value = None
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ with pytest.raises(ValueError, match="Document pipeline execution log not found"):
+ rag_pipeline_service.retry_error_document(
+ SimpleNamespace(), SimpleNamespace(id="doc-1"), SimpleNamespace(id="u1")
+ )
+
+
+def test_get_datasource_plugins_raises_when_workflow_not_found(mocker, rag_pipeline_service) -> None:
+ dataset = SimpleNamespace(pipeline_id="p1")
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [dataset, pipeline]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=None)
+
+ with pytest.raises(ValueError, match="Pipeline or workflow not found"):
+ rag_pipeline_service.get_datasource_plugins("t1", "d1", True)
+
+
+def test_handle_node_run_result_raises_when_no_terminal_event(mocker, rag_pipeline_service) -> None:
+ node_instance = SimpleNamespace(
+ workflow_id="wf-1",
+ node_type="start",
+ title="Start",
+ graph_runtime_state=SimpleNamespace(variable_pool=SimpleNamespace(get=lambda _: None)),
+ error_strategy=None,
+ )
+
+ def _event_generator():
+ yield object()
+
+ with pytest.raises(ValueError, match="Node run failed with no run result"):
+ rag_pipeline_service._handle_node_run_result(
+ getter=lambda: (node_instance, _event_generator()),
+ start_at=time.perf_counter(),
+ tenant_id="t1",
+ node_id="node-1",
+ )
+
+
+def test_handle_node_run_result_marks_document_error_for_published_invoke(mocker, rag_pipeline_service) -> None:
+ from graphon.enums import WorkflowNodeExecutionStatus
+ from graphon.graph_events import NodeRunFailedEvent
+ from graphon.node_events.base import NodeRunResult
+
+ from core.app.entities.app_invoke_entities import InvokeFrom
+
+ class FakeVariablePool:
+ def __init__(self):
+ self._values = {
+ ("sys", "invoke_from"): SimpleNamespace(value=InvokeFrom.PUBLISHED_PIPELINE),
+ ("sys", "document_id"): SimpleNamespace(value="doc-1"),
+ }
+
+ def get(self, path):
+ return self._values.get(tuple(path))
+
+ node_instance = SimpleNamespace(
+ workflow_id="wf-1",
+ node_type="start",
+ title="Start",
+ graph_runtime_state=SimpleNamespace(variable_pool=FakeVariablePool()),
+ error_strategy=None,
+ )
+ run_result = NodeRunResult(
+ status=WorkflowNodeExecutionStatus.FAILED,
+ error="boom",
+ error_type="runtime",
+ inputs={},
+ outputs={},
+ )
+
+ def _event_generator():
+ yield NodeRunFailedEvent(
+ id="evt-1",
+ start_at=time.time(),
+ node_id="node-1",
+ node_type="start",
+ node_run_result=run_result,
+ error="boom",
+ route_node_id=None,
+ )
+
+ document = SimpleNamespace(indexing_status="waiting", error=None)
+ query = mocker.Mock()
+ query.where.return_value.first.return_value = document
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+ add_mock = mocker.patch("services.rag_pipeline.rag_pipeline.db.session.add")
+ commit_mock = mocker.patch("services.rag_pipeline.rag_pipeline.db.session.commit")
+
+ result = rag_pipeline_service._handle_node_run_result(
+ getter=lambda: (node_instance, _event_generator()),
+ start_at=time.perf_counter(),
+ tenant_id="t1",
+ node_id="node-1",
+ )
+
+ assert result.status == WorkflowNodeExecutionStatus.FAILED
+ assert document.indexing_status == "error"
+ assert document.error == "boom"
+ add_mock.assert_called_once_with(document)
+ commit_mock.assert_called_once()
+
+
+def test_run_datasource_node_preview_raises_for_unsupported_provider(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "plugin_id": "plugin-1",
+ "provider_name": "provider-1",
+ "datasource_name": "doc",
+ "datasource_parameters": {},
+ },
+ }
+ ]
+ }
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+ runtime = mocker.Mock()
+ runtime.datasource_provider_type.return_value = "unsupported"
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ with pytest.raises(RuntimeError, match="Unsupported datasource provider"):
+ rag_pipeline_service.run_datasource_node_preview(
+ pipeline=pipeline,
+ node_id="node-1",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="website_crawl",
+ is_published=True,
+ )
+
+
+def test_publish_customized_pipeline_template_raises_for_missing_pipeline(mocker, rag_pipeline_service) -> None:
+ query = mocker.Mock()
+ query.where.return_value.first.return_value = None
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ with pytest.raises(ValueError, match="Pipeline not found"):
+ rag_pipeline_service.publish_customized_pipeline_template("p1", {})
+
+
+def test_publish_customized_pipeline_template_raises_for_missing_workflow_id(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1", workflow_id=None)
+ query = mocker.Mock()
+ query.where.return_value.first.return_value = pipeline
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ with pytest.raises(ValueError, match="Pipeline workflow not found"):
+ rag_pipeline_service.publish_customized_pipeline_template("p1", {"name": "template-name"})
+
+
+def test_get_pipeline_raises_when_dataset_missing(mocker, rag_pipeline_service) -> None:
+ query = mocker.Mock()
+ query.where.return_value.first.return_value = None
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ with pytest.raises(ValueError, match="Dataset not found"):
+ rag_pipeline_service.get_pipeline("t1", "d1")
+
+
+def test_get_pipeline_raises_when_pipeline_missing(mocker, rag_pipeline_service) -> None:
+ dataset = SimpleNamespace(pipeline_id="p1")
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [dataset, None]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ with pytest.raises(ValueError, match="Pipeline not found"):
+ rag_pipeline_service.get_pipeline("t1", "d1")
+
+
+def test_init_uses_default_sessionmaker_when_none(mocker) -> None:
+ default_session_maker = mocker.Mock()
+ mocker.patch("services.rag_pipeline.rag_pipeline.sessionmaker", return_value=default_session_maker)
+ mocker.patch("services.rag_pipeline.rag_pipeline.db", SimpleNamespace(engine=mocker.Mock()))
+ create_exec_repo = mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository"
+ )
+ create_run_repo = mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DifyAPIRepositoryFactory.create_api_workflow_run_repository"
+ )
+
+ RagPipelineService(session_maker=None)
+
+ create_exec_repo.assert_called_once_with(default_session_maker)
+ create_run_repo.assert_called_once_with(default_session_maker)
+
+
+def test_get_pipeline_templates_builtin_en_us_no_fallback(mocker) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline.dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_MODE", "remote")
+ retrieval = mocker.Mock()
+ retrieval.get_pipeline_templates.return_value = {"pipeline_templates": []}
+ factory = mocker.patch("services.rag_pipeline.rag_pipeline.PipelineTemplateRetrievalFactory")
+ factory.get_pipeline_template_factory.return_value.return_value = retrieval
+ builtin = factory.get_built_in_pipeline_template_retrieval.return_value
+
+ result = RagPipelineService.get_pipeline_templates(type="built-in", language="en-US")
+
+ assert result == {"pipeline_templates": []}
+ builtin.fetch_pipeline_templates_from_builtin.assert_not_called()
+
+
+def test_update_customized_pipeline_template_commits_when_name_empty(mocker) -> None:
+ template = SimpleNamespace(name="old", description="old", icon={}, updated_by=None)
+ query = mocker.Mock()
+ query.where.return_value.first.return_value = template
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+ commit = mocker.patch("services.rag_pipeline.rag_pipeline.db.session.commit")
+ mocker.patch("services.rag_pipeline.rag_pipeline.current_user", SimpleNamespace(id="u1", current_tenant_id="t1"))
+
+ info = PipelineTemplateInfoEntity(name="", description="updated", icon_info=IconInfo(icon="i"))
+ result = RagPipelineService.update_customized_pipeline_template("tpl-1", info)
+
+ assert result.description == "updated"
+ commit.assert_called_once()
+
+
+def test_get_all_published_workflow_without_filters_has_no_more(rag_pipeline_service) -> None:
+ session = SimpleNamespace(scalars=lambda stmt: SimpleNamespace(all=lambda: ["wf1"]))
+ pipeline = SimpleNamespace(id="p1", workflow_id="wf-live")
+
+ workflows, has_more = rag_pipeline_service.get_all_published_workflow(
+ session=session,
+ pipeline=pipeline,
+ page=1,
+ limit=2,
+ user_id=None,
+ named_only=False,
+ )
+
+ assert workflows == ["wf1"]
+ assert has_more is False
+
+
+def test_publish_workflow_skips_dataset_update_for_non_knowledge_nodes(mocker, rag_pipeline_service) -> None:
+ draft = SimpleNamespace(
+ type="workflow",
+ graph={"nodes": [{"data": {"type": "start"}}]},
+ features={},
+ environment_variables=[],
+ conversation_variables=[],
+ rag_pipeline_variables=[],
+ )
+ session = mocker.Mock()
+ session.scalar.return_value = draft
+ published = SimpleNamespace(graph_dict={"nodes": [{"data": {"type": "start"}}]})
+ mocker.patch("services.rag_pipeline.rag_pipeline.select")
+ mocker.patch("services.rag_pipeline.rag_pipeline.Workflow.new", return_value=published)
+
+ result = rag_pipeline_service.publish_workflow(
+ session=session,
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1", is_published=False, retrieve_dataset=lambda session: None),
+ account=SimpleNamespace(id="u1"),
+ )
+
+ assert result is published
+
+
+def test_get_default_block_config_returns_none_when_default_empty(mocker, rag_pipeline_service) -> None:
+ from graphon.enums import BuiltinNodeTypes
+
+ node_cls = mocker.Mock()
+ node_cls.get_default_config.return_value = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.get_node_type_classes_mapping",
+ return_value={BuiltinNodeTypes.START: {"1": node_cls}},
+ )
+ mocker.patch("services.rag_pipeline.rag_pipeline.LATEST_VERSION", "1")
+
+ assert rag_pipeline_service.get_default_block_config("start") is None
+
+
+def test_run_datasource_workflow_node_handles_variable_parameter_types(mocker, rag_pipeline_service) -> None:
+ from core.datasource.entities.datasource_entities import DatasourceProviderType
+
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "plugin_id": "p",
+ "provider_name": "provider",
+ "datasource_name": "crawl",
+ "datasource_parameters": {
+ "a": {"value": None},
+ "b": {"value": "literal"},
+ "c": {"value": ["input", "k"]},
+ },
+ },
+ }
+ ]
+ }
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+ runtime = mocker.Mock()
+
+ def crawl_gen(**kwargs):
+ yield SimpleNamespace(result=SimpleNamespace(status="completed", total=1, completed=1, web_info_list=[]))
+
+ runtime.get_website_crawl.side_effect = crawl_gen
+ runtime.datasource_provider_type.return_value = DatasourceProviderType.WEBSITE_CRAWL
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ events = list(
+ rag_pipeline_service.run_datasource_workflow_node(
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
+ node_id="node-1",
+ user_inputs={"k": "mapped"},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="website_crawl",
+ is_published=True,
+ )
+ )
+
+ assert events
+ assert events[0]["data"] == []
+
+
+def test_run_datasource_workflow_node_online_drive_branch(mocker, rag_pipeline_service) -> None:
+ from core.datasource.entities.datasource_entities import DatasourceProviderType
+
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "node-1",
+ "data": {
+ "plugin_id": "p",
+ "provider_name": "provider",
+ "datasource_name": "drive",
+ "datasource_parameters": {},
+ },
+ }
+ ]
+ }
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+ runtime = mocker.Mock()
+
+ def drive_gen(**kwargs):
+ yield SimpleNamespace(result={"items": [1]})
+
+ runtime.online_drive_browse_files.side_effect = drive_gen
+ runtime.datasource_provider_type.return_value = DatasourceProviderType.ONLINE_DRIVE
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ events = list(
+ rag_pipeline_service.run_datasource_workflow_node(
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
+ node_id="node-1",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_drive",
+ is_published=True,
+ )
+ )
+
+ assert len(events) == 2
+ assert events[1]["data"] == {"items": [1]}
+
+
+def test_run_datasource_node_preview_not_published_uses_draft(mocker, rag_pipeline_service) -> None:
+ from core.datasource.entities.datasource_entities import DatasourceMessage
+
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "n1",
+ "data": {
+ "plugin_id": "p",
+ "provider_name": "provider",
+ "datasource_name": "doc",
+ "datasource_parameters": {"workspace_id": {"value": "w"}},
+ },
+ }
+ ]
+ }
+ )
+ get_draft = mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=workflow)
+ runtime = mocker.Mock()
+
+ def doc_gen(**kwargs):
+ yield DatasourceMessage(
+ type=DatasourceMessage.MessageType.VARIABLE,
+ message=DatasourceMessage.VariableMessage(variable_name="x", variable_value="v", stream=False),
+ )
+
+ runtime.get_online_document_page_content.side_effect = doc_gen
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ result = rag_pipeline_service.run_datasource_node_preview(
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
+ node_id="n1",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=False,
+ )
+
+ assert result == {"x": "v"}
+ get_draft.assert_called_once()
+
+
+def test_run_free_workflow_node_delegates_to_handle_result(mocker, rag_pipeline_service) -> None:
+ expected = SimpleNamespace(id="exec-1")
+ handle = mocker.patch.object(rag_pipeline_service, "_handle_node_run_result", return_value=expected)
+
+ result = rag_pipeline_service.run_free_workflow_node(
+ node_data={"type": "start"},
+ tenant_id="t1",
+ user_id="u1",
+ node_id="n1",
+ user_inputs={},
+ )
+
+ assert result is expected
+ handle.assert_called_once()
+
+
+def test_publish_customized_pipeline_template_raises_when_workflow_missing(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1", workflow_id="wf-1")
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [pipeline, None]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ with pytest.raises(ValueError, match="Workflow not found"):
+ rag_pipeline_service.publish_customized_pipeline_template("p1", {})
+
+
+def test_publish_customized_pipeline_template_raises_when_dataset_missing(mocker, rag_pipeline_service) -> None:
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1", workflow_id="wf-1")
+ workflow = SimpleNamespace(id="wf-1")
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [pipeline, workflow]
+ mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")
+ mock_db.engine = mocker.Mock()
+ mock_db.session.query.return_value = query
+ session_ctx = mocker.MagicMock()
+ session_ctx.__enter__.return_value = SimpleNamespace()
+ session_ctx.__exit__.return_value = False
+ mocker.patch("services.rag_pipeline.rag_pipeline.Session", return_value=session_ctx)
+ pipeline.retrieve_dataset = lambda session: None
+
+ with pytest.raises(ValueError, match="Dataset not found"):
+ rag_pipeline_service.publish_customized_pipeline_template("p1", {})
+
+
+def test_get_recommended_plugins_skips_manifest_when_missing(mocker, rag_pipeline_service) -> None:
+ plugin = SimpleNamespace(plugin_id="plugin-a")
+ query = mocker.Mock()
+ query.where.return_value = query
+ query.order_by.return_value.all.return_value = [plugin]
+ mock_db = mocker.patch("services.rag_pipeline.rag_pipeline.db")
+ mock_db.session.query.return_value = query
+ mocker.patch("services.rag_pipeline.rag_pipeline.current_user", SimpleNamespace(id="u1", current_tenant_id="t1"))
+ mocker.patch("services.rag_pipeline.rag_pipeline.BuiltinToolManageService.list_builtin_tools", return_value=[])
+ mocker.patch("services.rag_pipeline.rag_pipeline.marketplace.batch_fetch_plugin_by_ids", return_value=[])
+
+ result = rag_pipeline_service.get_recommended_plugins("all")
+
+ assert result["installed_recommended_plugins"] == []
+ assert result["uninstalled_recommended_plugins"] == []
+
+
+def test_retry_error_document_raises_when_pipeline_missing(mocker, rag_pipeline_service) -> None:
+ exec_log = SimpleNamespace(pipeline_id="p1")
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [exec_log, None]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ with pytest.raises(ValueError, match="Pipeline not found"):
+ rag_pipeline_service.retry_error_document(
+ SimpleNamespace(), SimpleNamespace(id="doc-1"), SimpleNamespace(id="u1")
+ )
+
+
+def test_retry_error_document_raises_when_workflow_missing(mocker, rag_pipeline_service) -> None:
+ exec_log = SimpleNamespace(pipeline_id="p1")
+ pipeline = SimpleNamespace(id="p1")
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [exec_log, pipeline]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=None)
+
+ with pytest.raises(ValueError, match="Workflow not found"):
+ rag_pipeline_service.retry_error_document(
+ SimpleNamespace(), SimpleNamespace(id="doc-1"), SimpleNamespace(id="u1")
+ )
+
+
+def test_get_datasource_plugins_returns_empty_for_non_datasource_nodes(mocker, rag_pipeline_service) -> None:
+ dataset = SimpleNamespace(pipeline_id="p1")
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(
+ graph_dict={"nodes": [{"id": "n1", "data": {"type": "start"}}]}, rag_pipeline_variables=[]
+ )
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [dataset, pipeline]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+
+ assert rag_pipeline_service.get_datasource_plugins("t1", "d1", True) == []
+
+
+def test_publish_workflow_raises_when_knowledge_index_dataset_missing(mocker, rag_pipeline_service) -> None:
+ draft = SimpleNamespace(
+ type="workflow",
+ graph={"nodes": [{"data": {"type": "knowledge-index"}}]},
+ features={},
+ environment_variables=[],
+ conversation_variables=[],
+ rag_pipeline_variables=[],
+ )
+ session = mocker.Mock()
+ session.scalar.return_value = draft
+ mocker.patch("services.rag_pipeline.rag_pipeline.select")
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.Workflow.new",
+ return_value=SimpleNamespace(graph_dict={"nodes": [{"data": {"type": "knowledge-index"}}]}),
+ )
+ mocker.patch("services.rag_pipeline.rag_pipeline.KnowledgeConfiguration.model_validate", return_value=mocker.Mock())
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1", is_published=False, retrieve_dataset=lambda session: None)
+
+ with pytest.raises(ValueError, match="Dataset not found"):
+ rag_pipeline_service.publish_workflow(session=session, pipeline=pipeline, account=SimpleNamespace(id="u1"))
+
+
+def test_run_datasource_node_preview_raises_when_workflow_missing(mocker, rag_pipeline_service) -> None:
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=None)
+
+ with pytest.raises(RuntimeError, match="Workflow not initialized"):
+ rag_pipeline_service.run_datasource_node_preview(
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
+ node_id="n1",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=True,
+ )
+
+
+def test_run_datasource_node_preview_raises_when_node_missing(mocker, rag_pipeline_service) -> None:
+ mocker.patch.object(
+ rag_pipeline_service, "get_published_workflow", return_value=SimpleNamespace(graph_dict={"nodes": []})
+ )
+
+ with pytest.raises(RuntimeError, match="Datasource node data not found"):
+ rag_pipeline_service.run_datasource_node_preview(
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
+ node_id="missing",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=True,
+ )
+
+
+def test_run_datasource_node_preview_keeps_existing_user_input(mocker, rag_pipeline_service) -> None:
+ from core.datasource.entities.datasource_entities import DatasourceMessage
+
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "n1",
+ "data": {
+ "plugin_id": "p",
+ "provider_name": "provider",
+ "datasource_name": "doc",
+ "datasource_parameters": {"workspace_id": {"value": "default"}},
+ },
+ }
+ ]
+ }
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+ runtime = mocker.Mock()
+
+ def gen(**kwargs):
+ request = kwargs["datasource_parameters"]
+ assert request.workspace_id == "existing"
+ yield DatasourceMessage(
+ type=DatasourceMessage.MessageType.VARIABLE,
+ message=DatasourceMessage.VariableMessage(variable_name="ok", variable_value="1", stream=False),
+ )
+
+ runtime.get_online_document_page_content.side_effect = gen
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ result = rag_pipeline_service.run_datasource_node_preview(
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
+ node_id="n1",
+ user_inputs={"workspace_id": "existing"},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=True,
+ )
+ assert result == {"ok": "1"}
+
+
+def test_run_datasource_node_preview_ignores_non_variable_messages(mocker, rag_pipeline_service) -> None:
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "n1",
+ "data": {
+ "plugin_id": "p",
+ "provider_name": "provider",
+ "datasource_name": "doc",
+ "datasource_parameters": {},
+ },
+ }
+ ]
+ }
+ )
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+ runtime = mocker.Mock()
+
+ def gen(**kwargs):
+ yield SimpleNamespace(type="log", message=None)
+
+ runtime.get_online_document_page_content.side_effect = gen
+ mocker.patch("core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime", return_value=runtime)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.get_datasource_credentials", return_value=None
+ )
+
+ result = rag_pipeline_service.run_datasource_node_preview(
+ pipeline=SimpleNamespace(id="p1", tenant_id="t1"),
+ node_id="n1",
+ user_inputs={},
+ account=SimpleNamespace(id="u1"),
+ datasource_type="online_document",
+ is_published=True,
+ )
+ assert result == {}
+
+
+def test_set_datasource_variables_raises_when_workflow_missing(mocker, rag_pipeline_service) -> None:
+ mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=None)
+
+ with pytest.raises(ValueError, match="Workflow not initialized"):
+ rag_pipeline_service.set_datasource_variables(
+ SimpleNamespace(id="p1", tenant_id="t1"),
+ {"start_node_id": "n1"},
+ SimpleNamespace(id="u1"),
+ )
+
+
+def test_get_datasource_plugins_handles_empty_datasource_data_and_non_published(mocker, rag_pipeline_service) -> None:
+ dataset = SimpleNamespace(pipeline_id="p1")
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(
+ graph_dict={"nodes": [{"id": "n1", "data": {"type": "datasource", "datasource_parameters": {}}}]},
+ rag_pipeline_variables=[{"variable": "v1", "belong_to_node_id": "shared"}],
+ )
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [dataset, pipeline]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+ mocker.patch.object(rag_pipeline_service, "get_draft_workflow", return_value=workflow)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.list_datasource_credentials", return_value=[]
+ )
+
+ result = rag_pipeline_service.get_datasource_plugins("t1", "d1", False)
+
+ assert len(result) == 1
+
+
+def test_get_datasource_plugins_extracts_user_inputs_and_credentials(mocker, rag_pipeline_service) -> None:
+ dataset = SimpleNamespace(pipeline_id="p1")
+ pipeline = SimpleNamespace(id="p1", tenant_id="t1")
+ workflow = SimpleNamespace(
+ graph_dict={
+ "nodes": [
+ {
+ "id": "n1",
+ "data": {
+ "type": "datasource",
+ "plugin_id": "plugin-1",
+ "provider_name": "provider",
+ "provider_type": "online_document",
+ "title": "Datasource",
+ "datasource_parameters": {
+ "a": {"value": "{{#start.v1#}}"},
+ "b": {"value": ["x", "v2"]},
+ },
+ },
+ }
+ ]
+ },
+ rag_pipeline_variables=[
+ {"variable": "v1", "belong_to_node_id": "shared"},
+ {"variable": "v2", "belong_to_node_id": "shared"},
+ {"variable": "v3", "belong_to_node_id": "shared"},
+ ],
+ )
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [dataset, pipeline]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+ mocker.patch.object(rag_pipeline_service, "get_published_workflow", return_value=workflow)
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline.DatasourceProviderService.list_datasource_credentials",
+ return_value=[{"id": "c1", "name": "Cred", "type": "api", "is_default": True}],
+ )
+
+ result = rag_pipeline_service.get_datasource_plugins("t1", "d1", True)
+
+ assert len(result) == 1
+ assert len(result[0]["user_input_variables"]) == 2
+ assert result[0]["credentials"][0]["id"] == "c1"
+
+
+def test_get_pipeline_returns_pipeline_when_found(mocker, rag_pipeline_service) -> None:
+ dataset = SimpleNamespace(pipeline_id="p1")
+ pipeline = SimpleNamespace(id="p1")
+ query = mocker.Mock()
+ query.where.return_value.first.side_effect = [dataset, pipeline]
+ mocker.patch("services.rag_pipeline.rag_pipeline.db.session.query", return_value=query)
+
+ result = rag_pipeline_service.get_pipeline("t1", "d1")
+
+ assert result is pipeline
diff --git a/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_task_proxy.py b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_task_proxy.py
new file mode 100644
index 0000000000..1a2d062208
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_task_proxy.py
@@ -0,0 +1,159 @@
+from types import SimpleNamespace
+from unittest.mock import Mock
+
+import pytest
+
+from services.rag_pipeline.rag_pipeline_task_proxy import RagPipelineTaskProxy
+
+
+@pytest.fixture
+def proxy(mocker):
+ """Create a RagPipelineTaskProxy with mocked dependencies."""
+ mocker.patch("services.rag_pipeline.rag_pipeline_task_proxy.TenantIsolatedTaskQueue")
+ entity = Mock()
+ entity.model_dump.return_value = {"doc": "data"}
+ return RagPipelineTaskProxy(
+ dataset_tenant_id="tenant-1",
+ user_id="user-1",
+ rag_pipeline_invoke_entities=[entity],
+ )
+
+
+# --- delay ---
+
+
+def test_delay_with_empty_entities_logs_warning_and_returns(mocker) -> None:
+ mocker.patch("services.rag_pipeline.rag_pipeline_task_proxy.TenantIsolatedTaskQueue")
+ proxy = RagPipelineTaskProxy(
+ dataset_tenant_id="tenant-1",
+ user_id="user-1",
+ rag_pipeline_invoke_entities=[],
+ )
+ dispatch_mock = mocker.patch.object(proxy, "_dispatch")
+
+ proxy.delay()
+
+ dispatch_mock.assert_not_called()
+
+
+def test_delay_with_entities_calls_dispatch(mocker, proxy) -> None:
+ dispatch_mock = mocker.patch.object(proxy, "_dispatch")
+
+ proxy.delay()
+
+ dispatch_mock.assert_called_once()
+
+
+# --- _dispatch ---
+
+
+def test_dispatch_billing_sandbox_uses_default_tenant_queue(mocker, proxy) -> None:
+ upload_mock = mocker.patch.object(proxy, "_upload_invoke_entities", return_value="file-1")
+ send_mock = mocker.patch.object(proxy, "_send_to_default_tenant_queue")
+
+ from enums.cloud_plan import CloudPlan
+
+ features = SimpleNamespace(
+ billing=SimpleNamespace(enabled=True, subscription=SimpleNamespace(plan=CloudPlan.SANDBOX))
+ )
+ mocker.patch.object(type(proxy), "features", new_callable=lambda: property(lambda self: features))
+
+ proxy._dispatch()
+
+ upload_mock.assert_called_once()
+ send_mock.assert_called_once_with("file-1")
+
+
+def test_dispatch_billing_non_sandbox_uses_priority_tenant_queue(mocker, proxy) -> None:
+ upload_mock = mocker.patch.object(proxy, "_upload_invoke_entities", return_value="file-1")
+ send_mock = mocker.patch.object(proxy, "_send_to_priority_tenant_queue")
+
+ from enums.cloud_plan import CloudPlan
+
+ features = SimpleNamespace(
+ billing=SimpleNamespace(enabled=True, subscription=SimpleNamespace(plan=CloudPlan.PROFESSIONAL))
+ )
+ mocker.patch.object(type(proxy), "features", new_callable=lambda: property(lambda self: features))
+
+ proxy._dispatch()
+
+ upload_mock.assert_called_once()
+ send_mock.assert_called_once_with("file-1")
+
+
+def test_dispatch_no_billing_uses_priority_direct_queue(mocker, proxy) -> None:
+ upload_mock = mocker.patch.object(proxy, "_upload_invoke_entities", return_value="file-1")
+ send_mock = mocker.patch.object(proxy, "_send_to_priority_direct_queue")
+
+ features = SimpleNamespace(billing=SimpleNamespace(enabled=False, subscription=SimpleNamespace(plan="free")))
+ mocker.patch.object(type(proxy), "features", new_callable=lambda: property(lambda self: features))
+
+ proxy._dispatch()
+
+ upload_mock.assert_called_once()
+ send_mock.assert_called_once_with("file-1")
+
+
+def test_dispatch_raises_on_empty_upload_file_id(mocker, proxy) -> None:
+ mocker.patch.object(proxy, "_upload_invoke_entities", return_value="")
+
+ features = SimpleNamespace(billing=SimpleNamespace(enabled=False, subscription=SimpleNamespace(plan="free")))
+ mocker.patch.object(type(proxy), "features", new_callable=lambda: property(lambda self: features))
+
+ with pytest.raises(ValueError, match="upload_file_id is empty"):
+ proxy._dispatch()
+
+
+# --- _send_to_direct_queue ---
+
+
+def test_send_to_direct_queue_calls_task_func_delay(mocker, proxy) -> None:
+ task_func = Mock()
+
+ proxy._send_to_direct_queue("file-1", task_func)
+
+ task_func.delay.assert_called_once_with(
+ rag_pipeline_invoke_entities_file_id="file-1",
+ tenant_id="tenant-1",
+ )
+
+
+# --- _send_to_tenant_queue ---
+
+
+def test_send_to_tenant_queue_pushes_when_task_key_exists(mocker, proxy) -> None:
+ proxy._tenant_isolated_task_queue.get_task_key.return_value = "existing-key"
+ task_func = Mock()
+
+ proxy._send_to_tenant_queue("file-1", task_func)
+
+ proxy._tenant_isolated_task_queue.push_tasks.assert_called_once_with(["file-1"])
+ task_func.delay.assert_not_called()
+
+
+def test_send_to_tenant_queue_sets_waiting_time_and_calls_delay(mocker, proxy) -> None:
+ proxy._tenant_isolated_task_queue.get_task_key.return_value = None
+ task_func = Mock()
+
+ proxy._send_to_tenant_queue("file-1", task_func)
+
+ proxy._tenant_isolated_task_queue.set_task_waiting_time.assert_called_once()
+ task_func.delay.assert_called_once_with(
+ rag_pipeline_invoke_entities_file_id="file-1",
+ tenant_id="tenant-1",
+ )
+
+
+# --- _upload_invoke_entities ---
+
+
+def test_upload_invoke_entities_returns_file_id(mocker, proxy) -> None:
+ upload_file = SimpleNamespace(id="uploaded-file-1")
+ file_service_cls = mocker.patch("services.rag_pipeline.rag_pipeline_task_proxy.FileService")
+ file_service_cls.return_value.upload_text.return_value = upload_file
+ mocker.patch("services.rag_pipeline.rag_pipeline_task_proxy.db", mocker.Mock(engine="fake-engine"))
+
+ result = proxy._upload_invoke_entities()
+
+ assert result == "uploaded-file-1"
+ file_service_cls.return_value.upload_text.assert_called_once()
diff --git a/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_transform_service.py b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_transform_service.py
new file mode 100644
index 0000000000..82e5e973c1
--- /dev/null
+++ b/api/tests/unit_tests/services/rag_pipeline/test_rag_pipeline_transform_service.py
@@ -0,0 +1,516 @@
+from datetime import UTC, datetime
+from types import SimpleNamespace
+from typing import cast
+
+import pytest
+
+from models.dataset import Dataset
+from services.entities.knowledge_entities.rag_pipeline_entities import KnowledgeConfiguration
+from services.rag_pipeline.rag_pipeline_transform_service import RagPipelineTransformService
+
+
+@pytest.mark.parametrize(
+ ("doc_form", "datasource_type", "indexing_technique"),
+ [
+ ("text_model", "upload_file", "high_quality"),
+ ("text_model", "upload_file", "economy"),
+ ("text_model", "notion_import", "high_quality"),
+ ("text_model", "notion_import", "economy"),
+ ("text_model", "website_crawl", "high_quality"),
+ ("text_model", "website_crawl", "economy"),
+ ("hierarchical_model", "upload_file", None),
+ ("hierarchical_model", "notion_import", None),
+ ("hierarchical_model", "website_crawl", None),
+ ],
+)
+def test_get_transform_yaml_returns_workflow(doc_form: str, datasource_type: str, indexing_technique: str | None):
+ service = RagPipelineTransformService()
+
+ result = service._get_transform_yaml(doc_form, datasource_type, indexing_technique)
+
+ assert isinstance(result, dict)
+ assert "workflow" in result
+
+
+def test_get_transform_yaml_raises_for_unsupported_doc_form() -> None:
+ service = RagPipelineTransformService()
+
+ with pytest.raises(ValueError, match="Unsupported doc form"):
+ service._get_transform_yaml("unknown", "upload_file", "high_quality")
+
+
+@pytest.mark.parametrize("doc_form", ["text_model", "hierarchical_model"])
+def test_get_transform_yaml_raises_for_unsupported_datasource_type(doc_form: str) -> None:
+ service = RagPipelineTransformService()
+
+ with pytest.raises(ValueError, match="Unsupported datasource type"):
+ service._get_transform_yaml(doc_form, "unsupported", "high_quality")
+
+
+def test_deal_file_extensions_filters_and_normalizes_extensions() -> None:
+ service = RagPipelineTransformService()
+ node = {"data": {"fileExtensions": ["pdf", "TXT", "exe"]}}
+
+ result = service._deal_file_extensions(node)
+
+ assert result["data"]["fileExtensions"] == ["pdf", "txt"]
+
+
+def test_deal_file_extensions_returns_original_when_empty() -> None:
+ service = RagPipelineTransformService()
+ node = {"data": {"fileExtensions": []}}
+
+ result = service._deal_file_extensions(node)
+
+ assert result is node
+
+
+def test_deal_dependencies_installs_missing_marketplace_plugins(mocker) -> None:
+ service = RagPipelineTransformService()
+
+ installer_cls = mocker.patch("services.rag_pipeline.rag_pipeline_transform_service.PluginInstaller")
+ installer_cls.return_value.list_plugins.return_value = [SimpleNamespace(plugin_id="installed-plugin")]
+
+ migration_cls = mocker.patch("services.rag_pipeline.rag_pipeline_transform_service.PluginMigration")
+ migration_cls.return_value._fetch_plugin_unique_identifier.return_value = "missing-plugin:1.0.0"
+
+ install_mock = mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.PluginService.install_from_marketplace_pkg"
+ )
+
+ pipeline_yaml = {
+ "dependencies": [
+ {"type": "marketplace", "value": {"plugin_unique_identifier": "installed-plugin:0.1.0"}},
+ {"type": "marketplace", "value": {"plugin_unique_identifier": "missing-plugin:0.1.0"}},
+ ]
+ }
+
+ service._deal_dependencies(pipeline_yaml, "tenant-1")
+
+ install_mock.assert_called_once_with("tenant-1", ["missing-plugin:1.0.0"])
+
+
+def test_transform_to_empty_pipeline_updates_dataset_and_commits(mocker) -> None:
+ service = RagPipelineTransformService()
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.current_user",
+ SimpleNamespace(id="user-1"),
+ )
+
+ class FakePipeline:
+ def __init__(self, **kwargs):
+ self.id = "pipeline-1"
+ self.tenant_id = kwargs["tenant_id"]
+ self.name = kwargs["name"]
+ self.description = kwargs["description"]
+ self.created_by = kwargs["created_by"]
+
+ mocker.patch("services.rag_pipeline.rag_pipeline_transform_service.Pipeline", FakePipeline)
+ session_mock = mocker.Mock()
+ add_mock = session_mock.add
+ flush_mock = session_mock.flush
+ commit_mock = session_mock.commit
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ dataset = SimpleNamespace(
+ id="dataset-1",
+ tenant_id="tenant-1",
+ name="Dataset",
+ description="desc",
+ pipeline_id=None,
+ runtime_mode="general",
+ updated_by=None,
+ updated_at=None,
+ )
+
+ result = service._transform_to_empty_pipeline(cast(Dataset, dataset))
+
+ assert result == {"pipeline_id": "pipeline-1", "dataset_id": "dataset-1", "status": "success"}
+ assert dataset.pipeline_id == "pipeline-1"
+ assert dataset.runtime_mode == "rag_pipeline"
+ assert dataset.updated_by == "user-1"
+ add_mock.assert_called()
+ flush_mock.assert_called_once()
+ commit_mock.assert_called_once()
+
+
+# --- transform_dataset ---
+
+
+def test_transform_dataset_returns_early_when_pipeline_exists(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(
+ id="d1",
+ pipeline_id="p1",
+ runtime_mode="rag_pipeline",
+ )
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = dataset
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ result = service.transform_dataset("d1")
+
+ assert result == {"pipeline_id": "p1", "dataset_id": "d1", "status": "success"}
+
+
+def test_transform_dataset_raises_for_dataset_not_found(mocker) -> None:
+ service = RagPipelineTransformService()
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = None
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ with pytest.raises(ValueError, match="Dataset not found"):
+ service.transform_dataset("d1")
+
+
+def test_transform_dataset_raises_for_external_dataset(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(
+ id="d1",
+ pipeline_id=None,
+ runtime_mode=None,
+ provider="external",
+ )
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = dataset
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ with pytest.raises(ValueError, match="External dataset is not supported"):
+ service.transform_dataset("d1")
+
+
+def test_transform_dataset_calls_empty_pipeline_when_no_datasource(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(
+ id="d1",
+ pipeline_id=None,
+ runtime_mode=None,
+ provider="vendor",
+ data_source_type=None,
+ indexing_technique=None,
+ )
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = dataset
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ empty_result = {"pipeline_id": "p-empty", "dataset_id": "d1", "status": "success"}
+ mocker.patch.object(service, "_transform_to_empty_pipeline", return_value=empty_result)
+
+ result = service.transform_dataset("d1")
+
+ assert result == empty_result
+
+
+def test_transform_dataset_calls_empty_pipeline_when_no_doc_form(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(
+ id="d1",
+ pipeline_id=None,
+ runtime_mode=None,
+ provider="vendor",
+ data_source_type="upload_file",
+ indexing_technique="high_quality",
+ doc_form=None,
+ )
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = dataset
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ empty_result = {"pipeline_id": "p-empty", "dataset_id": "d1", "status": "success"}
+ mocker.patch.object(service, "_transform_to_empty_pipeline", return_value=empty_result)
+
+ result = service.transform_dataset("d1")
+
+ assert result == empty_result
+
+
+# --- _deal_knowledge_index ---
+
+
+def test_deal_knowledge_index_high_quality_sets_embedding(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = cast(
+ Dataset,
+ SimpleNamespace(
+ embedding_model="text-embedding-ada-002",
+ embedding_model_provider="openai",
+ retrieval_model=None,
+ summary_index_setting=None,
+ ),
+ )
+ node = {
+ "data": {
+ "type": "knowledge-index",
+ "indexing_technique": "high_quality",
+ "embedding_model": "",
+ "embedding_model_provider": "",
+ "retrieval_model": {
+ "search_method": "semantic_search",
+ "reranking_enable": False,
+ "reranking_mode": None,
+ "reranking_model": None,
+ "weights": None,
+ "top_k": 3,
+ "score_threshold_enabled": False,
+ "score_threshold": None,
+ },
+ "chunk_structure": "text_model",
+ "keyword_number": None,
+ "summary_index_setting": None,
+ }
+ }
+
+ # Create KnowledgeConfiguration from node data
+ knowledge_configuration = KnowledgeConfiguration.model_validate(node.get("data", {}))
+ retrieval_model = knowledge_configuration.retrieval_model
+
+ result = service._deal_knowledge_index(
+ knowledge_configuration,
+ dataset,
+ "high_quality",
+ retrieval_model,
+ node,
+ )
+
+ assert result["data"]["embedding_model"] == "text-embedding-ada-002"
+ assert result["data"]["embedding_model_provider"] == "openai"
+
+
+# --- _deal_document_data ---
+
+
+def test_deal_document_data_notion(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(id="d1", pipeline_id="p1")
+ doc = SimpleNamespace(
+ id="doc1",
+ dataset_id="d1",
+ data_source_type="notion_import",
+ data_source_info_dict={
+ "notion_workspace_id": "ws1",
+ "notion_page_id": "page1",
+ "notion_page_icon": "icon1",
+ "type": "page",
+ "last_edited_time": 12345,
+ },
+ name="Notion Doc",
+ created_by="u1",
+ created_at=datetime.now(UTC).replace(tzinfo=None),
+ data_source_info=None,
+ )
+
+ scalars_mock = mocker.Mock()
+ scalars_mock.all.return_value = [doc]
+ session_mock = mocker.Mock()
+ session_mock.scalars.return_value = scalars_mock
+ add_mock = session_mock.add
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ service._deal_document_data(cast(Dataset, dataset))
+
+ assert doc.data_source_type == "online_document"
+ assert "page1" in doc.data_source_info
+    assert add_mock.call_count == 2  # one add for the updated document, one for the datasource log entry
+
+
+@pytest.mark.parametrize(("provider", "node_id"), [("firecrawl", "1752565402678"), ("jinareader", "1752491761974")])
+def test_deal_document_data_website(mocker, provider: str, node_id: str) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(id="d1", pipeline_id="p1")
+ doc = SimpleNamespace(
+ id="doc1",
+ dataset_id="d1",
+ data_source_type="website_crawl",
+ data_source_info_dict={
+ "url": "https://example.com",
+ "provider": provider,
+ },
+ name="Web Doc",
+ created_by="u1",
+ created_at=datetime.now(UTC).replace(tzinfo=None),
+ data_source_info=None,
+ )
+
+ scalars_mock = mocker.Mock()
+ scalars_mock.all.return_value = [doc]
+ session_mock = mocker.Mock()
+ session_mock.scalars.return_value = scalars_mock
+ add_mock = session_mock.add
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ service._deal_document_data(cast(Dataset, dataset))
+
+ assert doc.data_source_type == "website_crawl"
+ assert "example.com" in doc.data_source_info
+    # Verify the provider-specific datasource node id was recorded in the log entry
+ log = add_mock.call_args_list[1][0][0]
+ assert log.datasource_node_id == node_id
+
+
+# --- transform_dataset complex flow ---
+
+
+def test_transform_dataset_full_flow(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(
+ id="d1",
+ tenant_id="t1",
+ name="D",
+ description="d",
+ pipeline_id=None,
+ runtime_mode=None,
+ provider="vendor",
+ data_source_type="upload_file",
+ indexing_technique="high_quality",
+ doc_form="text_model",
+ retrieval_model={"search_method": "semantic_search", "top_k": 3},
+ embedding_model="m1",
+ embedding_model_provider="p1",
+ summary_index_setting=None,
+ chunk_structure=None,
+ )
+
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = dataset
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ mocker.patch.object(service, "_deal_dependencies")
+ mocker.patch.object(service, "_deal_document_data")
+ session_mock.commit = mocker.Mock()
+
+ # Mock current_user to have the same tenant_id as dataset
+ mock_current_user = SimpleNamespace(current_tenant_id="t1")
+ mocker.patch("services.rag_pipeline.rag_pipeline_transform_service.current_user", mock_current_user)
+
+ pipeline = SimpleNamespace(id="p-new")
+ mocker.patch.object(service, "_create_pipeline", return_value=pipeline)
+
+ result = service.transform_dataset("d1")
+
+ assert result["pipeline_id"] == "p-new"
+ assert dataset.runtime_mode == "rag_pipeline"
+ assert dataset.chunk_structure == "text_model"
+
+
+def test_transform_dataset_raises_for_unsupported_doc_form_after_pipeline_create(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(
+ id="d1",
+ tenant_id="t1",
+ name="D",
+ description="d",
+ pipeline_id=None,
+ runtime_mode=None,
+ provider="vendor",
+ data_source_type="upload_file",
+ indexing_technique="high_quality",
+ doc_form="unsupported",
+ retrieval_model=None,
+ )
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = dataset
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ mocker.patch.object(service, "_get_transform_yaml", return_value={"workflow": {"graph": {"nodes": []}}})
+ mocker.patch.object(service, "_deal_dependencies")
+ mocker.patch.object(service, "_create_pipeline", return_value=SimpleNamespace(id="p-new"))
+
+ with pytest.raises(ValueError, match="Unsupported doc form"):
+ service.transform_dataset("d1")
+
+
+def test_transform_dataset_raises_when_transform_yaml_missing_workflow(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(
+ id="d1",
+ tenant_id="t1",
+ name="D",
+ description="d",
+ pipeline_id=None,
+ runtime_mode=None,
+ provider="vendor",
+ data_source_type="upload_file",
+ indexing_technique="high_quality",
+ doc_form="text_model",
+ retrieval_model=None,
+ )
+ session_mock = mocker.Mock()
+ session_mock.get.return_value = dataset
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+ mocker.patch.object(service, "_get_transform_yaml", return_value={})
+ mocker.patch.object(service, "_deal_dependencies")
+
+ with pytest.raises(ValueError, match="Missing workflow data for rag pipeline"):
+ service.transform_dataset("d1")
+
+
+def test_create_pipeline_raises_when_workflow_data_missing() -> None:
+ service = RagPipelineTransformService()
+
+ with pytest.raises(ValueError, match="Missing workflow data for rag pipeline"):
+ service._create_pipeline({"rag_pipeline": {"name": "N"}})
+
+
+def test_deal_document_data_upload_file_with_existing_file(mocker) -> None:
+ service = RagPipelineTransformService()
+ dataset = SimpleNamespace(id="d1", pipeline_id="p1")
+ document = SimpleNamespace(
+ id="doc-1",
+ dataset_id="d1",
+ data_source_type="upload_file",
+ data_source_info_dict={"upload_file_id": "file-1"},
+ name="Doc",
+ created_by="u1",
+ created_at=datetime.now(UTC).replace(tzinfo=None),
+ data_source_info=None,
+ )
+ upload_file = SimpleNamespace(name="f.txt", size=10, extension="txt", mime_type="text/plain")
+
+ scalars_mock = mocker.Mock()
+ scalars_mock.all.return_value = [document]
+ session_mock = mocker.Mock()
+ session_mock.scalars.return_value = scalars_mock
+ session_mock.get.return_value = upload_file
+ add_mock = session_mock.add
+ mocker.patch(
+ "services.rag_pipeline.rag_pipeline_transform_service.db",
+ new=SimpleNamespace(session=session_mock),
+ )
+
+ service._deal_document_data(cast(Dataset, dataset))
+
+ assert document.data_source_type == "local_file"
+ assert "real_file_id" in document.data_source_info
+ assert add_mock.call_count >= 2
diff --git a/api/tests/unit_tests/services/retention/__init__.py b/api/tests/unit_tests/services/retention/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/unit_tests/services/retention/test_messages_clean_policy.py b/api/tests/unit_tests/services/retention/test_messages_clean_policy.py
new file mode 100644
index 0000000000..79c079c683
--- /dev/null
+++ b/api/tests/unit_tests/services/retention/test_messages_clean_policy.py
@@ -0,0 +1,135 @@
+import datetime
+from unittest.mock import MagicMock, patch
+
+from services.retention.conversation.messages_clean_policy import (
+ BillingDisabledPolicy,
+ BillingSandboxPolicy,
+ SimpleMessage,
+ create_message_clean_policy,
+)
+
+MODULE = "services.retention.conversation.messages_clean_policy"
+
+
+def _msg(msg_id: str, app_id: str, days_ago: int = 0) -> SimpleMessage:
+ return SimpleMessage(
+ id=msg_id,
+ app_id=app_id,
+ created_at=datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days_ago),
+ )
+
+
+class TestBillingDisabledPolicy:
+ def test_returns_all_message_ids(self):
+ policy = BillingDisabledPolicy()
+ msgs = [_msg("m1", "app1"), _msg("m2", "app2"), _msg("m3", "app1")]
+
+ result = policy.filter_message_ids(msgs, {"app1": "t1", "app2": "t2"})
+
+ assert set(result) == {"m1", "m2", "m3"}
+
+ def test_empty_messages_returns_empty(self):
+ assert BillingDisabledPolicy().filter_message_ids([], {}) == []
+
+
+class TestBillingSandboxPolicy:
+ def _policy(self, plans, *, graceful_days=21, whitelist=None, now=1_000_000_000):
+ return BillingSandboxPolicy(
+ plan_provider=lambda _ids: plans,
+ graceful_period_days=graceful_days,
+ tenant_whitelist=whitelist,
+ current_timestamp=now,
+ )
+
+ def test_empty_messages_returns_empty(self):
+ policy = self._policy({})
+ assert policy.filter_message_ids([], {"app1": "t1"}) == []
+
+ def test_empty_app_to_tenant_returns_empty(self):
+ policy = self._policy({})
+ assert policy.filter_message_ids([_msg("m1", "app1")], {}) == []
+
+ def test_empty_plans_returns_empty(self):
+ policy = self._policy({})
+ msgs = [_msg("m1", "app1")]
+ assert policy.filter_message_ids(msgs, {"app1": "t1"}) == []
+
+ def test_non_sandbox_tenant_skipped(self):
+ plans = {"t1": {"plan": "professional", "expiration_date": 0}}
+ policy = self._policy(plans)
+ msgs = [_msg("m1", "app1")]
+
+ assert policy.filter_message_ids(msgs, {"app1": "t1"}) == []
+
+ def test_sandbox_no_previous_subscription_deletes(self):
+ plans = {"t1": {"plan": "sandbox", "expiration_date": -1}}
+ policy = self._policy(plans)
+ msgs = [_msg("m1", "app1")]
+
+ assert policy.filter_message_ids(msgs, {"app1": "t1"}) == ["m1"]
+
+ def test_sandbox_expired_beyond_grace_period_deletes(self):
+ now = 1_000_000_000
+ expired_long_ago = now - (22 * 24 * 60 * 60) # 22 days ago > 21 day grace
+ plans = {"t1": {"plan": "sandbox", "expiration_date": expired_long_ago}}
+ policy = self._policy(plans, now=now)
+ msgs = [_msg("m1", "app1")]
+
+ assert policy.filter_message_ids(msgs, {"app1": "t1"}) == ["m1"]
+
+ def test_sandbox_within_grace_period_kept(self):
+ now = 1_000_000_000
+ expired_recently = now - (10 * 24 * 60 * 60) # 10 days ago < 21 day grace
+ plans = {"t1": {"plan": "sandbox", "expiration_date": expired_recently}}
+ policy = self._policy(plans, now=now)
+ msgs = [_msg("m1", "app1")]
+
+ assert policy.filter_message_ids(msgs, {"app1": "t1"}) == []
+
+ def test_whitelisted_tenant_skipped(self):
+ plans = {"t1": {"plan": "sandbox", "expiration_date": -1}}
+ policy = self._policy(plans, whitelist=["t1"])
+ msgs = [_msg("m1", "app1")]
+
+ assert policy.filter_message_ids(msgs, {"app1": "t1"}) == []
+
+ def test_message_without_tenant_mapping_skipped(self):
+ plans = {"t1": {"plan": "sandbox", "expiration_date": -1}}
+ policy = self._policy(plans)
+ msgs = [_msg("m1", "unmapped_app")]
+
+ assert policy.filter_message_ids(msgs, {"app1": "t1"}) == []
+
+ def test_mixed_tenants_only_sandbox_deleted(self):
+ plans = {
+ "t_sandbox": {"plan": "sandbox", "expiration_date": -1},
+ "t_pro": {"plan": "professional", "expiration_date": 0},
+ }
+ policy = self._policy(plans)
+ msgs = [_msg("m1", "app_sandbox"), _msg("m2", "app_pro")]
+ app_map = {"app_sandbox": "t_sandbox", "app_pro": "t_pro"}
+
+ result = policy.filter_message_ids(msgs, app_map)
+
+ assert result == ["m1"]
+
+
+class TestCreateMessageCleanPolicy:
+ def test_billing_disabled_returns_disabled_policy(self):
+ with patch(f"{MODULE}.dify_config") as cfg:
+ cfg.BILLING_ENABLED = False
+ policy = create_message_clean_policy()
+
+ assert isinstance(policy, BillingDisabledPolicy)
+
+ def test_billing_enabled_returns_sandbox_policy(self):
+ with (
+ patch(f"{MODULE}.dify_config") as cfg,
+ patch(f"{MODULE}.BillingService") as bs,
+ ):
+ cfg.BILLING_ENABLED = True
+ bs.get_expired_subscription_cleanup_whitelist.return_value = ["wl1"]
+ bs.get_plan_bulk_with_cache = MagicMock()
+ policy = create_message_clean_policy(graceful_period_days=30)
+
+ assert isinstance(policy, BillingSandboxPolicy)
diff --git a/api/tests/unit_tests/services/test_audio_service.py b/api/tests/unit_tests/services/test_audio_service.py
index 175fd3ee01..cede6671ce 100644
--- a/api/tests/unit_tests/services/test_audio_service.py
+++ b/api/tests/unit_tests/services/test_audio_service.py
@@ -421,11 +421,8 @@ class TestAudioServiceTTS:
answer="Message answer text",
)
- # Mock database query
- mock_query = MagicMock()
- mock_db_session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = message
+ # Mock database lookup
+ mock_db_session.get.return_value = message
# Mock ModelManager
mock_model_manager = mock_model_manager_class.return_value
@@ -568,11 +565,8 @@ class TestAudioServiceTTS:
# Arrange
app = factory.create_app_mock()
- # Mock database query returning None
- mock_query = MagicMock()
- mock_db_session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = None
+ # Mock database lookup returning None
+ mock_db_session.get.return_value = None
# Act
result = AudioService.transcript_tts(
@@ -594,11 +588,8 @@ class TestAudioServiceTTS:
status=MessageStatus.NORMAL,
)
- # Mock database query
- mock_query = MagicMock()
- mock_db_session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = message
+ # Mock database lookup
+ mock_db_session.get.return_value = message
# Act
result = AudioService.transcript_tts(
diff --git a/api/tests/unit_tests/services/test_billing_service.py b/api/tests/unit_tests/services/test_billing_service.py
index 316381f0ca..168ab6cf0d 100644
--- a/api/tests/unit_tests/services/test_billing_service.py
+++ b/api/tests/unit_tests/services/test_billing_service.py
@@ -38,7 +38,7 @@ class TestBillingServiceSendRequest:
@pytest.fixture
def mock_httpx_request(self):
"""Mock httpx.request for testing."""
- with patch("services.billing_service.httpx.request") as mock_request:
+ with patch("services.billing_service._http_client.request") as mock_request:
yield mock_request
@pytest.fixture
@@ -865,16 +865,11 @@ class TestBillingServiceAccountManagement:
mock_join = MagicMock(spec=TenantAccountJoin)
mock_join.role = TenantAccountRole.OWNER
- mock_query = MagicMock()
- mock_query.where.return_value.first.return_value = mock_join
- mock_db_session.query.return_value = mock_query
+ mock_db_session.scalar.return_value = mock_join
# Act - should not raise exception
BillingService.is_tenant_owner_or_admin(current_user)
- # Assert
- mock_db_session.query.assert_called_once()
-
def test_is_tenant_owner_or_admin_admin(self, mock_db_session):
"""Test tenant owner/admin check for admin role."""
# Arrange
@@ -885,16 +880,11 @@ class TestBillingServiceAccountManagement:
mock_join = MagicMock(spec=TenantAccountJoin)
mock_join.role = TenantAccountRole.ADMIN
- mock_query = MagicMock()
- mock_query.where.return_value.first.return_value = mock_join
- mock_db_session.query.return_value = mock_query
+ mock_db_session.scalar.return_value = mock_join
# Act - should not raise exception
BillingService.is_tenant_owner_or_admin(current_user)
- # Assert
- mock_db_session.query.assert_called_once()
-
def test_is_tenant_owner_or_admin_normal_user_raises_error(self, mock_db_session):
"""Test tenant owner/admin check raises error for normal user."""
# Arrange
@@ -905,9 +895,7 @@ class TestBillingServiceAccountManagement:
mock_join = MagicMock(spec=TenantAccountJoin)
mock_join.role = TenantAccountRole.NORMAL
- mock_query = MagicMock()
- mock_query.where.return_value.first.return_value = mock_join
- mock_db_session.query.return_value = mock_query
+ mock_db_session.scalar.return_value = mock_join
# Act & Assert
with pytest.raises(ValueError) as exc_info:
@@ -921,9 +909,7 @@ class TestBillingServiceAccountManagement:
current_user.id = "account-123"
current_user.current_tenant_id = "tenant-456"
- mock_query = MagicMock()
- mock_query.where.return_value.first.return_value = None
- mock_db_session.query.return_value = mock_query
+ mock_db_session.scalar.return_value = None
# Act & Assert
with pytest.raises(ValueError) as exc_info:
@@ -1135,9 +1121,7 @@ class TestBillingServiceEdgeCases:
mock_join.role = TenantAccountRole.EDITOR # Editor is not privileged
with patch("services.billing_service.db.session") as mock_session:
- mock_query = MagicMock()
- mock_query.where.return_value.first.return_value = mock_join
- mock_session.query.return_value = mock_query
+ mock_session.scalar.return_value = mock_join
# Act & Assert
with pytest.raises(ValueError) as exc_info:
@@ -1155,9 +1139,7 @@ class TestBillingServiceEdgeCases:
mock_join.role = TenantAccountRole.DATASET_OPERATOR # Dataset operator is not privileged
with patch("services.billing_service.db.session") as mock_session:
- mock_query = MagicMock()
- mock_query.where.return_value.first.return_value = mock_join
- mock_session.query.return_value = mock_query
+ mock_session.scalar.return_value = mock_join
# Act & Assert
with pytest.raises(ValueError) as exc_info:
diff --git a/api/tests/unit_tests/services/test_conversation_service.py b/api/tests/unit_tests/services/test_conversation_service.py
index 1bf4c0e172..a4359f00b8 100644
--- a/api/tests/unit_tests/services/test_conversation_service.py
+++ b/api/tests/unit_tests/services/test_conversation_service.py
@@ -355,15 +355,13 @@ class TestConversationServiceGetConversation:
from_account_id=user.id, from_source=ConversationFromSource.CONSOLE
)
- mock_query = mock_db_session.query.return_value
- mock_query.where.return_value.first.return_value = conversation
+ mock_db_session.scalar.return_value = conversation
# Act
result = ConversationService.get_conversation(app_model, "conv-123", user)
# Assert
assert result == conversation
- mock_db_session.query.assert_called_once_with(Conversation)
@patch("services.conversation_service.db.session")
def test_get_conversation_success_with_end_user(self, mock_db_session):
@@ -379,8 +377,7 @@ class TestConversationServiceGetConversation:
from_end_user_id=user.id, from_source=ConversationFromSource.API
)
- mock_query = mock_db_session.query.return_value
- mock_query.where.return_value.first.return_value = conversation
+ mock_db_session.scalar.return_value = conversation
# Act
result = ConversationService.get_conversation(app_model, "conv-123", user)
@@ -399,8 +396,7 @@ class TestConversationServiceGetConversation:
app_model = ConversationServiceTestDataFactory.create_app_mock()
user = ConversationServiceTestDataFactory.create_account_mock()
- mock_query = mock_db_session.query.return_value
- mock_query.where.return_value.first.return_value = None
+ mock_db_session.scalar.return_value = None
# Act & Assert
with pytest.raises(ConversationNotExistsError):
@@ -489,8 +485,7 @@ class TestConversationServiceAutoGenerateName:
)
# Mock database query to return message
- mock_query = mock_db_session.query.return_value
- mock_query.where.return_value.order_by.return_value.first.return_value = message
+ mock_db_session.scalar.return_value = message
# Mock LLM generator
mock_llm_generator.generate_conversation_name.return_value = "Generated Name"
@@ -518,8 +513,7 @@ class TestConversationServiceAutoGenerateName:
conversation = ConversationServiceTestDataFactory.create_conversation_mock()
# Mock database query to return None
- mock_query = mock_db_session.query.return_value
- mock_query.where.return_value.order_by.return_value.first.return_value = None
+ mock_db_session.scalar.return_value = None
# Act & Assert
with pytest.raises(MessageNotExistsError):
@@ -541,8 +535,7 @@ class TestConversationServiceAutoGenerateName:
)
# Mock database query to return message
- mock_query = mock_db_session.query.return_value
- mock_query.where.return_value.order_by.return_value.first.return_value = message
+ mock_db_session.scalar.return_value = message
# Mock LLM generator to raise exception
mock_llm_generator.generate_conversation_name.side_effect = Exception("LLM Error")
diff --git a/api/tests/unit_tests/services/test_datasource_provider_service.py b/api/tests/unit_tests/services/test_datasource_provider_service.py
index 3df7d500cf..da414816ff 100644
--- a/api/tests/unit_tests/services/test_datasource_provider_service.py
+++ b/api/tests/unit_tests/services/test_datasource_provider_service.py
@@ -1,5 +1,6 @@
from unittest.mock import MagicMock, patch
+import httpx
import pytest
from graphon.model_runtime.entities.provider_entities import FormType
from sqlalchemy.orm import Session
@@ -71,6 +72,8 @@ class TestDatasourceProviderService:
@pytest.fixture(autouse=True)
def patch_externals(self):
with (
+ patch("core.plugin.impl.base._httpx_client.request", side_effect=lambda **kw: httpx.request(**kw)),
+ patch("core.plugin.impl.base._httpx_client.stream", side_effect=lambda **kw: httpx.stream(**kw)),
patch("httpx.request") as mock_httpx,
patch("services.datasource_provider_service.dify_config") as mock_cfg,
patch("services.datasource_provider_service.encrypter") as mock_enc,
diff --git a/api/tests/unit_tests/services/test_external_dataset_service.py b/api/tests/unit_tests/services/test_external_dataset_service.py
index e2d62583f8..3709e1fa94 100644
--- a/api/tests/unit_tests/services/test_external_dataset_service.py
+++ b/api/tests/unit_tests/services/test_external_dataset_service.py
@@ -805,11 +805,12 @@ class TestExternalDatasetServiceGetAPI:
mock_query.first.return_value = expected_api
# Act
- result = ExternalDatasetService.get_external_knowledge_api(api_id)
+ tenant_id = "tenant-123"
+ result = ExternalDatasetService.get_external_knowledge_api(api_id, tenant_id)
# Assert
assert result.id == api_id
- mock_query.filter_by.assert_called_once_with(id=api_id)
+ mock_query.filter_by.assert_called_once_with(id=api_id, tenant_id=tenant_id)
@patch("services.external_knowledge_service.db")
def test_get_external_knowledge_api_not_found(self, mock_db, factory):
@@ -822,7 +823,7 @@ class TestExternalDatasetServiceGetAPI:
# Act & Assert
with pytest.raises(ValueError, match="api template not found"):
- ExternalDatasetService.get_external_knowledge_api("nonexistent-id")
+ ExternalDatasetService.get_external_knowledge_api("nonexistent-id", "tenant-123")
class TestExternalDatasetServiceUpdateAPI:
diff --git a/api/tests/unit_tests/services/test_message_service.py b/api/tests/unit_tests/services/test_message_service.py
index 101b9bff24..b6e990ebe0 100644
--- a/api/tests/unit_tests/services/test_message_service.py
+++ b/api/tests/unit_tests/services/test_message_service.py
@@ -151,12 +151,7 @@ class TestMessageServicePaginationByFirstId:
for i in range(5)
]
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = messages
+ mock_db.session.scalars.return_value.all.return_value = messages
# Act
result = MessageService.pagination_by_first_id(
@@ -196,12 +191,7 @@ class TestMessageServicePaginationByFirstId:
for i in range(5)
]
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = messages
+ mock_db.session.scalars.return_value.all.return_value = messages
# Act
result = MessageService.pagination_by_first_id(
@@ -246,31 +236,8 @@ class TestMessageServicePaginationByFirstId:
for i in range(5)
]
- # Setup query mocks
- mock_query_first = MagicMock()
- mock_query_history = MagicMock()
-
- query_calls = []
-
- def query_side_effect(*args):
- if args[0] == Message:
- query_calls.append(args)
- if len(query_calls) == 1:
- return mock_query_first
- else:
- return mock_query_history
-
- mock_db.session.query.side_effect = [mock_query_first, mock_query_history]
-
- # Setup first message query
- mock_query_first.where.return_value = mock_query_first
- mock_query_first.first.return_value = first_message
-
- # Setup history messages query
- mock_query_history.where.return_value = mock_query_history
- mock_query_history.order_by.return_value = mock_query_history
- mock_query_history.limit.return_value = mock_query_history
- mock_query_history.all.return_value = history_messages
+ mock_db.session.scalar.return_value = first_message
+ mock_db.session.scalars.return_value.all.return_value = history_messages
# Act
result = MessageService.pagination_by_first_id(
@@ -285,8 +252,6 @@ class TestMessageServicePaginationByFirstId:
# Assert
assert len(result.data) == 5
assert result.has_more is False
- mock_query_first.where.assert_called_once()
- mock_query_history.where.assert_called_once()
# Test 06: First message not found
@patch("services.message_service.db")
@@ -300,10 +265,7 @@ class TestMessageServicePaginationByFirstId:
mock_conversation_service.get_conversation.return_value = conversation
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = None # Message not found
+ mock_db.session.scalar.return_value = None # Message not found
# Act & Assert
with pytest.raises(FirstMessageNotExistsError):
@@ -336,12 +298,7 @@ class TestMessageServicePaginationByFirstId:
for i in range(11)
]
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = messages
+ mock_db.session.scalars.return_value.all.return_value = messages
# Act
result = MessageService.pagination_by_first_id(
@@ -369,12 +326,7 @@ class TestMessageServicePaginationByFirstId:
mock_conversation_service.get_conversation.return_value = conversation
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = []
+ mock_db.session.scalars.return_value.all.return_value = []
# Act
result = MessageService.pagination_by_first_id(
@@ -443,12 +395,7 @@ class TestMessageServicePaginationByLastId:
for i in range(5)
]
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = messages
+ mock_db.session.scalars.return_value.all.return_value = messages
# Act
result = MessageService.pagination_by_last_id(
@@ -485,22 +432,8 @@ class TestMessageServicePaginationByLastId:
for i in range(6, 10)
]
- # Setup base query mock that returns itself for chaining
- mock_base_query = MagicMock()
- mock_db.session.query.return_value = mock_base_query
-
- # First where() call for last_id lookup
- mock_query_last = MagicMock()
- mock_query_last.first.return_value = last_message
-
- # Second where() call for history messages
- mock_query_history = MagicMock()
- mock_query_history.order_by.return_value = mock_query_history
- mock_query_history.limit.return_value = mock_query_history
- mock_query_history.all.return_value = new_messages
-
- # Setup where() to return different mocks on consecutive calls
- mock_base_query.where.side_effect = [mock_query_last, mock_query_history]
+ mock_db.session.scalar.return_value = last_message
+ mock_db.session.scalars.return_value.all.return_value = new_messages
# Act
result = MessageService.pagination_by_last_id(
@@ -522,10 +455,7 @@ class TestMessageServicePaginationByLastId:
app = factory.create_app_mock()
user = factory.create_end_user_mock()
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = None # Message not found
+ mock_db.session.scalar.return_value = None # Message not found
# Act & Assert
with pytest.raises(LastMessageNotExistsError):
@@ -557,12 +487,7 @@ class TestMessageServicePaginationByLastId:
for i in range(5)
]
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = messages
+ mock_db.session.scalars.return_value.all.return_value = messages
# Act
result = MessageService.pagination_by_last_id(
@@ -576,8 +501,6 @@ class TestMessageServicePaginationByLastId:
# Assert
assert len(result.data) == 5
assert result.has_more is False
- # Verify conversation_id was used in query
- mock_query.where.assert_called()
mock_conversation_service.get_conversation.assert_called_once()
# Test 14: Pagination with include_ids filter
@@ -594,12 +517,7 @@ class TestMessageServicePaginationByLastId:
factory.create_message_mock(message_id="msg-003"),
]
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = messages
+ mock_db.session.scalars.return_value.all.return_value = messages
# Act
result = MessageService.pagination_by_last_id(
@@ -632,12 +550,7 @@ class TestMessageServicePaginationByLastId:
for i in range(11)
]
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.all.return_value = messages
+ mock_db.session.scalars.return_value.all.return_value = messages
# Act
result = MessageService.pagination_by_last_id(
@@ -743,17 +656,13 @@ class TestMessageServiceGetMessage:
user = factory.create_end_user_mock(user_id="end-user-123")
message = factory.create_message_mock()
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = message
+ mock_db.session.scalar.return_value = message
# Act
result = MessageService.get_message(app_model=app, user=user, message_id="msg-123")
# Assert
assert result == message
- mock_query.where.assert_called_once()
# Test 21: get_message success for Account (Admin)
@patch("services.message_service.db")
@@ -767,10 +676,7 @@ class TestMessageServiceGetMessage:
user.id = "account-123"
message = factory.create_message_mock()
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = message
+ mock_db.session.scalar.return_value = message
# Act
result = MessageService.get_message(app_model=app, user=user, message_id="msg-123")
@@ -786,10 +692,7 @@ class TestMessageServiceGetMessage:
app = factory.create_app_mock()
user = factory.create_end_user_mock()
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = None
+ mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(MessageNotExistsError):
@@ -899,21 +802,13 @@ class TestMessageServiceFeedback:
feedback = MagicMock()
feedback.to_dict.return_value = {"id": "fb-1"}
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.order_by.return_value = mock_query
- mock_query.limit.return_value = mock_query
- mock_query.offset.return_value = mock_query
- mock_query.all.return_value = [feedback]
+ mock_db.session.scalars.return_value.all.return_value = [feedback]
# Act
result = MessageService.get_all_messages_feedbacks(app_model=app, page=1, limit=10)
# Assert
assert result == [{"id": "fb-1"}]
- mock_query.limit.assert_called_with(10)
- mock_query.offset.assert_called_with(0)
class TestMessageServiceSuggestedQuestions:
@@ -1015,10 +910,7 @@ class TestMessageServiceSuggestedQuestions:
app_model_config.suggested_questions_after_answer_dict = {"enabled": True}
app_model_config.model_dict = {"provider": "openai", "name": "gpt-4"}
- mock_query = MagicMock()
- mock_db.session.query.return_value = mock_query
- mock_query.where.return_value = mock_query
- mock_query.first.return_value = app_model_config
+ mock_db.session.scalar.return_value = app_model_config
mock_llm_gen.generate_suggested_questions_after_answer.return_value = ["Q1?"]
@@ -1029,7 +921,6 @@ class TestMessageServiceSuggestedQuestions:
# Assert
assert result == ["Q1?"]
- mock_query.first.assert_called_once()
mock_llm_gen.generate_suggested_questions_after_answer.assert_called_once()
# Test 30: get_suggested_questions_after_answer - Disabled Error
diff --git a/api/tests/unit_tests/services/test_model_load_balancing_service.py b/api/tests/unit_tests/services/test_model_load_balancing_service.py
index b43e79dff5..bea288fb9b 100644
--- a/api/tests/unit_tests/services/test_model_load_balancing_service.py
+++ b/api/tests/unit_tests/services/test_model_load_balancing_service.py
@@ -158,7 +158,7 @@ def test_get_load_balancing_configs_should_insert_inherit_config_when_missing_fo
credential_id="cred-1",
enabled=True,
)
- mock_db.session.query.return_value.where.return_value.order_by.return_value.all.return_value = [config]
+ mock_db.session.scalars.return_value.all.return_value = [config]
mocker.patch(
"services.model_load_balancing_service.encrypter.get_decrypt_decoding",
return_value=("rsa", "cipher"),
@@ -216,7 +216,7 @@ def test_get_load_balancing_configs_should_reorder_existing_inherit_and_tolerate
credential_id=None,
enabled=False,
)
- mock_db.session.query.return_value.where.return_value.order_by.return_value.all.return_value = [
+ mock_db.session.scalars.return_value.all.return_value = [
normal_config,
inherit_config,
]
@@ -269,7 +269,7 @@ def test_get_load_balancing_config_should_return_none_when_config_not_found(
# Arrange
provider_configuration = _build_provider_configuration(provider_schema=_build_provider_credential_schema())
service.provider_manager.get_configurations.return_value = {"openai": provider_configuration}
- mock_db.session.query.return_value.where.return_value.first.return_value = None
+ mock_db.session.scalar.return_value = None
# Act
result = service.get_load_balancing_config("tenant-1", "openai", "gpt-4o-mini", ModelType.LLM.value, "cfg-1")
@@ -289,7 +289,7 @@ def test_get_load_balancing_config_should_return_obfuscated_payload_when_config_
}
service.provider_manager.get_configurations.return_value = {"openai": provider_configuration}
config = SimpleNamespace(id="cfg-1", name="primary", encrypted_config="not-json", enabled=True)
- mock_db.session.query.return_value.where.return_value.first.return_value = config
+ mock_db.session.scalar.return_value = config
# Act
result = service.get_load_balancing_config("tenant-1", "openai", "gpt-4o-mini", ModelType.LLM.value, "cfg-1")
@@ -317,7 +317,7 @@ def test_init_inherit_config_should_create_and_persist_inherit_configuration(
assert inherit_config.tenant_id == "tenant-1"
assert inherit_config.provider_name == "openai"
assert inherit_config.model_name == "gpt-4o-mini"
- assert inherit_config.model_type == "text-generation"
+ assert inherit_config.model_type == "llm"
assert inherit_config.name == "__inherit__"
mock_db.session.add.assert_called_once_with(inherit_config)
mock_db.session.commit.assert_called_once()
@@ -389,7 +389,7 @@ def test_update_load_balancing_configs_should_raise_value_error_when_credential_
provider_configuration = _build_provider_configuration(provider_schema=_build_provider_credential_schema())
service.provider_manager.get_configurations.return_value = {"openai": provider_configuration}
mock_db.session.scalars.return_value.all.return_value = []
- mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
+ mock_db.session.scalar.return_value = None
# Act + Assert
with pytest.raises(ValueError, match="Provider credential with id cred-1 not found"):
@@ -578,7 +578,7 @@ def test_update_load_balancing_configs_should_create_from_existing_provider_cred
service.provider_manager.get_configurations.return_value = {"openai": provider_configuration}
mock_db.session.scalars.return_value.all.return_value = []
credential_record = SimpleNamespace(credential_name="Main Credential", encrypted_config='{"api_key":"enc"}')
- mock_db.session.query.return_value.filter_by.return_value.first.return_value = credential_record
+ mock_db.session.scalar.return_value = credential_record
# Act
service.update_load_balancing_configs(
@@ -623,7 +623,7 @@ def test_validate_load_balancing_credentials_should_raise_value_error_when_confi
# Arrange
provider_configuration = _build_provider_configuration(provider_schema=_build_provider_credential_schema())
service.provider_manager.get_configurations.return_value = {"openai": provider_configuration}
- mock_db.session.query.return_value.where.return_value.first.return_value = None
+ mock_db.session.scalar.return_value = None
# Act + Assert
with pytest.raises(ValueError, match="Load balancing config cfg-1 does not exist"):
@@ -646,7 +646,7 @@ def test_validate_load_balancing_credentials_should_delegate_to_custom_validate_
provider_configuration = _build_provider_configuration(provider_schema=_build_provider_credential_schema())
service.provider_manager.get_configurations.return_value = {"openai": provider_configuration}
existing_config = SimpleNamespace(id="cfg-1")
- mock_db.session.query.return_value.where.return_value.first.return_value = existing_config
+ mock_db.session.scalar.return_value = existing_config
mock_validate = mocker.patch.object(service, "_custom_credentials_validate")
# Act
diff --git a/api/tests/unit_tests/services/test_ops_service.py b/api/tests/unit_tests/services/test_ops_service.py
index ab7b473790..7067e3b3dd 100644
--- a/api/tests/unit_tests/services/test_ops_service.py
+++ b/api/tests/unit_tests/services/test_ops_service.py
@@ -12,28 +12,27 @@ class TestOpsService:
@patch("services.ops_service.OpsTraceManager")
def test_get_tracing_app_config_no_config(self, mock_ops_trace_manager, mock_db):
# Arrange
- mock_db.session.query.return_value.where.return_value.first.return_value = None
+ mock_db.session.scalar.return_value = None
# Act
result = OpsService.get_tracing_app_config("app_id", "arize")
# Assert
assert result is None
- mock_db.session.query.assert_called_with(TraceAppConfig)
@patch("services.ops_service.db")
@patch("services.ops_service.OpsTraceManager")
def test_get_tracing_app_config_no_app(self, mock_ops_trace_manager, mock_db):
# Arrange
trace_config = MagicMock(spec=TraceAppConfig)
- mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, None]
+ mock_db.session.scalar.return_value = trace_config
+ mock_db.session.get.return_value = None
# Act
result = OpsService.get_tracing_app_config("app_id", "arize")
# Assert
assert result is None
- assert mock_db.session.query.call_count == 2
@patch("services.ops_service.db")
@patch("services.ops_service.OpsTraceManager")
@@ -43,7 +42,8 @@ class TestOpsService:
trace_config.tracing_config = None
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
+ mock_db.session.scalar.return_value = trace_config
+ mock_db.session.get.return_value = app
# Act & Assert
with pytest.raises(ValueError, match="Tracing config cannot be None."):
@@ -72,7 +72,8 @@ class TestOpsService:
trace_config.to_dict.return_value = {"tracing_config": {"project_url": default_url}}
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
+ mock_db.session.scalar.return_value = trace_config
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {}
@@ -97,7 +98,8 @@ class TestOpsService:
trace_config.to_dict.return_value = {"tracing_config": {"project_url": "success_url"}}
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
+ mock_db.session.scalar.return_value = trace_config
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {}
@@ -118,7 +120,8 @@ class TestOpsService:
trace_config.to_dict.return_value = {"tracing_config": {"project_url": "https://api.langfuse.com/project/key"}}
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
+ mock_db.session.scalar.return_value = trace_config
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.decrypt_tracing_config.return_value = {"host": "https://api.langfuse.com"}
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {"host": "https://api.langfuse.com"}
@@ -139,7 +142,8 @@ class TestOpsService:
trace_config.to_dict.return_value = {"tracing_config": {"project_url": "https://api.langfuse.com/"}}
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
+ mock_db.session.scalar.return_value = trace_config
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.decrypt_tracing_config.return_value = {"host": "https://api.langfuse.com"}
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {"host": "https://api.langfuse.com"}
@@ -189,7 +193,7 @@ class TestOpsService:
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
mock_ops_trace_manager.get_trace_config_project_url.side_effect = Exception("error")
mock_ops_trace_manager.get_trace_config_project_key.side_effect = Exception("error")
- mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock(spec=TraceAppConfig)
+ mock_db.session.scalar.return_value = MagicMock(spec=TraceAppConfig)
# Act
result = OpsService.create_tracing_app_config("app_id", provider, config)
@@ -206,7 +210,8 @@ class TestOpsService:
mock_ops_trace_manager.get_trace_config_project_key.return_value = "key"
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [None, app]
+ mock_db.session.scalar.return_value = None
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.encrypt_tracing_config.return_value = {}
# Act
@@ -223,7 +228,7 @@ class TestOpsService:
# Arrange
provider = TracingProviderEnum.ARIZE
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
- mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock(spec=TraceAppConfig)
+ mock_db.session.scalar.return_value = MagicMock(spec=TraceAppConfig)
# Act
result = OpsService.create_tracing_app_config("app_id", provider, {})
@@ -237,7 +242,8 @@ class TestOpsService:
# Arrange
provider = TracingProviderEnum.ARIZE
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
- mock_db.session.query.return_value.where.return_value.first.side_effect = [None, None]
+ mock_db.session.scalar.return_value = None
+ mock_db.session.get.return_value = None
# Act
result = OpsService.create_tracing_app_config("app_id", provider, {})
@@ -253,7 +259,8 @@ class TestOpsService:
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [None, app]
+ mock_db.session.scalar.return_value = None
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.encrypt_tracing_config.return_value = {}
# Act
@@ -274,7 +281,8 @@ class TestOpsService:
mock_ops_trace_manager.get_trace_config_project_url.return_value = "http://project_url"
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [None, app]
+ mock_db.session.scalar.return_value = None
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.encrypt_tracing_config.return_value = {"encrypted": "config"}
# Act
@@ -297,7 +305,7 @@ class TestOpsService:
def test_update_tracing_app_config_no_config(self, mock_ops_trace_manager, mock_db):
# Arrange
provider = TracingProviderEnum.ARIZE
- mock_db.session.query.return_value.where.return_value.first.return_value = None
+ mock_db.session.scalar.return_value = None
# Act
result = OpsService.update_tracing_app_config("app_id", provider, {})
@@ -311,7 +319,8 @@ class TestOpsService:
# Arrange
provider = TracingProviderEnum.ARIZE
current_config = MagicMock(spec=TraceAppConfig)
- mock_db.session.query.return_value.where.return_value.first.side_effect = [current_config, None]
+ mock_db.session.scalar.return_value = current_config
+ mock_db.session.get.return_value = None
# Act
result = OpsService.update_tracing_app_config("app_id", provider, {})
@@ -327,7 +336,8 @@ class TestOpsService:
current_config = MagicMock(spec=TraceAppConfig)
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [current_config, app]
+ mock_db.session.scalar.return_value = current_config
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
mock_ops_trace_manager.check_trace_config_is_effective.return_value = False
@@ -344,7 +354,8 @@ class TestOpsService:
current_config.to_dict.return_value = {"some": "data"}
app = MagicMock(spec=App)
app.tenant_id = "tenant_id"
- mock_db.session.query.return_value.where.return_value.first.side_effect = [current_config, app]
+ mock_db.session.scalar.return_value = current_config
+ mock_db.session.get.return_value = app
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
@@ -358,7 +369,7 @@ class TestOpsService:
@patch("services.ops_service.db")
def test_delete_tracing_app_config_no_config(self, mock_db):
# Arrange
- mock_db.session.query.return_value.where.return_value.first.return_value = None
+ mock_db.session.scalar.return_value = None
# Act
result = OpsService.delete_tracing_app_config("app_id", "arize")
@@ -370,7 +381,7 @@ class TestOpsService:
def test_delete_tracing_app_config_success(self, mock_db):
# Arrange
trace_config = MagicMock(spec=TraceAppConfig)
- mock_db.session.query.return_value.where.return_value.first.return_value = trace_config
+ mock_db.session.scalar.return_value = trace_config
# Act
result = OpsService.delete_tracing_app_config("app_id", "arize")
diff --git a/api/tests/unit_tests/services/test_vector_service.py b/api/tests/unit_tests/services/test_vector_service.py
index 598ff3fc3a..a78a033f4d 100644
--- a/api/tests/unit_tests/services/test_vector_service.py
+++ b/api/tests/unit_tests/services/test_vector_service.py
@@ -77,22 +77,12 @@ def _make_segment(
def _mock_db_session_for_update_multimodel(*, upload_files: list[_UploadFileStub] | None) -> MagicMock:
session = MagicMock(name="session")
- binding_query = MagicMock(name="binding_query")
- binding_query.where.return_value = binding_query
- binding_query.delete.return_value = 1
+ # db.session.execute() is used for delete(SegmentAttachmentBinding).where(...)
+ session.execute = MagicMock(name="execute")
- upload_query = MagicMock(name="upload_query")
- upload_query.where.return_value = upload_query
- upload_query.all.return_value = upload_files or []
+ # db.session.scalars(select(UploadFile).where(...)).all() returns upload files
+ session.scalars.return_value.all.return_value = upload_files or []
- def query_side_effect(model: object) -> MagicMock:
- if model is vector_service_module.SegmentAttachmentBinding:
- return binding_query
- if model is vector_service_module.UploadFile:
- return upload_query
- return MagicMock(name=f"query({model})")
-
- session.query.side_effect = query_side_effect
db_mock = MagicMock(name="db")
db_mock.session = session
return db_mock
@@ -165,22 +155,15 @@ def _mock_parent_child_queries(
) -> MagicMock:
session = MagicMock(name="session")
- doc_query = MagicMock(name="doc_query")
- doc_query.filter_by.return_value = doc_query
- doc_query.first.return_value = dataset_document
+ get_dispatch: dict[object, object | None] = {
+ vector_service_module.DatasetDocument: dataset_document,
+ vector_service_module.DatasetProcessRule: processing_rule,
+ }
- rule_query = MagicMock(name="rule_query")
- rule_query.where.return_value = rule_query
- rule_query.first.return_value = processing_rule
+ def get_side_effect(model: object, pk: object) -> object | None:
+ return get_dispatch.get(model)
- def query_side_effect(model: object) -> MagicMock:
- if model is vector_service_module.DatasetDocument:
- return doc_query
- if model is vector_service_module.DatasetProcessRule:
- return rule_query
- return MagicMock(name=f"query({model})")
-
- session.query.side_effect = query_side_effect
+ session.get.side_effect = get_side_effect
db_mock = MagicMock(name="db")
db_mock.session = session
return db_mock
@@ -609,7 +592,7 @@ def test_update_multimodel_vector_deletes_bindings_and_commits_on_empty_new_ids(
vector_cls.assert_called_once_with(dataset=dataset)
vector_instance.delete_by_ids.assert_called_once_with(["old-1", "old-2"])
- db_mock.session.query.assert_called_once_with(vector_service_module.SegmentAttachmentBinding)
+ db_mock.session.execute.assert_called_once()
db_mock.session.commit.assert_called_once()
db_mock.session.add_all.assert_not_called()
vector_instance.add_texts.assert_not_called()
@@ -644,6 +627,8 @@ def test_update_multimodel_vector_adds_bindings_and_vectors_and_skips_missing_up
binding_ctor = MagicMock(side_effect=lambda **kwargs: kwargs)
monkeypatch.setattr(vector_service_module, "SegmentAttachmentBinding", binding_ctor)
+ monkeypatch.setattr(vector_service_module, "delete", MagicMock())
+ monkeypatch.setattr(vector_service_module, "select", MagicMock())
logger_mock = MagicMock()
monkeypatch.setattr(vector_service_module, "logger", logger_mock)
@@ -677,6 +662,8 @@ def test_update_multimodel_vector_updates_bindings_without_multimodal_vector_ops
monkeypatch.setattr(
vector_service_module, "SegmentAttachmentBinding", MagicMock(side_effect=lambda **kwargs: kwargs)
)
+ monkeypatch.setattr(vector_service_module, "delete", MagicMock())
+ monkeypatch.setattr(vector_service_module, "select", MagicMock())
VectorService.update_multimodel_vector(segment=segment, attachment_ids=["file-1"], dataset=dataset)
@@ -698,6 +685,8 @@ def test_update_multimodel_vector_rolls_back_and_reraises_on_error(monkeypatch:
monkeypatch.setattr(
vector_service_module, "SegmentAttachmentBinding", MagicMock(side_effect=lambda **kwargs: kwargs)
)
+ monkeypatch.setattr(vector_service_module, "delete", MagicMock())
+ monkeypatch.setattr(vector_service_module, "select", MagicMock())
logger_mock = MagicMock()
monkeypatch.setattr(vector_service_module, "logger", logger_mock)
diff --git a/api/tests/unit_tests/services/test_website_service.py b/api/tests/unit_tests/services/test_website_service.py
index e973da7d56..b0ddc7388a 100644
--- a/api/tests/unit_tests/services/test_website_service.py
+++ b/api/tests/unit_tests/services/test_website_service.py
@@ -343,7 +343,7 @@ def test_crawl_with_watercrawl_passes_options_dict(monkeypatch: pytest.MonkeyPat
def test_crawl_with_jinareader_single_page_success(monkeypatch: pytest.MonkeyPatch) -> None:
get_mock = MagicMock(return_value=_DummyHttpxResponse({"code": 200, "data": {"title": "t"}}))
- monkeypatch.setattr(website_service_module.httpx, "get", get_mock)
+ monkeypatch.setattr(website_service_module._jina_http_client, "get", get_mock)
req = WebsiteCrawlApiRequest(
provider="jinareader", url="https://example.com", options={"crawl_sub_pages": False}
@@ -356,7 +356,11 @@ def test_crawl_with_jinareader_single_page_success(monkeypatch: pytest.MonkeyPat
def test_crawl_with_jinareader_single_page_failure(monkeypatch: pytest.MonkeyPatch) -> None:
- monkeypatch.setattr(website_service_module.httpx, "get", MagicMock(return_value=_DummyHttpxResponse({"code": 500})))
+ monkeypatch.setattr(
+ website_service_module._jina_http_client,
+ "get",
+ MagicMock(return_value=_DummyHttpxResponse({"code": 500})),
+ )
req = WebsiteCrawlApiRequest(
provider="jinareader", url="https://example.com", options={"crawl_sub_pages": False}
).to_crawl_request()
@@ -368,7 +372,7 @@ def test_crawl_with_jinareader_single_page_failure(monkeypatch: pytest.MonkeyPat
def test_crawl_with_jinareader_multi_page_success(monkeypatch: pytest.MonkeyPatch) -> None:
post_mock = MagicMock(return_value=_DummyHttpxResponse({"code": 200, "data": {"taskId": "t1"}}))
- monkeypatch.setattr(website_service_module.httpx, "post", post_mock)
+ monkeypatch.setattr(website_service_module._adaptive_http_client, "post", post_mock)
req = WebsiteCrawlApiRequest(
provider="jinareader",
@@ -384,7 +388,7 @@ def test_crawl_with_jinareader_multi_page_success(monkeypatch: pytest.MonkeyPatc
def test_crawl_with_jinareader_multi_page_failure(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(
- website_service_module.httpx, "post", MagicMock(return_value=_DummyHttpxResponse({"code": 400}))
+ website_service_module._adaptive_http_client, "post", MagicMock(return_value=_DummyHttpxResponse({"code": 400}))
)
req = WebsiteCrawlApiRequest(
provider="jinareader",
@@ -482,7 +486,7 @@ def test_get_jinareader_status_active(monkeypatch: pytest.MonkeyPatch) -> None:
}
)
)
- monkeypatch.setattr(website_service_module.httpx, "post", post_mock)
+ monkeypatch.setattr(website_service_module._adaptive_http_client, "post", post_mock)
result = WebsiteService._get_jinareader_status("job-1", "k")
assert result["status"] == "active"
@@ -518,7 +522,7 @@ def test_get_jinareader_status_completed_formats_processed_items(monkeypatch: py
}
}
post_mock = MagicMock(side_effect=[_DummyHttpxResponse(status_payload), _DummyHttpxResponse(processed_payload)])
- monkeypatch.setattr(website_service_module.httpx, "post", post_mock)
+ monkeypatch.setattr(website_service_module._adaptive_http_client, "post", post_mock)
result = WebsiteService._get_jinareader_status("job-1", "k")
assert result["status"] == "completed"
@@ -619,7 +623,7 @@ def test_get_watercrawl_url_data_delegates(monkeypatch: pytest.MonkeyPatch) -> N
def test_get_jinareader_url_data_without_job_id_success(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(
- website_service_module.httpx,
+ website_service_module._jina_http_client,
"get",
MagicMock(return_value=_DummyHttpxResponse({"code": 200, "data": {"url": "u"}})),
)
@@ -627,7 +631,11 @@ def test_get_jinareader_url_data_without_job_id_success(monkeypatch: pytest.Monk
def test_get_jinareader_url_data_without_job_id_failure(monkeypatch: pytest.MonkeyPatch) -> None:
- monkeypatch.setattr(website_service_module.httpx, "get", MagicMock(return_value=_DummyHttpxResponse({"code": 500})))
+ monkeypatch.setattr(
+ website_service_module._jina_http_client,
+ "get",
+ MagicMock(return_value=_DummyHttpxResponse({"code": 500})),
+ )
with pytest.raises(ValueError, match="Failed to crawl$"):
WebsiteService._get_jinareader_url_data("", "u", "k")
@@ -637,7 +645,7 @@ def test_get_jinareader_url_data_with_job_id_completed_returns_matching_item(mon
processed_payload = {"data": {"processed": {"u1": {"data": {"url": "u", "title": "t"}}}}}
post_mock = MagicMock(side_effect=[_DummyHttpxResponse(status_payload), _DummyHttpxResponse(processed_payload)])
- monkeypatch.setattr(website_service_module.httpx, "post", post_mock)
+ monkeypatch.setattr(website_service_module._adaptive_http_client, "post", post_mock)
assert WebsiteService._get_jinareader_url_data("job-1", "u", "k") == {"url": "u", "title": "t"}
assert post_mock.call_count == 2
@@ -645,7 +653,7 @@ def test_get_jinareader_url_data_with_job_id_completed_returns_matching_item(mon
def test_get_jinareader_url_data_with_job_id_not_completed_raises(monkeypatch: pytest.MonkeyPatch) -> None:
post_mock = MagicMock(return_value=_DummyHttpxResponse({"data": {"status": "active"}}))
- monkeypatch.setattr(website_service_module.httpx, "post", post_mock)
+ monkeypatch.setattr(website_service_module._adaptive_http_client, "post", post_mock)
with pytest.raises(ValueError, match=r"Crawl job is no\s*t completed"):
WebsiteService._get_jinareader_url_data("job-1", "u", "k")
@@ -658,7 +666,7 @@ def test_get_jinareader_url_data_with_job_id_completed_but_not_found_returns_non
processed_payload = {"data": {"processed": {"u1": {"data": {"url": "other"}}}}}
post_mock = MagicMock(side_effect=[_DummyHttpxResponse(status_payload), _DummyHttpxResponse(processed_payload)])
- monkeypatch.setattr(website_service_module.httpx, "post", post_mock)
+ monkeypatch.setattr(website_service_module._adaptive_http_client, "post", post_mock)
assert WebsiteService._get_jinareader_url_data("job-1", "u", "k") is None
diff --git a/api/tests/unit_tests/services/test_workflow_service.py b/api/tests/unit_tests/services/test_workflow_service.py
index cd71981bcf..1b253eb2f1 100644
--- a/api/tests/unit_tests/services/test_workflow_service.py
+++ b/api/tests/unit_tests/services/test_workflow_service.py
@@ -268,7 +268,7 @@ class TestWorkflowService:
Provides mock implementations of:
- session.add(): Adding new records
- session.commit(): Committing transactions
- - session.query(): Querying database
+ - session.scalar(): Scalar queries
- session.execute(): Executing SQL statements
"""
with patch("services.workflow_service.db") as mock_db:
@@ -276,7 +276,7 @@ class TestWorkflowService:
mock_db.session = mock_session
mock_session.add = MagicMock()
mock_session.commit = MagicMock()
- mock_session.query = MagicMock()
+ mock_session.scalar = MagicMock()
mock_session.execute = MagicMock()
yield mock_db
@@ -338,10 +338,8 @@ class TestWorkflowService:
app = TestWorkflowAssociatedDataFactory.create_app_mock()
mock_workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
- # Mock database query
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = mock_workflow
+ # Mock db.session.scalar() used by get_draft_workflow
+ mock_db_session.session.scalar.return_value = mock_workflow
result = workflow_service.get_draft_workflow(app)
@@ -351,10 +349,8 @@ class TestWorkflowService:
"""Test get_draft_workflow returns None when no draft exists."""
app = TestWorkflowAssociatedDataFactory.create_app_mock()
- # Mock database query to return None
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = None
+ # Mock db.session.scalar() to return None
+ mock_db_session.session.scalar.return_value = None
result = workflow_service.get_draft_workflow(app)
@@ -366,10 +362,8 @@ class TestWorkflowService:
workflow_id = "workflow-123"
mock_workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock(version="v1")
- # Mock database query
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = mock_workflow
+ # Mock db.session.scalar() used by get_draft_workflow
+ mock_db_session.session.scalar.return_value = mock_workflow
result = workflow_service.get_draft_workflow(app, workflow_id=workflow_id)
@@ -384,10 +378,8 @@ class TestWorkflowService:
workflow_id = "workflow-123"
mock_workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock(workflow_id=workflow_id, version="v1")
- # Mock database query
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = mock_workflow
+ # Mock db.session.scalar() used by get_published_workflow_by_id
+ mock_db_session.session.scalar.return_value = mock_workflow
result = workflow_service.get_published_workflow_by_id(app, workflow_id)
@@ -406,10 +398,8 @@ class TestWorkflowService:
workflow_id=workflow_id, version=Workflow.VERSION_DRAFT
)
- # Mock database query
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = mock_workflow
+ # Mock db.session.scalar() used by get_published_workflow_by_id
+ mock_db_session.session.scalar.return_value = mock_workflow
with pytest.raises(IsDraftWorkflowError):
workflow_service.get_published_workflow_by_id(app, workflow_id)
@@ -419,10 +409,8 @@ class TestWorkflowService:
app = TestWorkflowAssociatedDataFactory.create_app_mock()
workflow_id = "nonexistent-workflow"
- # Mock database query to return None
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = None
+ # Mock db.session.scalar() to return None
+ mock_db_session.session.scalar.return_value = None
result = workflow_service.get_published_workflow_by_id(app, workflow_id)
@@ -434,10 +422,8 @@ class TestWorkflowService:
app = TestWorkflowAssociatedDataFactory.create_app_mock(workflow_id=workflow_id)
mock_workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock(workflow_id=workflow_id, version="v1")
- # Mock database query
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = mock_workflow
+ # Mock db.session.scalar() used by get_published_workflow
+ mock_db_session.session.scalar.return_value = mock_workflow
result = workflow_service.get_published_workflow(app)
@@ -466,11 +452,9 @@ class TestWorkflowService:
graph = TestWorkflowAssociatedDataFactory.create_valid_workflow_graph()
features = {"file_upload": {"enabled": False}}
- # Mock get_draft_workflow to return None (no existing draft)
+ # Mock db.session.scalar() to return None (no existing draft)
# This simulates the first time a workflow is created for an app
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = None
+ mock_db_session.session.scalar.return_value = None
with (
patch.object(workflow_service, "validate_features_structure"),
@@ -504,12 +488,10 @@ class TestWorkflowService:
features = {"file_upload": {"enabled": False}}
unique_hash = "test-hash-123"
- # Mock existing draft workflow
+ # Mock existing draft workflow via db.session.scalar()
mock_workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock(unique_hash=unique_hash)
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = mock_workflow
+ mock_db_session.session.scalar.return_value = mock_workflow
with (
patch.object(workflow_service, "validate_features_structure"),
@@ -545,12 +527,10 @@ class TestWorkflowService:
graph = TestWorkflowAssociatedDataFactory.create_valid_workflow_graph()
features = {}
- # Mock existing draft workflow with different hash
+ # Mock existing draft workflow with different hash via db.session.scalar()
mock_workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock(unique_hash="old-hash")
- mock_query = MagicMock()
- mock_db_session.session.query.return_value = mock_query
- mock_query.where.return_value.first.return_value = mock_workflow
+ mock_db_session.session.scalar.return_value = mock_workflow
with pytest.raises(WorkflowHashNotEqualError):
workflow_service.sync_draft_workflow(
diff --git a/api/tests/unit_tests/services/tools/test_builtin_tools_manage_service.py b/api/tests/unit_tests/services/tools/test_builtin_tools_manage_service.py
index 439d203c58..175900071b 100644
--- a/api/tests/unit_tests/services/tools/test_builtin_tools_manage_service.py
+++ b/api/tests/unit_tests/services/tools/test_builtin_tools_manage_service.py
@@ -347,7 +347,7 @@ class TestGetBuiltinToolProviderCredentials:
def test_returns_empty_when_no_providers(self, mock_db):
mock_db.session.no_autoflush.__enter__ = MagicMock(return_value=None)
mock_db.session.no_autoflush.__exit__ = MagicMock(return_value=False)
- mock_db.session.query.return_value.filter_by.return_value.order_by.return_value.all.return_value = []
+ mock_db.session.scalars.return_value.all.return_value = []
result = BuiltinToolManageService.get_builtin_tool_provider_credentials("t", "google")
@@ -362,7 +362,7 @@ class TestGetBuiltinToolProviderCredentials:
mock_db.session.no_autoflush.__exit__ = MagicMock(return_value=False)
provider = MagicMock(provider="google", is_default=False)
- mock_db.session.query.return_value.filter_by.return_value.order_by.return_value.all.return_value = [provider]
+ mock_db.session.scalars.return_value.all.return_value = [provider]
mock_encrypter = MagicMock()
mock_encrypter.decrypt.return_value = {"key": "decrypted"}
diff --git a/api/tests/unit_tests/services/tools/test_tools_transform_service.py b/api/tests/unit_tests/services/tools/test_tools_transform_service.py
new file mode 100644
index 0000000000..32c1a00d30
--- /dev/null
+++ b/api/tests/unit_tests/services/tools/test_tools_transform_service.py
@@ -0,0 +1,598 @@
+from unittest.mock import MagicMock, Mock, patch
+
+from core.tools.__base.tool import Tool
+from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity
+from core.tools.entities.common_entities import I18nObject
+from core.tools.entities.tool_entities import ApiProviderAuthType, ToolParameter, ToolProviderType
+from services.tools.tools_transform_service import ToolTransformService
+
+MODULE = "services.tools.tools_transform_service"
+
+
+class TestToolTransformService:
+ """Test cases for ToolTransformService.convert_tool_entity_to_api_entity method"""
+
+ def test_convert_tool_with_parameter_override(self):
+ """Test that runtime parameters correctly override base parameters"""
+ # Create mock base parameters
+ base_param1 = Mock(spec=ToolParameter)
+ base_param1.name = "param1"
+ base_param1.form = ToolParameter.ToolParameterForm.FORM
+ base_param1.type = "string"
+ base_param1.label = "Base Param 1"
+
+ base_param2 = Mock(spec=ToolParameter)
+ base_param2.name = "param2"
+ base_param2.form = ToolParameter.ToolParameterForm.FORM
+ base_param2.type = "string"
+ base_param2.label = "Base Param 2"
+
+ # Create mock runtime parameters that override base parameters
+ runtime_param1 = Mock(spec=ToolParameter)
+ runtime_param1.name = "param1"
+ runtime_param1.form = ToolParameter.ToolParameterForm.FORM
+ runtime_param1.type = "string"
+ runtime_param1.label = "Runtime Param 1" # Different label to verify override
+
+ # Create mock tool
+ mock_tool = Mock(spec=Tool)
+ mock_tool.entity = Mock()
+ mock_tool.entity.parameters = [base_param1, base_param2]
+ mock_tool.entity.identity = Mock()
+ mock_tool.entity.identity.author = "test_author"
+ mock_tool.entity.identity.name = "test_tool"
+ mock_tool.entity.identity.label = I18nObject(en_US="Test Tool")
+ mock_tool.entity.description = Mock()
+ mock_tool.entity.description.human = I18nObject(en_US="Test description")
+ mock_tool.entity.output_schema = {}
+ mock_tool.get_runtime_parameters.return_value = [runtime_param1]
+
+ # Mock fork_tool_runtime to return the same tool
+ mock_tool.fork_tool_runtime.return_value = mock_tool
+
+ # Call the method
+ result = ToolTransformService.convert_tool_entity_to_api_entity(mock_tool, "test_tenant", None)
+
+ # Verify the result
+ assert isinstance(result, ToolApiEntity)
+ assert result.author == "test_author"
+ assert result.name == "test_tool"
+ assert result.parameters is not None
+ assert len(result.parameters) == 2
+
+ # Find the overridden parameter
+ overridden_param = next((p for p in result.parameters if p.name == "param1"), None)
+ assert overridden_param is not None
+ assert overridden_param.label == "Runtime Param 1" # Should be runtime version
+
+ # Find the non-overridden parameter
+ original_param = next((p for p in result.parameters if p.name == "param2"), None)
+ assert original_param is not None
+ assert original_param.label == "Base Param 2" # Should be base version
+
+ def test_convert_tool_with_additional_runtime_parameters(self):
+ """Test that additional runtime parameters are added to the final list"""
+ # Create mock base parameters
+ base_param1 = Mock(spec=ToolParameter)
+ base_param1.name = "param1"
+ base_param1.form = ToolParameter.ToolParameterForm.FORM
+ base_param1.type = "string"
+ base_param1.label = "Base Param 1"
+
+ # Create mock runtime parameters - one that overrides and one that's new
+ runtime_param1 = Mock(spec=ToolParameter)
+ runtime_param1.name = "param1"
+ runtime_param1.form = ToolParameter.ToolParameterForm.FORM
+ runtime_param1.type = "string"
+ runtime_param1.label = "Runtime Param 1"
+
+ runtime_param2 = Mock(spec=ToolParameter)
+ runtime_param2.name = "runtime_only"
+ runtime_param2.form = ToolParameter.ToolParameterForm.FORM
+ runtime_param2.type = "string"
+ runtime_param2.label = "Runtime Only Param"
+
+ # Create mock tool
+ mock_tool = Mock(spec=Tool)
+ mock_tool.entity = Mock()
+ mock_tool.entity.parameters = [base_param1]
+ mock_tool.entity.identity = Mock()
+ mock_tool.entity.identity.author = "test_author"
+ mock_tool.entity.identity.name = "test_tool"
+ mock_tool.entity.identity.label = I18nObject(en_US="Test Tool")
+ mock_tool.entity.description = Mock()
+ mock_tool.entity.description.human = I18nObject(en_US="Test description")
+ mock_tool.entity.output_schema = {}
+ mock_tool.get_runtime_parameters.return_value = [runtime_param1, runtime_param2]
+
+ # Mock fork_tool_runtime to return the same tool
+ mock_tool.fork_tool_runtime.return_value = mock_tool
+
+ # Call the method
+ result = ToolTransformService.convert_tool_entity_to_api_entity(mock_tool, "test_tenant", None)
+
+ # Verify the result
+ assert isinstance(result, ToolApiEntity)
+ assert result.parameters is not None
+ assert len(result.parameters) == 2
+
+ # Check that both parameters are present
+ param_names = [p.name for p in result.parameters]
+ assert "param1" in param_names
+ assert "runtime_only" in param_names
+
+ # Verify the overridden parameter has runtime version
+ overridden_param = next((p for p in result.parameters if p.name == "param1"), None)
+ assert overridden_param is not None
+ assert overridden_param.label == "Runtime Param 1"
+
+ # Verify the new runtime parameter is included
+ new_param = next((p for p in result.parameters if p.name == "runtime_only"), None)
+ assert new_param is not None
+ assert new_param.label == "Runtime Only Param"
+
+ def test_convert_tool_with_non_form_runtime_parameters(self):
+ """Test that non-FORM runtime parameters are not added as new parameters"""
+ # Create mock base parameters
+ base_param1 = Mock(spec=ToolParameter)
+ base_param1.name = "param1"
+ base_param1.form = ToolParameter.ToolParameterForm.FORM
+ base_param1.type = "string"
+ base_param1.label = "Base Param 1"
+
+ # Create mock runtime parameters with different forms
+ runtime_param1 = Mock(spec=ToolParameter)
+ runtime_param1.name = "param1"
+ runtime_param1.form = ToolParameter.ToolParameterForm.FORM
+ runtime_param1.type = "string"
+ runtime_param1.label = "Runtime Param 1"
+
+ runtime_param2 = Mock(spec=ToolParameter)
+ runtime_param2.name = "llm_param"
+ runtime_param2.form = ToolParameter.ToolParameterForm.LLM
+ runtime_param2.type = "string"
+ runtime_param2.label = "LLM Param"
+
+ # Create mock tool
+ mock_tool = Mock(spec=Tool)
+ mock_tool.entity = Mock()
+ mock_tool.entity.parameters = [base_param1]
+ mock_tool.entity.identity = Mock()
+ mock_tool.entity.identity.author = "test_author"
+ mock_tool.entity.identity.name = "test_tool"
+ mock_tool.entity.identity.label = I18nObject(en_US="Test Tool")
+ mock_tool.entity.description = Mock()
+ mock_tool.entity.description.human = I18nObject(en_US="Test description")
+ mock_tool.entity.output_schema = {}
+ mock_tool.get_runtime_parameters.return_value = [runtime_param1, runtime_param2]
+
+ # Mock fork_tool_runtime to return the same tool
+ mock_tool.fork_tool_runtime.return_value = mock_tool
+
+ # Call the method
+ result = ToolTransformService.convert_tool_entity_to_api_entity(mock_tool, "test_tenant", None)
+
+ # Verify the result
+ assert isinstance(result, ToolApiEntity)
+ assert result.parameters is not None
+ assert len(result.parameters) == 1 # Only the FORM parameter should be present
+
+ # Check that only the FORM parameter is present
+ param_names = [p.name for p in result.parameters]
+ assert "param1" in param_names
+ assert "llm_param" not in param_names
+
+ def test_convert_tool_with_empty_parameters(self):
+ """Test conversion with empty base and runtime parameters"""
+ # Create mock tool with no parameters
+ mock_tool = Mock(spec=Tool)
+ mock_tool.entity = Mock()
+ mock_tool.entity.parameters = []
+ mock_tool.entity.identity = Mock()
+ mock_tool.entity.identity.author = "test_author"
+ mock_tool.entity.identity.name = "test_tool"
+ mock_tool.entity.identity.label = I18nObject(en_US="Test Tool")
+ mock_tool.entity.description = Mock()
+ mock_tool.entity.description.human = I18nObject(en_US="Test description")
+ mock_tool.entity.output_schema = {}
+ mock_tool.get_runtime_parameters.return_value = []
+
+ # Mock fork_tool_runtime to return the same tool
+ mock_tool.fork_tool_runtime.return_value = mock_tool
+
+ # Call the method
+ result = ToolTransformService.convert_tool_entity_to_api_entity(mock_tool, "test_tenant", None)
+
+ # Verify the result
+ assert isinstance(result, ToolApiEntity)
+ assert result.parameters is not None
+ assert len(result.parameters) == 0
+
+ def test_convert_tool_with_none_parameters(self):
+ """Test conversion when base parameters is None"""
+ # Create mock tool with None parameters
+ mock_tool = Mock(spec=Tool)
+ mock_tool.entity = Mock()
+ mock_tool.entity.parameters = None
+ mock_tool.entity.identity = Mock()
+ mock_tool.entity.identity.author = "test_author"
+ mock_tool.entity.identity.name = "test_tool"
+ mock_tool.entity.identity.label = I18nObject(en_US="Test Tool")
+ mock_tool.entity.description = Mock()
+ mock_tool.entity.description.human = I18nObject(en_US="Test description")
+ mock_tool.entity.output_schema = {}
+ mock_tool.get_runtime_parameters.return_value = []
+
+ # Mock fork_tool_runtime to return the same tool
+ mock_tool.fork_tool_runtime.return_value = mock_tool
+
+ # Call the method
+ result = ToolTransformService.convert_tool_entity_to_api_entity(mock_tool, "test_tenant", None)
+
+ # Verify the result
+ assert isinstance(result, ToolApiEntity)
+ assert result.parameters is not None
+ assert len(result.parameters) == 0
+
+ def test_convert_tool_parameter_order_preserved(self):
+ """Test that parameter order is preserved correctly"""
+ # Create mock base parameters in specific order
+ base_param1 = Mock(spec=ToolParameter)
+ base_param1.name = "param1"
+ base_param1.form = ToolParameter.ToolParameterForm.FORM
+ base_param1.type = "string"
+ base_param1.label = "Base Param 1"
+
+ base_param2 = Mock(spec=ToolParameter)
+ base_param2.name = "param2"
+ base_param2.form = ToolParameter.ToolParameterForm.FORM
+ base_param2.type = "string"
+ base_param2.label = "Base Param 2"
+
+ base_param3 = Mock(spec=ToolParameter)
+ base_param3.name = "param3"
+ base_param3.form = ToolParameter.ToolParameterForm.FORM
+ base_param3.type = "string"
+ base_param3.label = "Base Param 3"
+
+ # Create runtime parameter that overrides middle parameter
+ runtime_param2 = Mock(spec=ToolParameter)
+ runtime_param2.name = "param2"
+ runtime_param2.form = ToolParameter.ToolParameterForm.FORM
+ runtime_param2.type = "string"
+ runtime_param2.label = "Runtime Param 2"
+
+ # Create new runtime parameter
+ runtime_param4 = Mock(spec=ToolParameter)
+ runtime_param4.name = "param4"
+ runtime_param4.form = ToolParameter.ToolParameterForm.FORM
+ runtime_param4.type = "string"
+ runtime_param4.label = "Runtime Param 4"
+
+ # Create mock tool
+ mock_tool = Mock(spec=Tool)
+ mock_tool.entity = Mock()
+ mock_tool.entity.parameters = [base_param1, base_param2, base_param3]
+ mock_tool.entity.identity = Mock()
+ mock_tool.entity.identity.author = "test_author"
+ mock_tool.entity.identity.name = "test_tool"
+ mock_tool.entity.identity.label = I18nObject(en_US="Test Tool")
+ mock_tool.entity.description = Mock()
+ mock_tool.entity.description.human = I18nObject(en_US="Test description")
+ mock_tool.entity.output_schema = {}
+ mock_tool.get_runtime_parameters.return_value = [runtime_param2, runtime_param4]
+
+ # Mock fork_tool_runtime to return the same tool
+ mock_tool.fork_tool_runtime.return_value = mock_tool
+
+ # Call the method
+ result = ToolTransformService.convert_tool_entity_to_api_entity(mock_tool, "test_tenant", None)
+
+ # Verify the result
+ assert isinstance(result, ToolApiEntity)
+ assert result.parameters is not None
+ assert len(result.parameters) == 4
+
+ # Check that order is maintained: base parameters first, then new runtime parameters
+ param_names = [p.name for p in result.parameters]
+ assert param_names == ["param1", "param2", "param3", "param4"]
+
+ # Verify that param2 was overridden with runtime version
+ param2 = result.parameters[1]
+ assert param2.name == "param2"
+ assert param2.label == "Runtime Param 2"
+
+
+class TestWorkflowProviderToUserProvider:
+ """Test cases for ToolTransformService.workflow_provider_to_user_provider method"""
+
+ def test_workflow_provider_to_user_provider_with_workflow_app_id(self):
+ """Test that workflow_provider_to_user_provider correctly sets workflow_app_id."""
+ from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
+
+ # Create mock workflow tool provider controller
+ workflow_app_id = "app_123"
+ provider_id = "provider_123"
+ mock_controller = Mock(spec=WorkflowToolProviderController)
+ mock_controller.provider_id = provider_id
+ mock_controller.entity = Mock()
+ mock_controller.entity.identity = Mock()
+ mock_controller.entity.identity.author = "test_author"
+ mock_controller.entity.identity.name = "test_workflow_tool"
+ mock_controller.entity.identity.description = I18nObject(en_US="Test description")
+ mock_controller.entity.identity.icon = {"type": "emoji", "content": "🔧"}
+ mock_controller.entity.identity.icon_dark = None
+ mock_controller.entity.identity.label = I18nObject(en_US="Test Workflow Tool")
+
+ # Call the method
+ result = ToolTransformService.workflow_provider_to_user_provider(
+ provider_controller=mock_controller,
+ labels=["label1", "label2"],
+ workflow_app_id=workflow_app_id,
+ )
+
+ # Verify the result
+ assert isinstance(result, ToolProviderApiEntity)
+ assert result.id == provider_id
+ assert result.author == "test_author"
+ assert result.name == "test_workflow_tool"
+ assert result.type == ToolProviderType.WORKFLOW
+ assert result.workflow_app_id == workflow_app_id
+ assert result.labels == ["label1", "label2"]
+ assert result.is_team_authorization is True
+ assert result.plugin_id is None
+ assert result.plugin_unique_identifier is None
+ assert result.tools == []
+
+ def test_workflow_provider_to_user_provider_without_workflow_app_id(self):
+ """Test that workflow_provider_to_user_provider works when workflow_app_id is not provided."""
+ from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
+
+ # Create mock workflow tool provider controller
+ provider_id = "provider_123"
+ mock_controller = Mock(spec=WorkflowToolProviderController)
+ mock_controller.provider_id = provider_id
+ mock_controller.entity = Mock()
+ mock_controller.entity.identity = Mock()
+ mock_controller.entity.identity.author = "test_author"
+ mock_controller.entity.identity.name = "test_workflow_tool"
+ mock_controller.entity.identity.description = I18nObject(en_US="Test description")
+ mock_controller.entity.identity.icon = {"type": "emoji", "content": "🔧"}
+ mock_controller.entity.identity.icon_dark = None
+ mock_controller.entity.identity.label = I18nObject(en_US="Test Workflow Tool")
+
+ # Call the method without workflow_app_id
+ result = ToolTransformService.workflow_provider_to_user_provider(
+ provider_controller=mock_controller,
+ labels=["label1"],
+ )
+
+ # Verify the result
+ assert isinstance(result, ToolProviderApiEntity)
+ assert result.id == provider_id
+ assert result.workflow_app_id is None
+ assert result.labels == ["label1"]
+
+ def test_workflow_provider_to_user_provider_workflow_app_id_none(self):
+ """Test that workflow_provider_to_user_provider handles None workflow_app_id explicitly."""
+ from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
+
+ # Create mock workflow tool provider controller
+ provider_id = "provider_123"
+ mock_controller = Mock(spec=WorkflowToolProviderController)
+ mock_controller.provider_id = provider_id
+ mock_controller.entity = Mock()
+ mock_controller.entity.identity = Mock()
+ mock_controller.entity.identity.author = "test_author"
+ mock_controller.entity.identity.name = "test_workflow_tool"
+ mock_controller.entity.identity.description = I18nObject(en_US="Test description")
+ mock_controller.entity.identity.icon = {"type": "emoji", "content": "🔧"}
+ mock_controller.entity.identity.icon_dark = None
+ mock_controller.entity.identity.label = I18nObject(en_US="Test Workflow Tool")
+
+ # Call the method with explicit None values
+ result = ToolTransformService.workflow_provider_to_user_provider(
+ provider_controller=mock_controller,
+ labels=None,
+ workflow_app_id=None,
+ )
+
+ # Verify the result
+ assert isinstance(result, ToolProviderApiEntity)
+ assert result.id == provider_id
+ assert result.workflow_app_id is None
+ assert result.labels == []
+
+ def test_workflow_provider_to_user_provider_preserves_other_fields(self):
+ """Test that workflow_provider_to_user_provider preserves all other entity fields."""
+ from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
+
+ # Create mock workflow tool provider controller with various fields
+ workflow_app_id = "app_456"
+ provider_id = "provider_456"
+ mock_controller = Mock(spec=WorkflowToolProviderController)
+ mock_controller.provider_id = provider_id
+ mock_controller.entity = Mock()
+ mock_controller.entity.identity = Mock()
+ mock_controller.entity.identity.author = "another_author"
+ mock_controller.entity.identity.name = "another_workflow_tool"
+ mock_controller.entity.identity.description = I18nObject(
+ en_US="Another description", zh_Hans="Another description"
+ )
+ mock_controller.entity.identity.icon = {"type": "emoji", "content": "⚙️"}
+ mock_controller.entity.identity.icon_dark = {"type": "emoji", "content": "🔧"}
+ mock_controller.entity.identity.label = I18nObject(
+ en_US="Another Workflow Tool", zh_Hans="Another Workflow Tool"
+ )
+
+ # Call the method
+ result = ToolTransformService.workflow_provider_to_user_provider(
+ provider_controller=mock_controller,
+ labels=["automation", "workflow"],
+ workflow_app_id=workflow_app_id,
+ )
+
+ # Verify all fields are preserved correctly
+ assert isinstance(result, ToolProviderApiEntity)
+ assert result.id == provider_id
+ assert result.author == "another_author"
+ assert result.name == "another_workflow_tool"
+ assert result.description.en_US == "Another description"
+ assert result.description.zh_Hans == "Another description"
+ assert result.icon == {"type": "emoji", "content": "⚙️"}
+ assert result.icon_dark == {"type": "emoji", "content": "🔧"}
+ assert result.label.en_US == "Another Workflow Tool"
+ assert result.label.zh_Hans == "Another Workflow Tool"
+ assert result.type == ToolProviderType.WORKFLOW
+ assert result.workflow_app_id == workflow_app_id
+ assert result.labels == ["automation", "workflow"]
+ assert result.masked_credentials == {}
+ assert result.is_team_authorization is True
+ assert result.allow_delete is True
+ assert result.plugin_id is None
+ assert result.plugin_unique_identifier is None
+ assert result.tools == []
+
+
+class TestGetToolProviderIconUrl:
+ def test_builtin_provider_returns_console_url(self):
+ with patch(f"{MODULE}.dify_config") as cfg:
+ cfg.CONSOLE_API_URL = "https://app.dify.ai"
+ url = ToolTransformService.get_tool_provider_icon_url("builtin", "google", "icon.png")
+
+ assert "/builtin/google/icon" in url
+ assert url.startswith("https://app.dify.ai/console/api/workspaces/current/tool-provider")
+
+ def test_builtin_provider_with_no_console_url(self):
+ with patch(f"{MODULE}.dify_config") as cfg:
+ cfg.CONSOLE_API_URL = None
+ url = ToolTransformService.get_tool_provider_icon_url("builtin", "slack", "icon.png")
+
+ assert "/builtin/slack/icon" in url
+
+ def test_api_provider_parses_json_icon(self):
+ icon_json = '{"background": "#fff", "content": "A"}'
+ result = ToolTransformService.get_tool_provider_icon_url("api", "my-api", icon_json)
+ assert result == {"background": "#fff", "content": "A"}
+
+ def test_api_provider_returns_dict_icon_directly(self):
+ icon = {"background": "#000", "content": "B"}
+ result = ToolTransformService.get_tool_provider_icon_url("api", "my-api", icon)
+ assert result == icon
+
+ def test_api_provider_returns_fallback_on_invalid_json(self):
+ result = ToolTransformService.get_tool_provider_icon_url("api", "my-api", "not-json")
+ assert result == {"background": "#252525", "content": "\ud83d\ude01"}
+
+ def test_workflow_provider_behaves_like_api(self):
+ icon = {"background": "#123", "content": "W"}
+ assert ToolTransformService.get_tool_provider_icon_url("workflow", "wf", icon) == icon
+
+ def test_mcp_returns_icon_as_is(self):
+ assert ToolTransformService.get_tool_provider_icon_url("mcp", "srv", "icon-value") == "icon-value"
+
+ def test_unknown_type_returns_empty(self):
+ assert ToolTransformService.get_tool_provider_icon_url("unknown", "x", "i") == ""
+
+
+class TestRepackProvider:
+ def test_repacks_dict_provider_icon(self):
+ provider = {"type": "builtin", "name": "google", "icon": "old"}
+ with patch.object(ToolTransformService, "get_tool_provider_icon_url", return_value="/new-url") as mock_fn:
+ ToolTransformService.repack_provider("t1", provider)
+
+ assert provider["icon"] == "/new-url"
+ mock_fn.assert_called_once_with(provider_type="builtin", provider_name="google", icon="old")
+
+ def test_repacks_tool_provider_api_entity_without_plugin(self):
+ entity = MagicMock(spec=ToolProviderApiEntity)
+ entity.plugin_id = None
+ entity.type = ToolProviderType.BUILT_IN
+ entity.name = "slack"
+ entity.icon = "icon.svg"
+ entity.icon_dark = "dark.svg"
+
+ with patch.object(ToolTransformService, "get_tool_provider_icon_url", return_value="/url"):
+ ToolTransformService.repack_provider("t1", entity)
+
+ assert entity.icon == "/url"
+ assert entity.icon_dark == "/url"
+
+
+class TestConvertMcpSchemaToParameter:
+ def test_simple_object_schema(self):
+ schema = {
+ "type": "object",
+ "properties": {
+ "query": {"type": "string", "description": "Search query"},
+ "count": {"type": "integer", "description": "Result count"},
+ },
+ "required": ["query"],
+ }
+
+ params = ToolTransformService.convert_mcp_schema_to_parameter(schema)
+
+ assert len(params) == 2
+ query_param = next(p for p in params if p.name == "query")
+ count_param = next(p for p in params if p.name == "count")
+ assert query_param.required is True
+ assert count_param.required is False
+ assert count_param.type.value == "number"
+
+ def test_float_maps_to_number(self):
+ schema = {"type": "object", "properties": {"rate": {"type": "float"}}, "required": []}
+ assert ToolTransformService.convert_mcp_schema_to_parameter(schema)[0].type.value == "number"
+
+ def test_array_type_attaches_input_schema(self):
+ prop = {"type": "array", "description": "Items", "items": {"type": "string"}}
+ schema = {"type": "object", "properties": {"items": prop}, "required": []}
+ param = ToolTransformService.convert_mcp_schema_to_parameter(schema)[0]
+ assert param.input_schema is not None
+
+ def test_non_object_schema_returns_empty(self):
+ assert ToolTransformService.convert_mcp_schema_to_parameter({"type": "string"}) == []
+
+ def test_missing_properties_returns_empty(self):
+ assert ToolTransformService.convert_mcp_schema_to_parameter({"type": "object"}) == []
+
+ def test_list_type_uses_first_element(self):
+ schema = {"type": "object", "properties": {"f": {"type": ["string", "null"]}}, "required": []}
+ assert ToolTransformService.convert_mcp_schema_to_parameter(schema)[0].type.value == "string"
+
+ def test_missing_description_defaults_empty(self):
+ schema = {"type": "object", "properties": {"f": {"type": "string"}}, "required": []}
+ assert ToolTransformService.convert_mcp_schema_to_parameter(schema)[0].llm_description == ""
+
+
+class TestApiProviderToController:
+ def test_api_key_header_auth(self):
+ db_provider = MagicMock()
+ db_provider.credentials = {"auth_type": "api_key_header"}
+ with patch(f"{MODULE}.ApiToolProviderController") as ctrl_cls:
+ ctrl_cls.from_db.return_value = MagicMock()
+ ToolTransformService.api_provider_to_controller(db_provider)
+ ctrl_cls.from_db.assert_called_once_with(db_provider=db_provider, auth_type=ApiProviderAuthType.API_KEY_HEADER)
+
+ def test_api_key_query_auth(self):
+ db_provider = MagicMock()
+ db_provider.credentials = {"auth_type": "api_key_query"}
+ with patch(f"{MODULE}.ApiToolProviderController") as ctrl_cls:
+ ctrl_cls.from_db.return_value = MagicMock()
+ ToolTransformService.api_provider_to_controller(db_provider)
+ ctrl_cls.from_db.assert_called_once_with(db_provider=db_provider, auth_type=ApiProviderAuthType.API_KEY_QUERY)
+
+ def test_legacy_api_key_maps_to_header(self):
+ db_provider = MagicMock()
+ db_provider.credentials = {"auth_type": "api_key"}
+ with patch(f"{MODULE}.ApiToolProviderController") as ctrl_cls:
+ ctrl_cls.from_db.return_value = MagicMock()
+ ToolTransformService.api_provider_to_controller(db_provider)
+ ctrl_cls.from_db.assert_called_once_with(db_provider=db_provider, auth_type=ApiProviderAuthType.API_KEY_HEADER)
+
+ def test_unknown_auth_defaults_to_none(self):
+ db_provider = MagicMock()
+ db_provider.credentials = {"auth_type": "something_else"}
+ with patch(f"{MODULE}.ApiToolProviderController") as ctrl_cls:
+ ctrl_cls.from_db.return_value = MagicMock()
+ ToolTransformService.api_provider_to_controller(db_provider)
+ ctrl_cls.from_db.assert_called_once_with(db_provider=db_provider, auth_type=ApiProviderAuthType.NONE)
diff --git a/api/tests/unit_tests/tools/test_api_tool.py b/api/tests/unit_tests/tools/test_api_tool.py
index 4d5683dcbd..2a8c6686d7 100644
--- a/api/tests/unit_tests/tools/test_api_tool.py
+++ b/api/tests/unit_tests/tools/test_api_tool.py
@@ -1,6 +1,5 @@
import json
import operator
-from typing import TypeVar
from unittest.mock import Mock, patch
import httpx
@@ -16,10 +15,8 @@ from core.tools.entities.tool_entities import (
ToolInvokeMessage,
)
-_T = TypeVar("_T")
-
-def _get_message_by_type(msgs: list[ToolInvokeMessage], msg_type: type[_T]) -> ToolInvokeMessage | None:
+def _get_message_by_type[T](msgs: list[ToolInvokeMessage], msg_type: type[T]) -> ToolInvokeMessage | None:
return next((i for i in msgs if isinstance(i.message, msg_type)), None)
diff --git a/api/uv.lock b/api/uv.lock
index 3e8d794866..9381fabb40 100644
--- a/api/uv.lock
+++ b/api/uv.lock
@@ -1,31 +1,11 @@
version = 1
revision = 3
-requires-python = ">=3.11, <3.13"
+requires-python = "==3.12.*"
resolution-markers = [
- "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'",
- "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'win32'",
- "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'emscripten'",
- "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'win32'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'emscripten'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'",
- "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'",
- "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'win32'",
- "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'emscripten'",
- "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'win32'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'emscripten'",
- "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'",
- "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'",
- "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform == 'win32'",
- "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform == 'emscripten'",
- "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'",
- "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'",
- "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform == 'win32'",
- "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform == 'emscripten'",
- "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'",
+ "sys_platform == 'win32'",
+ "sys_platform == 'emscripten'",
+ "sys_platform == 'linux'",
+ "sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'",
]
[[package]]
@@ -60,7 +40,7 @@ wheels = [
[[package]]
name = "aiohttp"
-version = "3.13.3"
+version = "3.13.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohappyeyeballs" },
@@ -71,42 +51,42 @@ dependencies = [
{ name = "propcache" },
{ name = "yarl" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f1/4c/a164164834f03924d9a29dc3acd9e7ee58f95857e0b467f6d04298594ebb/aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b", size = 746051, upload-time = "2026-01-03T17:29:43.287Z" },
- { url = "https://files.pythonhosted.org/packages/82/71/d5c31390d18d4f58115037c432b7e0348c60f6f53b727cad33172144a112/aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64", size = 499234, upload-time = "2026-01-03T17:29:44.822Z" },
- { url = "https://files.pythonhosted.org/packages/0e/c9/741f8ac91e14b1d2e7100690425a5b2b919a87a5075406582991fb7de920/aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea", size = 494979, upload-time = "2026-01-03T17:29:46.405Z" },
- { url = "https://files.pythonhosted.org/packages/75/b5/31d4d2e802dfd59f74ed47eba48869c1c21552c586d5e81a9d0d5c2ad640/aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a", size = 1748297, upload-time = "2026-01-03T17:29:48.083Z" },
- { url = "https://files.pythonhosted.org/packages/1a/3e/eefad0ad42959f226bb79664826883f2687d602a9ae2941a18e0484a74d3/aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540", size = 1707172, upload-time = "2026-01-03T17:29:49.648Z" },
- { url = "https://files.pythonhosted.org/packages/c5/3a/54a64299fac2891c346cdcf2aa6803f994a2e4beeaf2e5a09dcc54acc842/aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b", size = 1805405, upload-time = "2026-01-03T17:29:51.244Z" },
- { url = "https://files.pythonhosted.org/packages/6c/70/ddc1b7169cf64075e864f64595a14b147a895a868394a48f6a8031979038/aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3", size = 1899449, upload-time = "2026-01-03T17:29:53.938Z" },
- { url = "https://files.pythonhosted.org/packages/a1/7e/6815aab7d3a56610891c76ef79095677b8b5be6646aaf00f69b221765021/aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1", size = 1748444, upload-time = "2026-01-03T17:29:55.484Z" },
- { url = "https://files.pythonhosted.org/packages/6b/f2/073b145c4100da5511f457dc0f7558e99b2987cf72600d42b559db856fbc/aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3", size = 1606038, upload-time = "2026-01-03T17:29:57.179Z" },
- { url = "https://files.pythonhosted.org/packages/0a/c1/778d011920cae03ae01424ec202c513dc69243cf2db303965615b81deeea/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440", size = 1724156, upload-time = "2026-01-03T17:29:58.914Z" },
- { url = "https://files.pythonhosted.org/packages/0e/cb/3419eabf4ec1e9ec6f242c32b689248365a1cf621891f6f0386632525494/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7", size = 1722340, upload-time = "2026-01-03T17:30:01.962Z" },
- { url = "https://files.pythonhosted.org/packages/7a/e5/76cf77bdbc435bf233c1f114edad39ed4177ccbfab7c329482b179cff4f4/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c", size = 1783041, upload-time = "2026-01-03T17:30:03.609Z" },
- { url = "https://files.pythonhosted.org/packages/9d/d4/dd1ca234c794fd29c057ce8c0566b8ef7fd6a51069de5f06fa84b9a1971c/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51", size = 1596024, upload-time = "2026-01-03T17:30:05.132Z" },
- { url = "https://files.pythonhosted.org/packages/55/58/4345b5f26661a6180afa686c473620c30a66afdf120ed3dd545bbc809e85/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4", size = 1804590, upload-time = "2026-01-03T17:30:07.135Z" },
- { url = "https://files.pythonhosted.org/packages/7b/06/05950619af6c2df7e0a431d889ba2813c9f0129cec76f663e547a5ad56f2/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29", size = 1740355, upload-time = "2026-01-03T17:30:09.083Z" },
- { url = "https://files.pythonhosted.org/packages/3e/80/958f16de79ba0422d7c1e284b2abd0c84bc03394fbe631d0a39ffa10e1eb/aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239", size = 433701, upload-time = "2026-01-03T17:30:10.869Z" },
- { url = "https://files.pythonhosted.org/packages/dc/f2/27cdf04c9851712d6c1b99df6821a6623c3c9e55956d4b1e318c337b5a48/aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f", size = 457678, upload-time = "2026-01-03T17:30:12.719Z" },
- { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" },
- { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" },
- { url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" },
- { url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" },
- { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" },
- { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" },
- { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" },
- { url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" },
- { url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" },
- { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" },
- { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" },
- { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" },
- { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" },
- { url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" },
- { url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" },
- { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" },
- { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/7e/cb94129302d78c46662b47f9897d642fd0b33bdfef4b73b20c6ced35aa4c/aiohttp-3.13.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8ea0c64d1bcbf201b285c2246c51a0c035ba3bbd306640007bc5844a3b4658c1", size = 760027, upload-time = "2026-03-28T17:15:33.022Z" },
+ { url = "https://files.pythonhosted.org/packages/5e/cd/2db3c9397c3bd24216b203dd739945b04f8b87bb036c640da7ddb63c75ef/aiohttp-3.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f742e1fa45c0ed522b00ede565e18f97e4cf8d1883a712ac42d0339dfb0cce7", size = 508325, upload-time = "2026-03-28T17:15:34.714Z" },
+ { url = "https://files.pythonhosted.org/packages/36/a3/d28b2722ec13107f2e37a86b8a169897308bab6a3b9e071ecead9d67bd9b/aiohttp-3.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dcfb50ee25b3b7a1222a9123be1f9f89e56e67636b561441f0b304e25aaef8f", size = 502402, upload-time = "2026-03-28T17:15:36.409Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/d6/acd47b5f17c4430e555590990a4746efbcb2079909bb865516892bf85f37/aiohttp-3.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3262386c4ff370849863ea93b9ea60fd59c6cf56bf8f93beac625cf4d677c04d", size = 1771224, upload-time = "2026-03-28T17:15:38.223Z" },
+ { url = "https://files.pythonhosted.org/packages/98/af/af6e20113ba6a48fd1cd9e5832c4851e7613ef50c7619acdaee6ec5f1aff/aiohttp-3.13.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:473bb5aa4218dd254e9ae4834f20e31f5a0083064ac0136a01a62ddbae2eaa42", size = 1731530, upload-time = "2026-03-28T17:15:39.988Z" },
+ { url = "https://files.pythonhosted.org/packages/81/16/78a2f5d9c124ad05d5ce59a9af94214b6466c3491a25fb70760e98e9f762/aiohttp-3.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e56423766399b4c77b965f6aaab6c9546617b8994a956821cc507d00b91d978c", size = 1827925, upload-time = "2026-03-28T17:15:41.944Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/1f/79acf0974ced805e0e70027389fccbb7d728e6f30fcac725fb1071e63075/aiohttp-3.13.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8af249343fafd5ad90366a16d230fc265cf1149f26075dc9fe93cfd7c7173942", size = 1923579, upload-time = "2026-03-28T17:15:44.071Z" },
+ { url = "https://files.pythonhosted.org/packages/af/53/29f9e2054ea6900413f3b4c3eb9d8331f60678ec855f13ba8714c47fd48d/aiohttp-3.13.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bc0a5cf4f10ef5a2c94fdde488734b582a3a7a000b131263e27c9295bd682d9", size = 1767655, upload-time = "2026-03-28T17:15:45.911Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/57/462fe1d3da08109ba4aa8590e7aed57c059af2a7e80ec21f4bac5cfe1094/aiohttp-3.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5c7ff1028e3c9fc5123a865ce17df1cb6424d180c503b8517afbe89aa566e6be", size = 1630439, upload-time = "2026-03-28T17:15:48.11Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/4b/4813344aacdb8127263e3eec343d24e973421143826364fa9fc847f6283f/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ba5cf98b5dcb9bddd857da6713a503fa6d341043258ca823f0f5ab7ab4a94ee8", size = 1745557, upload-time = "2026-03-28T17:15:50.13Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/01/1ef1adae1454341ec50a789f03cfafe4c4ac9c003f6a64515ecd32fe4210/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d85965d3ba21ee4999e83e992fecb86c4614d6920e40705501c0a1f80a583c12", size = 1741796, upload-time = "2026-03-28T17:15:52.351Z" },
+ { url = "https://files.pythonhosted.org/packages/22/04/8cdd99af988d2aa6922714d957d21383c559835cbd43fbf5a47ddf2e0f05/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:49f0b18a9b05d79f6f37ddd567695943fcefb834ef480f17a4211987302b2dc7", size = 1805312, upload-time = "2026-03-28T17:15:54.407Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/7f/b48d5577338d4b25bbdbae35c75dbfd0493cb8886dc586fbfb2e90862239/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7f78cb080c86fbf765920e5f1ef35af3f24ec4314d6675d0a21eaf41f6f2679c", size = 1621751, upload-time = "2026-03-28T17:15:56.564Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/89/4eecad8c1858e6d0893c05929e22343e0ebe3aec29a8a399c65c3cc38311/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:67a3ec705534a614b68bbf1c70efa777a21c3da3895d1c44510a41f5a7ae0453", size = 1826073, upload-time = "2026-03-28T17:15:58.489Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/5c/9dc8293ed31b46c39c9c513ac7ca152b3c3d38e0ea111a530ad12001b827/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6630ec917e85c5356b2295744c8a97d40f007f96a1c76bf1928dc2e27465393", size = 1760083, upload-time = "2026-03-28T17:16:00.677Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/19/8bbf6a4994205d96831f97b7d21a0feed120136e6267b5b22d229c6dc4dc/aiohttp-3.13.4-cp311-cp311-win32.whl", hash = "sha256:54049021bc626f53a5394c29e8c444f726ee5a14b6e89e0ad118315b1f90f5e3", size = 439690, upload-time = "2026-03-28T17:16:02.902Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/f5/ac409ecd1007528d15c3e8c3a57d34f334c70d76cfb7128a28cffdebd4c1/aiohttp-3.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:c033f2bc964156030772d31cbf7e5defea181238ce1f87b9455b786de7d30145", size = 463824, upload-time = "2026-03-28T17:16:05.058Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/bd/ede278648914cabbabfdf95e436679b5d4156e417896a9b9f4587169e376/aiohttp-3.13.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ee62d4471ce86b108b19c3364db4b91180d13fe3510144872d6bad5401957360", size = 752158, upload-time = "2026-03-28T17:16:06.901Z" },
+ { url = "https://files.pythonhosted.org/packages/90/de/581c053253c07b480b03785196ca5335e3c606a37dc73e95f6527f1591fe/aiohttp-3.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0fd8f41b54b58636402eb493afd512c23580456f022c1ba2db0f810c959ed0d", size = 501037, upload-time = "2026-03-28T17:16:08.82Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/f9/a5ede193c08f13cc42c0a5b50d1e246ecee9115e4cf6e900d8dbd8fd6acb/aiohttp-3.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4baa48ce49efd82d6b1a0be12d6a36b35e5594d1dd42f8bfba96ea9f8678b88c", size = 501556, upload-time = "2026-03-28T17:16:10.63Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/10/88ff67cd48a6ec36335b63a640abe86135791544863e0cfe1f065d6cef7a/aiohttp-3.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d738ebab9f71ee652d9dbd0211057690022201b11197f9a7324fd4dba128aa97", size = 1757314, upload-time = "2026-03-28T17:16:12.498Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/15/fdb90a5cf5a1f52845c276e76298c75fbbcc0ac2b4a86551906d54529965/aiohttp-3.13.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0ce692c3468fa831af7dceed52edf51ac348cebfc8d3feb935927b63bd3e8576", size = 1731819, upload-time = "2026-03-28T17:16:14.558Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/df/28146785a007f7820416be05d4f28cc207493efd1e8c6c1068e9bdc29198/aiohttp-3.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e08abcfe752a454d2cb89ff0c08f2d1ecd057ae3e8cc6d84638de853530ebab", size = 1793279, upload-time = "2026-03-28T17:16:16.594Z" },
+ { url = "https://files.pythonhosted.org/packages/10/47/689c743abf62ea7a77774d5722f220e2c912a77d65d368b884d9779ef41b/aiohttp-3.13.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5977f701b3fff36367a11087f30ea73c212e686d41cd363c50c022d48b011d8d", size = 1891082, upload-time = "2026-03-28T17:16:18.71Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/b6/f7f4f318c7e58c23b761c9b13b9a3c9b394e0f9d5d76fbc6622fa98509f6/aiohttp-3.13.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54203e10405c06f8b6020bd1e076ae0fe6c194adcee12a5a78af3ffa3c57025e", size = 1773938, upload-time = "2026-03-28T17:16:21.125Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/06/f207cb3121852c989586a6fc16ff854c4fcc8651b86c5d3bd1fc83057650/aiohttp-3.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:358a6af0145bc4dda037f13167bef3cce54b132087acc4c295c739d05d16b1c3", size = 1579548, upload-time = "2026-03-28T17:16:23.588Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/58/e1289661a32161e24c1fe479711d783067210d266842523752869cc1d9c2/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:898ea1850656d7d61832ef06aa9846ab3ddb1621b74f46de78fbc5e1a586ba83", size = 1714669, upload-time = "2026-03-28T17:16:25.713Z" },
+ { url = "https://files.pythonhosted.org/packages/96/0a/3e86d039438a74a86e6a948a9119b22540bae037d6ba317a042ae3c22711/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7bc30cceb710cf6a44e9617e43eebb6e3e43ad855a34da7b4b6a73537d8a6763", size = 1754175, upload-time = "2026-03-28T17:16:28.18Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/30/e717fc5df83133ba467a560b6d8ef20197037b4bb5d7075b90037de1018e/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4a31c0c587a8a038f19a4c7e60654a6c899c9de9174593a13e7cc6e15ff271f9", size = 1762049, upload-time = "2026-03-28T17:16:30.941Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/28/8f7a2d4492e336e40005151bdd94baf344880a4707573378579f833a64c1/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2062f675f3fe6e06d6113eb74a157fb9df58953ffed0cdb4182554b116545758", size = 1570861, upload-time = "2026-03-28T17:16:32.953Z" },
+ { url = "https://files.pythonhosted.org/packages/78/45/12e1a3d0645968b1c38de4b23fdf270b8637735ea057d4f84482ff918ad9/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d1ba8afb847ff80626d5e408c1fdc99f942acc877d0702fe137015903a220a9", size = 1790003, upload-time = "2026-03-28T17:16:35.468Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/0f/60374e18d590de16dcb39d6ff62f39c096c1b958e6f37727b5870026ea30/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b08149419994cdd4d5eecf7fd4bc5986b5a9380285bcd01ab4c0d6bfca47b79d", size = 1737289, upload-time = "2026-03-28T17:16:38.187Z" },
+ { url = "https://files.pythonhosted.org/packages/02/bf/535e58d886cfbc40a8b0013c974afad24ef7632d645bca0b678b70033a60/aiohttp-3.13.4-cp312-cp312-win32.whl", hash = "sha256:fc432f6a2c4f720180959bc19aa37259651c1a4ed8af8afc84dd41c60f15f791", size = 434185, upload-time = "2026-03-28T17:16:40.735Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/1a/d92e3325134ebfff6f4069f270d3aac770d63320bd1fcd0eca023e74d9a8/aiohttp-3.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:6148c9ae97a3e8bff9a1fc9c757fa164116f86c100468339730e717590a3fb77", size = 461285, upload-time = "2026-03-28T17:16:42.713Z" },
]
[[package]]
@@ -361,15 +341,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096, upload-time = "2025-11-19T15:32:19.004Z" },
]
-[[package]]
-name = "async-timeout"
-version = "5.0.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" },
-]
-
[[package]]
name = "attrs"
version = "25.4.0"
@@ -450,23 +421,6 @@ version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f4/b1/36a5182ce1d8ef9ef32bff69037bd28b389bbdb66338f8069e61da7028cb/backports_zstd-1.3.0.tar.gz", hash = "sha256:e8b2d68e2812f5c9970cabc5e21da8b409b5ed04e79b4585dbffa33e9b45ebe2", size = 997138, upload-time = "2025-12-29T17:28:06.143Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ac/28/ed31a0e35feb4538a996348362051b52912d50f00d25c2d388eccef9242c/backports_zstd-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:249f90b39d3741c48620021a968b35f268ca70e35f555abeea9ff95a451f35f9", size = 435660, upload-time = "2025-12-29T17:25:55.207Z" },
- { url = "https://files.pythonhosted.org/packages/00/0d/3db362169d80442adda9dd563c4f0bb10091c8c1c9a158037f4ecd53988e/backports_zstd-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b0e71e83e46154a9d3ced6d4de9a2fea8207ee1e4832aeecf364dc125eda305c", size = 362056, upload-time = "2025-12-29T17:25:56.729Z" },
- { url = "https://files.pythonhosted.org/packages/bd/00/b67ba053a7d6f6dbe2f8a704b7d3a5e01b1d2e2e8edbc9b634f2702ef73c/backports_zstd-1.3.0-cp311-cp311-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:cbc6193acd21f96760c94dd71bf32b161223e8503f5277acb0a5ab54e5598957", size = 505957, upload-time = "2025-12-29T17:25:57.941Z" },
- { url = "https://files.pythonhosted.org/packages/6f/3e/2667c0ddb53ddf28667e330bf9fe92e8e17705a481c9b698e283120565f7/backports_zstd-1.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1df583adc0ae84a8d13d7139f42eade6d90182b1dd3e0d28f7df3c564b9fd55d", size = 475569, upload-time = "2025-12-29T17:25:59.075Z" },
- { url = "https://files.pythonhosted.org/packages/eb/86/4052473217bd954ccdffda5f7264a0e99e7c4ecf70c0f729845c6a45fc5a/backports_zstd-1.3.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d833fc23aa3cc2e05aeffc7cfadd87b796654ad3a7fb214555cda3f1db2d4dc2", size = 581196, upload-time = "2025-12-29T17:26:00.508Z" },
- { url = "https://files.pythonhosted.org/packages/e5/bd/064f6fdb61db3d2c473159ebc844243e650dc032de0f8208443a00127925/backports_zstd-1.3.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:142178fe981061f1d2a57c5348f2cd31a3b6397a35593e7a17dbda817b793a7f", size = 640888, upload-time = "2025-12-29T17:26:02.134Z" },
- { url = "https://files.pythonhosted.org/packages/d8/09/0822403f40932a165a4f1df289d41653683019e4fd7a86b63ed20e9b6177/backports_zstd-1.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5eed0a09a163f3a8125a857cb031be87ed052e4a47bc75085ed7fca786e9bb5b", size = 491100, upload-time = "2025-12-29T17:26:03.418Z" },
- { url = "https://files.pythonhosted.org/packages/a6/a3/f5ac28d74039b7e182a780809dc66b9dbfc893186f5d5444340bba135389/backports_zstd-1.3.0-cp311-cp311-manylinux_2_34_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:60aa483fef5843749e993dde01229e5eedebca8c283023d27d6bf6800d1d4ce3", size = 565071, upload-time = "2025-12-29T17:26:05.022Z" },
- { url = "https://files.pythonhosted.org/packages/e1/ac/50209aeb92257a642ee987afa1e61d5b6731ab6bf0bff70905856e5aede6/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ea0886c1b619773544546e243ed73f6d6c2b1ae3c00c904ccc9903a352d731e1", size = 481519, upload-time = "2025-12-29T17:26:06.255Z" },
- { url = "https://files.pythonhosted.org/packages/08/1f/b06f64199fb4b2e9437cedbf96d0155ca08aeec35fe81d41065acd44762e/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5e137657c830a5ce99be40a1d713eb1d246bae488ada28ff0666ac4387aebdd5", size = 509465, upload-time = "2025-12-29T17:26:07.602Z" },
- { url = "https://files.pythonhosted.org/packages/f4/37/2c365196e61c8fffbbc930ffd69f1ada7aa1c7210857b3e565031c787ac6/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94048c8089755e482e4b34608029cf1142523a625873c272be2b1c9253871a72", size = 585552, upload-time = "2025-12-29T17:26:08.911Z" },
- { url = "https://files.pythonhosted.org/packages/93/8d/c2c4f448bb6b6c9df17410eaedce415e8db0eb25b60d09a3d22a98294d09/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:d339c1ec40485e97e600eb9a285fb13169dbf44c5094b945788a62f38b96e533", size = 562893, upload-time = "2025-12-29T17:26:10.566Z" },
- { url = "https://files.pythonhosted.org/packages/74/e8/2110d4d39115130f7514cbbcec673a885f4052bb68d15e41bc96a7558856/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aeee9210c54cf8bf83f4d263a6d0d6e7a0298aeb5a14a0a95e90487c5c3157c", size = 631462, upload-time = "2025-12-29T17:26:11.99Z" },
- { url = "https://files.pythonhosted.org/packages/b9/a8/d64b59ae0714fdace14e43873f794eff93613e35e3e85eead33a4f44cd80/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba7114a3099e5ea05cbb46568bd0e08bca2ca11e12c6a7b563a24b86b2b4a67f", size = 495125, upload-time = "2025-12-29T17:26:13.218Z" },
- { url = "https://files.pythonhosted.org/packages/ef/d8/bcff0a091fcf27172c57ae463e49d8dec6dc31e01d7e7bf1ae3aad9c3566/backports_zstd-1.3.0-cp311-cp311-win32.whl", hash = "sha256:08dfdfb85da5915383bfae680b6ac10ab5769ab22e690f9a854320720011ae8e", size = 288664, upload-time = "2025-12-29T17:26:14.791Z" },
- { url = "https://files.pythonhosted.org/packages/28/1a/379061e2abf8c3150ad51c1baab9ac723e01cf7538860a6a74c48f8b73ee/backports_zstd-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8aac2e7cdcc8f310c16f98a0062b48d0a081dbb82862794f4f4f5bdafde30a4", size = 313633, upload-time = "2025-12-29T17:26:16.31Z" },
- { url = "https://files.pythonhosted.org/packages/35/e7/eca40858883029fc716660106069b23253e2ec5fd34e86b4101c8cfe864b/backports_zstd-1.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:440ef1be06e82dc0d69dbb57177f2ce98bbd2151013ee7e551e2f2b54caa6120", size = 288814, upload-time = "2025-12-29T17:26:17.571Z" },
{ url = "https://files.pythonhosted.org/packages/72/d4/356da49d3053f4bc50e71a8535631b57bc9ca4e8c6d2442e073e0ab41c44/backports_zstd-1.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f4a292e357f3046d18766ce06d990ccbab97411708d3acb934e63529c2ea7786", size = 435972, upload-time = "2025-12-29T17:26:18.752Z" },
{ url = "https://files.pythonhosted.org/packages/30/8f/dbe389e60c7e47af488520f31a4aa14028d66da5bf3c60d3044b571eb906/backports_zstd-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb4c386f38323698991b38edcc9c091d46d4713f5df02a3b5c80a28b40e289ea", size = 362124, upload-time = "2025-12-29T17:26:19.995Z" },
{ url = "https://files.pythonhosted.org/packages/55/4b/173beafc99e99e7276ce008ef060b704471e75124c826bc5e2092815da37/backports_zstd-1.3.0-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f52523d2bdada29e653261abdc9cfcecd9e5500d305708b7e37caddb24909d4e", size = 506378, upload-time = "2025-12-29T17:26:21.855Z" },
@@ -484,12 +438,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/a9/67a24007c333ed22736d5cd79f1aa1d7209f09be772ff82a8fd724c1978e/backports_zstd-1.3.0-cp312-cp312-win32.whl", hash = "sha256:21a9a542ccc7958ddb51ae6e46d8ed25d585b54d0d52aaa1c8da431ea158046a", size = 288809, upload-time = "2025-12-29T17:26:38.373Z" },
{ url = "https://files.pythonhosted.org/packages/42/24/34b816118ea913debb2ea23e71ffd0fb2e2ac738064c4ac32e3fb62c18bb/backports_zstd-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:89ea8281821123b071a06b30b80da8e4d8a2b40a4f57315a19850337a21297ac", size = 313815, upload-time = "2025-12-29T17:26:39.665Z" },
{ url = "https://files.pythonhosted.org/packages/4e/2f/babd02c9fc4ca35376ada7c291193a208165c7be2455f0f98bc1e1243f31/backports_zstd-1.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:f6843ecb181480e423b02f60fe29e393cbc31a95fb532acdf0d3a2c87bd50ce3", size = 288927, upload-time = "2025-12-29T17:26:40.923Z" },
- { url = "https://files.pythonhosted.org/packages/9a/d9/8c9c246e5ea79a4f45d551088b11b61f2dc7efcdc5dbe6df3be84a506e0c/backports_zstd-1.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:968167d29f012cee7b112ad031a8925e484e97e99288e55e4d62962c3a1013e3", size = 409666, upload-time = "2025-12-29T17:27:57.37Z" },
- { url = "https://files.pythonhosted.org/packages/a4/4f/a55b33c314ca8c9074e99daab54d04c5d212070ae7dbc435329baf1b139e/backports_zstd-1.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8f6fc7d62b71083b574193dd8fb3a60e6bb34880cc0132aad242943af301f7a", size = 339199, upload-time = "2025-12-29T17:27:58.542Z" },
- { url = "https://files.pythonhosted.org/packages/9d/13/ce31bd048b1c88d0f65d7af60b6cf89cfbed826c7c978f0ebca9a8a71cfc/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:e0f2eca6aac280fdb77991ad3362487ee91a7fb064ad40043fb5a0bf5a376943", size = 420332, upload-time = "2025-12-29T17:28:00.332Z" },
- { url = "https://files.pythonhosted.org/packages/cf/80/c0cdbc533d0037b57248588403a3afb050b2a83b8c38aa608e31b3a4d600/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:676eb5e177d4ef528cf3baaeea4fffe05f664e4dd985d3ac06960ef4619c81a9", size = 393879, upload-time = "2025-12-29T17:28:01.57Z" },
- { url = "https://files.pythonhosted.org/packages/0f/38/c97428867cac058ed196ccaeddfdf82ecd43b8a65965f2950a6e7547e77a/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:199eb9bd8aca6a9d489c41a682fad22c587dffe57b613d0fe6d492d0d38ce7c5", size = 413842, upload-time = "2025-12-29T17:28:03.113Z" },
- { url = "https://files.pythonhosted.org/packages/8d/ec/6247be6536668fe1c7dfae3eaa9c94b00b956b716957c0fc986ba78c3cc4/backports_zstd-1.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2524bd6777a828d5e7ccd7bd1a57f9e7007ae654fc2bd1bc1a207f6428674e4a", size = 299684, upload-time = "2025-12-29T17:28:04.856Z" },
]
[[package]]
@@ -555,10 +503,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" },
{ url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" },
{ url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" },
- { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" },
- { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" },
- { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" },
- { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" },
]
[[package]]
@@ -613,13 +557,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/d0/d0/d8cc8c9a4488a787e7fa430f6055e5bd1ddb22c340a751d9e901b82e2efe/blis-1.3.3.tar.gz", hash = "sha256:034d4560ff3cc43e8aa37e188451b0440e3261d989bb8a42ceee865607715ecd", size = 2644873, upload-time = "2025-11-17T12:28:30.511Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a1/0a/a4c8736bc497d386b0ffc76d321f478c03f1a4725e52092f93b38beb3786/blis-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e10c8d3e892b1dbdff365b9d00e08291876fc336915bf1a5e9f188ed087e1a91", size = 6925522, upload-time = "2025-11-17T12:27:29.199Z" },
- { url = "https://files.pythonhosted.org/packages/83/5a/3437009282f23684ecd3963a8b034f9307cdd2bf4484972e5a6b096bf9ac/blis-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66e6249564f1db22e8af1e0513ff64134041fa7e03c8dd73df74db3f4d8415a7", size = 1232787, upload-time = "2025-11-17T12:27:30.996Z" },
- { url = "https://files.pythonhosted.org/packages/d1/0e/82221910d16259ce3017c1442c468a3f206a4143a96fbba9f5b5b81d62e8/blis-1.3.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7260da065958b4e5475f62f44895ef9d673b0f47dcf61b672b22b7dae1a18505", size = 2844596, upload-time = "2025-11-17T12:27:32.601Z" },
- { url = "https://files.pythonhosted.org/packages/6c/93/ab547f1a5c23e20bca16fbcf04021c32aac3f969be737ea4980509a7ca90/blis-1.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e9327a6ca67de8ae76fe071e8584cc7f3b2e8bfadece4961d40f2826e1cda2df", size = 11377746, upload-time = "2025-11-17T12:27:35.342Z" },
- { url = "https://files.pythonhosted.org/packages/6e/a6/7733820aa62da32526287a63cd85c103b2b323b186c8ee43b7772ff7017c/blis-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c4ae70629cf302035d268858a10ca4eb6242a01b2dc8d64422f8e6dcb8a8ee74", size = 3041954, upload-time = "2025-11-17T12:27:37.479Z" },
- { url = "https://files.pythonhosted.org/packages/87/53/e39d67fd3296b649772780ca6aab081412838ecb54e0b0c6432d01626a50/blis-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45866a9027d43b93e8b59980a23c5d7358b6536fc04606286e39fdcfce1101c2", size = 14251222, upload-time = "2025-11-17T12:27:39.705Z" },
- { url = "https://files.pythonhosted.org/packages/ea/44/b749f8777b020b420bceaaf60f66432fc30cc904ca5b69640ec9cbef11ed/blis-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:27f82b8633030f8d095d2b412dffa7eb6dbc8ee43813139909a20012e54422ea", size = 6171233, upload-time = "2025-11-17T12:27:41.921Z" },
{ url = "https://files.pythonhosted.org/packages/16/d1/429cf0cf693d4c7dc2efed969bd474e315aab636e4a95f66c4ed7264912d/blis-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a1c74e100665f8e918ebdbae2794576adf1f691680b5cdb8b29578432f623ef", size = 6929663, upload-time = "2025-11-17T12:27:44.482Z" },
{ url = "https://files.pythonhosted.org/packages/11/69/363c8df8d98b3cc97be19aad6aabb2c9c53f372490d79316bdee92d476e7/blis-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3f6c595185176ce021316263e1a1d636a3425b6c48366c1fd712d08d0b71849a", size = 1230939, upload-time = "2025-11-17T12:27:46.19Z" },
{ url = "https://files.pythonhosted.org/packages/96/2a/fbf65d906d823d839076c5150a6f8eb5ecbc5f9135e0b6510609bda1e6b7/blis-1.3.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d734b19fba0be7944f272dfa7b443b37c61f9476d9ab054a9ac53555ceadd2e0", size = 2818835, upload-time = "2025-11-17T12:27:48.167Z" },
@@ -650,7 +587,6 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore-stubs" },
{ name = "types-s3transfer" },
- { name = "typing-extensions", marker = "python_full_version < '3.12'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/03/16/4bdb3c1f69bf7b97dd8b22fe5b007e9da67ba3f00ed10e47146f5fd9d0ff/boto3_stubs-1.42.78.tar.gz", hash = "sha256:423335b8ce9a935e404054978589cdb98d9fa1d4bd46073d6821bf1c3fad8ca7", size = 101602, upload-time = "2026-03-27T19:35:51.149Z" }
wheels = [
@@ -697,13 +633,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/14/d8/6d641573e210768816023a64966d66463f2ce9fc9945fa03290c8a18f87c/bottleneck-1.6.0.tar.gz", hash = "sha256:028d46ee4b025ad9ab4d79924113816f825f62b17b87c9e1d0d8ce144a4a0e31", size = 104311, upload-time = "2025-09-08T16:30:38.617Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/83/96/9d51012d729f97de1e75aad986f3ba50956742a40fc99cbab4c2aa896c1c/bottleneck-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:69ef4514782afe39db2497aaea93b1c167ab7ab3bc5e3930500ef9cf11841db7", size = 100400, upload-time = "2025-09-08T16:29:44.464Z" },
- { url = "https://files.pythonhosted.org/packages/16/f4/4fcbebcbc42376a77e395a6838575950587e5eb82edf47d103f8daa7ba22/bottleneck-1.6.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:727363f99edc6dc83d52ed28224d4cb858c07a01c336c7499c0c2e5dd4fd3e4a", size = 375920, upload-time = "2025-09-08T16:29:45.52Z" },
- { url = "https://files.pythonhosted.org/packages/36/13/7fa8cdc41cbf2dfe0540f98e1e0caf9ffbd681b1a0fc679a91c2698adaf9/bottleneck-1.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:847671a9e392220d1dfd2ff2524b4d61ec47b2a36ea78e169d2aa357fd9d933a", size = 367922, upload-time = "2025-09-08T16:29:46.743Z" },
- { url = "https://files.pythonhosted.org/packages/13/7d/dccfa4a2792c1bdc0efdde8267e527727e517df1ff0d4976b84e0268c2f9/bottleneck-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:daef2603ab7b4ec4f032bb54facf5fa92dacd3a264c2fd9677c9fc22bcb5a245", size = 361379, upload-time = "2025-09-08T16:29:48.042Z" },
- { url = "https://files.pythonhosted.org/packages/93/42/21c0fad823b71c3a8904cbb847ad45136d25573a2d001a9cff48d3985fab/bottleneck-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fc7f09bda980d967f2e9f1a746eda57479f824f66de0b92b9835c431a8c922d4", size = 371911, upload-time = "2025-09-08T16:29:49.366Z" },
- { url = "https://files.pythonhosted.org/packages/3b/b0/830ff80f8c74577d53034c494639eac7a0ffc70935c01ceadfbe77f590c2/bottleneck-1.6.0-cp311-cp311-win32.whl", hash = "sha256:1f78bad13ad190180f73cceb92d22f4101bde3d768f4647030089f704ae7cac7", size = 107831, upload-time = "2025-09-08T16:29:51.397Z" },
- { url = "https://files.pythonhosted.org/packages/6f/42/01d4920b0aa51fba503f112c90714547609bbe17b6ecfc1c7ae1da3183df/bottleneck-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:8f2adef59fdb9edf2983fe3a4c07e5d1b677c43e5669f4711da2c3daad8321ad", size = 113358, upload-time = "2025-09-08T16:29:52.602Z" },
{ url = "https://files.pythonhosted.org/packages/8d/72/7e3593a2a3dd69ec831a9981a7b1443647acb66a5aec34c1620a5f7f8498/bottleneck-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bb16a16a86a655fdbb34df672109a8a227bb5f9c9cf5bb8ae400a639bc52fa3", size = 100515, upload-time = "2025-09-08T16:29:55.141Z" },
{ url = "https://files.pythonhosted.org/packages/b5/d4/e7bbea08f4c0f0bab819d38c1a613da5f194fba7b19aae3e2b3a27e78886/bottleneck-1.6.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0fbf5d0787af9aee6cef4db9cdd14975ce24bd02e0cc30155a51411ebe2ff35f", size = 377451, upload-time = "2025-09-08T16:29:56.718Z" },
{ url = "https://files.pythonhosted.org/packages/fe/80/a6da430e3b1a12fd85f9fe90d3ad8fe9a527ecb046644c37b4b3f4baacfc/bottleneck-1.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d08966f4a22384862258940346a72087a6f7cebb19038fbf3a3f6690ee7fd39f", size = 368303, upload-time = "2025-09-08T16:29:57.834Z" },
@@ -719,16 +648,6 @@ version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f7/16/c92ca344d646e71a43b8bb353f0a6490d7f6e06210f8554c8f874e454285/brotli-1.2.0.tar.gz", hash = "sha256:e310f77e41941c13340a95976fe66a8a95b01e783d430eeaf7a2f87e0a57dd0a", size = 7388632, upload-time = "2025-11-05T18:39:42.86Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/7a/ef/f285668811a9e1ddb47a18cb0b437d5fc2760d537a2fe8a57875ad6f8448/brotli-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:15b33fe93cedc4caaff8a0bd1eb7e3dab1c61bb22a0bf5bdfdfd97cd7da79744", size = 863110, upload-time = "2025-11-05T18:38:12.978Z" },
- { url = "https://files.pythonhosted.org/packages/50/62/a3b77593587010c789a9d6eaa527c79e0848b7b860402cc64bc0bc28a86c/brotli-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:898be2be399c221d2671d29eed26b6b2713a02c2119168ed914e7d00ceadb56f", size = 445438, upload-time = "2025-11-05T18:38:14.208Z" },
- { url = "https://files.pythonhosted.org/packages/cd/e1/7fadd47f40ce5549dc44493877db40292277db373da5053aff181656e16e/brotli-1.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350c8348f0e76fff0a0fd6c26755d2653863279d086d3aa2c290a6a7251135dd", size = 1534420, upload-time = "2025-11-05T18:38:15.111Z" },
- { url = "https://files.pythonhosted.org/packages/12/8b/1ed2f64054a5a008a4ccd2f271dbba7a5fb1a3067a99f5ceadedd4c1d5a7/brotli-1.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1ad3fda65ae0d93fec742a128d72e145c9c7a99ee2fcd667785d99eb25a7fe", size = 1632619, upload-time = "2025-11-05T18:38:16.094Z" },
- { url = "https://files.pythonhosted.org/packages/89/5a/7071a621eb2d052d64efd5da2ef55ecdac7c3b0c6e4f9d519e9c66d987ef/brotli-1.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40d918bce2b427a0c4ba189df7a006ac0c7277c180aee4617d99e9ccaaf59e6a", size = 1426014, upload-time = "2025-11-05T18:38:17.177Z" },
- { url = "https://files.pythonhosted.org/packages/26/6d/0971a8ea435af5156acaaccec1a505f981c9c80227633851f2810abd252a/brotli-1.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2a7f1d03727130fc875448b65b127a9ec5d06d19d0148e7554384229706f9d1b", size = 1489661, upload-time = "2025-11-05T18:38:18.41Z" },
- { url = "https://files.pythonhosted.org/packages/f3/75/c1baca8b4ec6c96a03ef8230fab2a785e35297632f402ebb1e78a1e39116/brotli-1.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9c79f57faa25d97900bfb119480806d783fba83cd09ee0b33c17623935b05fa3", size = 1599150, upload-time = "2025-11-05T18:38:19.792Z" },
- { url = "https://files.pythonhosted.org/packages/0d/1a/23fcfee1c324fd48a63d7ebf4bac3a4115bdb1b00e600f80f727d850b1ae/brotli-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:844a8ceb8483fefafc412f85c14f2aae2fb69567bf2a0de53cdb88b73e7c43ae", size = 1493505, upload-time = "2025-11-05T18:38:20.913Z" },
- { url = "https://files.pythonhosted.org/packages/36/e5/12904bbd36afeef53d45a84881a4810ae8810ad7e328a971ebbfd760a0b3/brotli-1.2.0-cp311-cp311-win32.whl", hash = "sha256:aa47441fa3026543513139cb8926a92a8e305ee9c71a6209ef7a97d91640ea03", size = 334451, upload-time = "2025-11-05T18:38:21.94Z" },
- { url = "https://files.pythonhosted.org/packages/02/8b/ecb5761b989629a4758c394b9301607a5880de61ee2ee5fe104b87149ebc/brotli-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:022426c9e99fd65d9475dce5c195526f04bb8be8907607e27e747893f6ee3e24", size = 369035, upload-time = "2025-11-05T18:38:22.941Z" },
{ url = "https://files.pythonhosted.org/packages/11/ee/b0a11ab2315c69bb9b45a2aaed022499c9c24a205c3a49c3513b541a7967/brotli-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:35d382625778834a7f3061b15423919aa03e4f5da34ac8e02c074e4b75ab4f84", size = 861543, upload-time = "2025-11-05T18:38:24.183Z" },
{ url = "https://files.pythonhosted.org/packages/e1/2f/29c1459513cd35828e25531ebfcbf3e92a5e49f560b1777a9af7203eb46e/brotli-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a61c06b334bd99bc5ae84f1eeb36bfe01400264b3c352f968c6e30a10f9d08b", size = 444288, upload-time = "2025-11-05T18:38:25.139Z" },
{ url = "https://files.pythonhosted.org/packages/3d/6f/feba03130d5fceadfa3a1bb102cb14650798c848b1df2a808356f939bb16/brotli-1.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:acec55bb7c90f1dfc476126f9711a8e81c9af7fb617409a9ee2953115343f08d", size = 1528071, upload-time = "2025-11-05T18:38:26.081Z" },
@@ -746,7 +665,7 @@ name = "brotlicffi"
version = "1.2.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "cffi", marker = "platform_python_implementation == 'PyPy'" },
+ { name = "cffi" },
]
sdist = { url = "https://files.pythonhosted.org/packages/84/85/57c314a6b35336efbbdc13e5fc9ae13f6b60a0647cfa7c1221178ac6d8ae/brotlicffi-1.2.0.0.tar.gz", hash = "sha256:34345d8d1f9d534fcac2249e57a4c3c8801a33c9942ff9f8574f67a175e17adb", size = 476682, upload-time = "2025-11-21T18:17:57.334Z" }
wheels = [
@@ -755,10 +674,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/9c/d51486bf366fc7d6735f0e46b5b96ca58dc005b250263525a1eea3cd5d21/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33cfb408d0cff64cd50bef268c0fed397c46fbb53944aa37264148614a62e990", size = 1536547, upload-time = "2025-11-21T18:17:45.729Z" },
{ url = "https://files.pythonhosted.org/packages/1b/37/293a9a0a7caf17e6e657668bebb92dfe730305999fe8c0e2703b8888789c/brotlicffi-1.2.0.0-cp38-abi3-win32.whl", hash = "sha256:23e5c912fdc6fd37143203820230374d24babd078fc054e18070a647118158f6", size = 343085, upload-time = "2025-11-21T18:17:48.887Z" },
{ url = "https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586, upload-time = "2025-11-21T18:17:50.531Z" },
- { url = "https://files.pythonhosted.org/packages/a4/ec/52488a0563f1663e2ccc75834b470650f4b8bcdea3132aef3bf67219c661/brotlicffi-1.2.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fa102a60e50ddbd08de86a63431a722ea216d9bc903b000bf544149cc9b823dc", size = 402002, upload-time = "2025-11-21T18:17:51.76Z" },
- { url = "https://files.pythonhosted.org/packages/e4/63/d4aea4835fd97da1401d798d9b8ba77227974de565faea402f520b37b10f/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d3c4332fc808a94e8c1035950a10d04b681b03ab585ce897ae2a360d479037c", size = 406447, upload-time = "2025-11-21T18:17:53.614Z" },
- { url = "https://files.pythonhosted.org/packages/62/4e/5554ecb2615ff035ef8678d4e419549a0f7a28b3f096b272174d656749fb/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb4eb5830026b79a93bf503ad32b2c5257315e9ffc49e76b2715cffd07c8e3db", size = 402521, upload-time = "2025-11-21T18:17:54.875Z" },
- { url = "https://files.pythonhosted.org/packages/b5/d3/b07f8f125ac52bbee5dc00ef0d526f820f67321bf4184f915f17f50a4657/brotlicffi-1.2.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3832c66e00d6d82087f20a972b2fc03e21cd99ef22705225a6f8f418a9158ecc", size = 374730, upload-time = "2025-11-21T18:17:56.334Z" },
]
[[package]]
@@ -855,19 +770,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" },
- { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" },
- { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" },
- { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" },
- { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" },
- { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" },
- { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" },
- { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" },
- { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" },
- { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" },
- { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" },
- { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" },
- { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" },
{ url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
{ url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
{ url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
@@ -897,22 +799,6 @@ version = "3.4.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" },
- { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" },
- { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" },
- { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" },
- { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" },
- { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" },
- { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" },
- { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" },
- { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" },
- { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" },
- { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" },
- { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" },
- { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" },
- { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" },
- { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" },
- { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" },
{ url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
{ url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
{ url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
@@ -941,11 +827,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/73/09/10d57569e399ce9cbc5eee2134996581c957f63a9addfa6ca657daf006b8/chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7", size = 32256, upload-time = "2024-07-22T20:19:29.259Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f5/af/d15fdfed2a204c0f9467ad35084fbac894c755820b203e62f5dcba2d41f1/chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca", size = 196911, upload-time = "2024-07-22T20:18:33.46Z" },
- { url = "https://files.pythonhosted.org/packages/0d/19/aa6f2139f1ff7ad23a690ebf2a511b2594ab359915d7979f76f3213e46c4/chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f", size = 185000, upload-time = "2024-07-22T20:18:36.16Z" },
- { url = "https://files.pythonhosted.org/packages/79/b1/1b269c750e985ec7d40b9bbe7d66d0a890e420525187786718e7f6b07913/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170", size = 2377289, upload-time = "2024-07-22T20:18:37.761Z" },
- { url = "https://files.pythonhosted.org/packages/c7/2d/d5663e134436e5933bc63516a20b5edc08b4c1b1588b9680908a5f1afd04/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9", size = 2411755, upload-time = "2024-07-22T20:18:39.949Z" },
- { url = "https://files.pythonhosted.org/packages/3e/79/1bce519cf186112d6d5ce2985392a89528c6e1e9332d680bf752694a4cdf/chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3", size = 151888, upload-time = "2024-07-22T20:18:45.003Z" },
{ url = "https://files.pythonhosted.org/packages/93/ac/782b8d72de1c57b64fdf5cb94711540db99a92768d93d973174c62d45eb8/chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7", size = 197804, upload-time = "2024-07-22T20:18:46.442Z" },
{ url = "https://files.pythonhosted.org/packages/32/4e/fd9ce0764228e9a98f6ff46af05e92804090b5557035968c5b4198bc7af9/chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912", size = 185421, upload-time = "2024-07-22T20:18:47.72Z" },
{ url = "https://files.pythonhosted.org/packages/d9/3d/b59a8dedebd82545d873235ef2d06f95be244dfece7ee4a1a6044f080b18/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4", size = 2389672, upload-time = "2024-07-22T20:18:49.583Z" },
@@ -1074,14 +955,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/ec/59/c0b0a2c2e4c204e5baeca4917a95cc95add651da3cec86ec464a8e54cfa0/clickhouse_connect-0.15.0.tar.gz", hash = "sha256:529fcf072df335d18ae16339d99389190f4bd543067dcdc174541c7a9c622ef5", size = 126344, upload-time = "2026-03-26T18:34:52.316Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/83/b0/bf4a169a1b4e5e19f5e884596937ce13855146a3f4b3225228a87701fd18/clickhouse_connect-0.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f0928fdfb408d314c0e5151caf30b1c3bd56c2812ffdbc8d262fb60c0e7ab28", size = 284805, upload-time = "2026-03-26T18:33:18.659Z" },
- { url = "https://files.pythonhosted.org/packages/ec/d5/63dd572db91bd5e1231d7b7dc63591c52ffbbf653a57f9b8449681815976/clickhouse_connect-0.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6486b02825ac87f57811710e5a9a2da8531bb3c88bcb154fd5c7378742a33d66", size = 277846, upload-time = "2026-03-26T18:33:20.171Z" },
- { url = "https://files.pythonhosted.org/packages/e4/d6/192130a807de130945cc451e17c89ac6183625b8028026e5a4a7fc46fa59/clickhouse_connect-0.15.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f2df9c2fd97b40c6493232e0cbf516d8ba268165c6161851ef15f4f1fd0456e", size = 1096969, upload-time = "2026-03-26T18:33:21.728Z" },
- { url = "https://files.pythonhosted.org/packages/32/46/f2895cc4240ef45a2a274d4323f6858c0860034efe6c9a1c7168f1d8cecd/clickhouse_connect-0.15.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5a349d19c63abb49c884afe0a0387823045831f005451e85c09c032f953f1c1", size = 1101890, upload-time = "2026-03-26T18:33:23.038Z" },
- { url = "https://files.pythonhosted.org/packages/e8/69/dcecbca254b45525ad3fd8294441ac9cf8a8a8bd1fa8fd6b93e241b377a3/clickhouse_connect-0.15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4d80205cbdbface6d2f35fbd65a6f85caf2b59ec65f2e9dd190f11e335fe7316", size = 1083561, upload-time = "2026-03-26T18:33:24.64Z" },
- { url = "https://files.pythonhosted.org/packages/69/10/21f0cb98453d9710aaeb92f9a9e156e909c1ac72e57210a48b0f615916a7/clickhouse_connect-0.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c3c84dfebf49ec7a2cd9ac31c46986f7a81b43ea781d23ef7d607907fcc6de5d", size = 1106257, upload-time = "2026-03-26T18:33:26.257Z" },
- { url = "https://files.pythonhosted.org/packages/70/91/ae0f5c8df5dc650f1ab327d4b40cde7e18bf9e8b3507764dce320c328092/clickhouse_connect-0.15.0-cp311-cp311-win32.whl", hash = "sha256:d2bbdccf9cd838b990576d3f7d1e6a0ab5c3a5c8eb830394258b7b225531fe74", size = 256591, upload-time = "2026-03-26T18:33:27.869Z" },
- { url = "https://files.pythonhosted.org/packages/e6/7f/85673ff522554ef76e17b5d267816c199a731fde836ef957b0960655f251/clickhouse_connect-0.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:1c4223d557bc0a3919cb7ce0d749d9091123b6e61341e028ffc09b7f9c847ac2", size = 274778, upload-time = "2026-03-26T18:33:29.02Z" },
{ url = "https://files.pythonhosted.org/packages/f5/be/86e149c60822caed29e4435acac4fc73e20fddfb0b56ea6452bc7a08ab10/clickhouse_connect-0.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d51f49694e9007564bfd8dac51a1f9e60b94d6c93a07eb4027113a2e62bbb384", size = 286680, upload-time = "2026-03-26T18:33:30.219Z" },
{ url = "https://files.pythonhosted.org/packages/aa/65/c38cc5028afa2ccd9e8ff65611434063c0c5c1b6edadc507dbbc80a09bfd/clickhouse_connect-0.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a48fbad9ebc2b6d1cd01d1f9b5d6740081f1c84f1aacc9f91651be949f6b6ed", size = 277579, upload-time = "2026-03-26T18:33:31.474Z" },
{ url = "https://files.pythonhosted.org/packages/0a/ef/c8b2ef597fefd04e8b7c017c991552162cb89b7cb73bfdd6225b1c79e2fe/clickhouse_connect-0.15.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36e1ae470b94cc56d270461c8626c8fd4dac16e6c1ffa8477f21c012462e22cf", size = 1121630, upload-time = "2026-03-26T18:33:32.983Z" },
@@ -1199,12 +1072,6 @@ version = "4.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/73/2f/8f92e743a91c2f4e2ebad0bcfc31ef386c817c64415d89bf44e64dde227a/couchbase-4.5.0.tar.gz", hash = "sha256:fb74386ea5e807ae12cfa294fa6740fe6be3ecaf3bb9ce4fb9ea73706ed05982", size = 6562752, upload-time = "2025-09-30T01:27:37.423Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ca/a7/ba28fcab4f211e570582990d9592d8a57566158a0712fbc9d0d9ac486c2a/couchbase-4.5.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:3d3258802baa87d9ffeccbb2b31dcabe2a4ef27c9be81e0d3d710fd7436da24a", size = 5037084, upload-time = "2025-09-30T01:25:16.748Z" },
- { url = "https://files.pythonhosted.org/packages/85/38/f26912b56a41f22ab9606304014ef1435fc4bef76144382f91c1a4ce1d4c/couchbase-4.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:18b47f1f3a2007f88203f611570d96e62bb1fb9568dec0483a292a5e87f6d1df", size = 4323514, upload-time = "2025-09-30T01:25:22.628Z" },
- { url = "https://files.pythonhosted.org/packages/35/a6/5ef140f8681a2488ed6eb2a2bc9fc918b6f11e9f71bbad75e4de73b8dbf3/couchbase-4.5.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9c2a16830db9437aae92e31f9ceda6c7b70707e316152fc99552b866b09a1967", size = 5181111, upload-time = "2025-09-30T01:25:30.538Z" },
- { url = "https://files.pythonhosted.org/packages/7b/2e/1f0f06e920dbae07c3d8af6b2af3d5213e43d3825e0931c19564fe4d5c1b/couchbase-4.5.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4a86774680e46488a7955c6eae8fba5200a1fd5f9de9ac0a34acb6c87dc2b513", size = 5442969, upload-time = "2025-09-30T01:25:37.976Z" },
- { url = "https://files.pythonhosted.org/packages/9a/2e/6ece47df4d987dbeaae3fdcf7aa4d6a8154c949c28e925f01074dfd0b8b8/couchbase-4.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b68dae005ab4c157930c76a3116e478df25aa1af00fa10cc1cc755df1831ad59", size = 6108562, upload-time = "2025-09-30T01:25:45.674Z" },
- { url = "https://files.pythonhosted.org/packages/be/a7/2f84a1d117cf70ad30e8b08ae9b1c4a03c65146bab030ed6eb84f454045b/couchbase-4.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbc50956fb68d42929d21d969f4512b38798259ae48c47cbf6d676cc3a01b058", size = 4269303, upload-time = "2025-09-30T01:25:49.341Z" },
{ url = "https://files.pythonhosted.org/packages/2f/bc/3b00403edd8b188a93f48b8231dbf7faf7b40d318d3e73bb0e68c4965bbd/couchbase-4.5.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:be1ac2bf7cbccf28eebd7fa8b1d7199fbe84c96b0f7f2c0d69963b1d6ce53985", size = 5128307, upload-time = "2025-09-30T01:25:53.615Z" },
{ url = "https://files.pythonhosted.org/packages/7f/52/2ccfa8c8650cc341813713a47eeeb8ad13a25e25b0f4747d224106602a24/couchbase-4.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:035c394d38297c484bd57fc92b27f6a571a36ab5675b4ec873fd15bf65e8f28e", size = 4326149, upload-time = "2025-09-30T01:25:57.524Z" },
{ url = "https://files.pythonhosted.org/packages/32/80/fe3f074f321474c824ec67b97c5c4aa99047d45c777bb29353f9397c6604/couchbase-4.5.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:117685f6827abbc332e151625b0a9890c2fafe0d3c3d9e564b903d5c411abe5d", size = 5184623, upload-time = "2025-09-30T01:26:02.166Z" },
@@ -1219,21 +1086,6 @@ version = "7.13.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/4b/37/d24c8f8220ff07b839b2c043ea4903a33b0f455abe673ae3c03bbdb7f212/coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d", size = 219381, upload-time = "2026-03-17T10:30:14.68Z" },
- { url = "https://files.pythonhosted.org/packages/35/8b/cd129b0ca4afe886a6ce9d183c44d8301acbd4ef248622e7c49a23145605/coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587", size = 219880, upload-time = "2026-03-17T10:30:16.231Z" },
- { url = "https://files.pythonhosted.org/packages/55/2f/e0e5b237bffdb5d6c530ce87cc1d413a5b7d7dfd60fb067ad6d254c35c76/coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642", size = 250303, upload-time = "2026-03-17T10:30:17.748Z" },
- { url = "https://files.pythonhosted.org/packages/92/be/b1afb692be85b947f3401375851484496134c5554e67e822c35f28bf2fbc/coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b", size = 252218, upload-time = "2026-03-17T10:30:19.804Z" },
- { url = "https://files.pythonhosted.org/packages/da/69/2f47bb6fa1b8d1e3e5d0c4be8ccb4313c63d742476a619418f85740d597b/coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686", size = 254326, upload-time = "2026-03-17T10:30:21.321Z" },
- { url = "https://files.pythonhosted.org/packages/d5/d0/79db81da58965bd29dabc8f4ad2a2af70611a57cba9d1ec006f072f30a54/coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743", size = 256267, upload-time = "2026-03-17T10:30:23.094Z" },
- { url = "https://files.pythonhosted.org/packages/e5/32/d0d7cc8168f91ddab44c0ce4806b969df5f5fdfdbb568eaca2dbc2a04936/coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75", size = 250430, upload-time = "2026-03-17T10:30:25.311Z" },
- { url = "https://files.pythonhosted.org/packages/4d/06/a055311d891ddbe231cd69fdd20ea4be6e3603ffebddf8704b8ca8e10a3c/coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209", size = 252017, upload-time = "2026-03-17T10:30:27.284Z" },
- { url = "https://files.pythonhosted.org/packages/d6/f6/d0fd2d21e29a657b5f77a2fe7082e1568158340dceb941954f776dce1b7b/coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a", size = 250080, upload-time = "2026-03-17T10:30:29.481Z" },
- { url = "https://files.pythonhosted.org/packages/4e/ab/0d7fb2efc2e9a5eb7ddcc6e722f834a69b454b7e6e5888c3a8567ecffb31/coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e", size = 253843, upload-time = "2026-03-17T10:30:31.301Z" },
- { url = "https://files.pythonhosted.org/packages/ba/6f/7467b917bbf5408610178f62a49c0ed4377bb16c1657f689cc61470da8ce/coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd", size = 249802, upload-time = "2026-03-17T10:30:33.358Z" },
- { url = "https://files.pythonhosted.org/packages/75/2c/1172fb689df92135f5bfbbd69fc83017a76d24ea2e2f3a1154007e2fb9f8/coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8", size = 250707, upload-time = "2026-03-17T10:30:35.2Z" },
- { url = "https://files.pythonhosted.org/packages/67/21/9ac389377380a07884e3b48ba7a620fcd9dbfaf1d40565facdc6b36ec9ef/coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf", size = 221880, upload-time = "2026-03-17T10:30:36.775Z" },
- { url = "https://files.pythonhosted.org/packages/af/7f/4cd8a92531253f9d7c1bbecd9fa1b472907fb54446ca768c59b531248dc5/coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9", size = 222816, upload-time = "2026-03-17T10:30:38.891Z" },
- { url = "https://files.pythonhosted.org/packages/12/a6/1d3f6155fb0010ca68eba7fe48ca6c9da7385058b77a95848710ecf189b1/coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028", size = 221483, upload-time = "2026-03-17T10:30:40.463Z" },
{ url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" },
{ url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" },
{ url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" },
@@ -1252,26 +1104,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" },
]
-[package.optional-dependencies]
-toml = [
- { name = "tomli", marker = "python_full_version <= '3.11'" },
-]
-
[[package]]
name = "crc32c"
version = "2.8"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e3/66/7e97aa77af7cf6afbff26e3651b564fe41932599bc2d3dce0b2f73d4829a/crc32c-2.8.tar.gz", hash = "sha256:578728964e59c47c356aeeedee6220e021e124b9d3e8631d95d9a5e5f06e261c", size = 48179, upload-time = "2025-10-17T06:20:13.61Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/dc/0b/5e03b22d913698e9cc563f39b9f6bbd508606bf6b8e9122cd6bf196b87ea/crc32c-2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e560a97fbb96c9897cb1d9b5076ef12fc12e2e25622530a1afd0de4240f17e1f", size = 66329, upload-time = "2025-10-17T06:19:01.771Z" },
- { url = "https://files.pythonhosted.org/packages/6b/38/2fe0051ffe8c6a650c8b1ac0da31b8802d1dbe5fa40a84e4b6b6f5583db5/crc32c-2.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6762d276d90331a490ef7e71ffee53b9c0eb053bd75a272d786f3b08d3fe3671", size = 62988, upload-time = "2025-10-17T06:19:02.953Z" },
- { url = "https://files.pythonhosted.org/packages/3e/30/5837a71c014be83aba1469c58820d287fc836512a0cad6b8fdd43868accd/crc32c-2.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:60670569f5ede91e39f48fb0cb4060e05b8d8704dd9e17ede930bf441b2f73ef", size = 61522, upload-time = "2025-10-17T06:19:03.796Z" },
- { url = "https://files.pythonhosted.org/packages/ca/29/63972fc1452778e2092ae998c50cbfc2fc93e3fa9798a0278650cd6169c5/crc32c-2.8-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:711743da6ccc70b3c6718c328947b0b6f34a1fe6a6c27cc6c1d69cc226bf70e9", size = 80200, upload-time = "2025-10-17T06:19:04.617Z" },
- { url = "https://files.pythonhosted.org/packages/cb/3a/60eb49d7bdada4122b3ffd45b0df54bdc1b8dd092cda4b069a287bdfcff4/crc32c-2.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5eb4094a2054774f13b26f21bf56792bb44fa1fcee6c6ad099387a43ffbfb4fa", size = 81757, upload-time = "2025-10-17T06:19:05.496Z" },
- { url = "https://files.pythonhosted.org/packages/f5/63/6efc1b64429ef7d23bd58b75b7ac24d15df327e3ebbe9c247a0f7b1c2ed1/crc32c-2.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fff15bf2bd3e95780516baae935ed12be88deaa5ebe6143c53eb0d26a7bdc7b7", size = 80830, upload-time = "2025-10-17T06:19:06.621Z" },
- { url = "https://files.pythonhosted.org/packages/e1/eb/0ae9f436f8004f1c88f7429e659a7218a3879bd11a6b18ed1257aad7e98b/crc32c-2.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c0e11e3826668121fa53e0745635baf5e4f0ded437e8ff63ea56f38fc4f970a", size = 80095, upload-time = "2025-10-17T06:19:07.381Z" },
- { url = "https://files.pythonhosted.org/packages/9e/81/4afc9d468977a4cd94a2eb62908553345009a7c0d30e74463a15d4b48ec3/crc32c-2.8-cp311-cp311-win32.whl", hash = "sha256:38f915336715d1f1353ab07d7d786f8a789b119e273aea106ba55355dfc9101d", size = 64886, upload-time = "2025-10-17T06:19:08.497Z" },
- { url = "https://files.pythonhosted.org/packages/d6/e8/94e839c9f7e767bf8479046a207afd440a08f5c59b52586e1af5e64fa4a0/crc32c-2.8-cp311-cp311-win_amd64.whl", hash = "sha256:60e0a765b1caab8d31b2ea80840639253906a9351d4b861551c8c8625ea20f86", size = 66639, upload-time = "2025-10-17T06:19:09.338Z" },
{ url = "https://files.pythonhosted.org/packages/b6/36/fd18ef23c42926b79c7003e16cb0f79043b5b179c633521343d3b499e996/crc32c-2.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:572ffb1b78cce3d88e8d4143e154d31044a44be42cb3f6fbbf77f1e7a941c5ab", size = 66379, upload-time = "2025-10-17T06:19:10.115Z" },
{ url = "https://files.pythonhosted.org/packages/7f/b8/c584958e53f7798dd358f5bdb1bbfc97483134f053ee399d3eeb26cca075/crc32c-2.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cf827b3758ee0c4aacd21ceca0e2da83681f10295c38a10bfeb105f7d98f7a68", size = 63042, upload-time = "2025-10-17T06:19:10.946Z" },
{ url = "https://files.pythonhosted.org/packages/62/e6/6f2af0ec64a668a46c861e5bc778ea3ee42171fedfc5440f791f470fd783/crc32c-2.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:106fbd79013e06fa92bc3b51031694fcc1249811ed4364ef1554ee3dd2c7f5a2", size = 61528, upload-time = "2025-10-17T06:19:11.768Z" },
@@ -1281,11 +1119,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/db/86/fad1a94cdeeeb6b6e2323c87f970186e74bfd6fbfbc247bf5c88ad0873d5/crc32c-2.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:59eee5f3a69ad0793d5fa9cdc9b9d743b0cd50edf7fccc0a3988a821fef0208c", size = 79886, upload-time = "2025-10-17T06:19:15.345Z" },
{ url = "https://files.pythonhosted.org/packages/d5/db/1a7cb6757a1e32376fa2dfce00c815ea4ee614a94f9bff8228e37420c183/crc32c-2.8-cp312-cp312-win32.whl", hash = "sha256:a73d03ce3604aa5d7a2698e9057a0eef69f529c46497b27ee1c38158e90ceb76", size = 64896, upload-time = "2025-10-17T06:19:16.457Z" },
{ url = "https://files.pythonhosted.org/packages/bf/8e/2024de34399b2e401a37dcb54b224b56c747b0dc46de4966886827b4d370/crc32c-2.8-cp312-cp312-win_amd64.whl", hash = "sha256:56b3b7d015247962cf58186e06d18c3d75a1a63d709d3233509e1c50a2d36aa2", size = 66645, upload-time = "2025-10-17T06:19:17.235Z" },
- { url = "https://files.pythonhosted.org/packages/a7/1d/dd926c68eb8aac8b142a1a10b8eb62d95212c1cf81775644373fe7cceac2/crc32c-2.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5833f4071da7ea182c514ba17d1eee8aec3c5be927d798222fbfbbd0f5eea02c", size = 62345, upload-time = "2025-10-17T06:20:09.39Z" },
- { url = "https://files.pythonhosted.org/packages/51/be/803404e5abea2ef2c15042edca04bbb7f625044cca879e47f186b43887c2/crc32c-2.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:1dc4da036126ac07b39dd9d03e93e585ec615a2ad28ff12757aef7de175295a8", size = 61229, upload-time = "2025-10-17T06:20:10.236Z" },
- { url = "https://files.pythonhosted.org/packages/fc/3a/00cc578cd27ed0b22c9be25cef2c24539d92df9fa80ebd67a3fc5419724c/crc32c-2.8-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:15905fa78344654e241371c47e6ed2411f9eeb2b8095311c68c88eccf541e8b4", size = 64108, upload-time = "2025-10-17T06:20:11.072Z" },
- { url = "https://files.pythonhosted.org/packages/6b/bc/0587ef99a1c7629f95dd0c9d4f3d894de383a0df85831eb16c48a6afdae4/crc32c-2.8-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c596f918688821f796434e89b431b1698396c38bf0b56de873621528fe3ecb1e", size = 64815, upload-time = "2025-10-17T06:20:11.919Z" },
- { url = "https://files.pythonhosted.org/packages/73/42/94f2b8b92eae9064fcfb8deef2b971514065bd606231f8857ff8ae02bebd/crc32c-2.8-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8d23c4fe01b3844cb6e091044bc1cebdef7d16472e058ce12d9fadf10d2614af", size = 66659, upload-time = "2025-10-17T06:20:12.766Z" },
]
[[package]]
@@ -1344,12 +1177,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" },
{ url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" },
{ url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" },
- { url = "https://files.pythonhosted.org/packages/2e/84/7ccff00ced5bac74b775ce0beb7d1be4e8637536b522b5df9b73ada42da2/cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead", size = 3475444, upload-time = "2026-03-25T23:34:38.944Z" },
- { url = "https://files.pythonhosted.org/packages/bc/1f/4c926f50df7749f000f20eede0c896769509895e2648db5da0ed55db711d/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8", size = 4218227, upload-time = "2026-03-25T23:34:40.871Z" },
- { url = "https://files.pythonhosted.org/packages/c6/65/707be3ffbd5f786028665c3223e86e11c4cda86023adbc56bd72b1b6bab5/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0", size = 4381399, upload-time = "2026-03-25T23:34:42.609Z" },
- { url = "https://files.pythonhosted.org/packages/f3/6d/73557ed0ef7d73d04d9aba745d2c8e95218213687ee5e76b7d236a5030fc/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b", size = 4217595, upload-time = "2026-03-25T23:34:44.205Z" },
- { url = "https://files.pythonhosted.org/packages/9e/c5/e1594c4eec66a567c3ac4400008108a415808be2ce13dcb9a9045c92f1a0/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a", size = 4380912, upload-time = "2026-03-25T23:34:46.328Z" },
- { url = "https://files.pythonhosted.org/packages/1a/89/843b53614b47f97fe1abc13f9a86efa5ec9e275292c457af1d4a60dc80e0/cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e", size = 3409955, upload-time = "2026-03-25T23:34:48.465Z" },
]
[[package]]
@@ -1358,14 +1185,6 @@ version = "2.0.13"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c0/8f/2f0fbb32535c3731b7c2974c569fb9325e0a38ed5565a08e1139a3b71e82/cymem-2.0.13.tar.gz", hash = "sha256:1c91a92ae8c7104275ac26bd4d29b08ccd3e7faff5893d3858cb6fadf1bc1588", size = 12320, upload-time = "2025-11-14T14:58:36.902Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/10/64/1db41f7576a6b69f70367e3c15e968fd775ba7419e12059c9966ceb826f8/cymem-2.0.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:673183466b0ff2e060d97ec5116711d44200b8f7be524323e080d215ee2d44a5", size = 43587, upload-time = "2025-11-14T14:57:22.39Z" },
- { url = "https://files.pythonhosted.org/packages/81/13/57f936fc08551323aab3f92ff6b7f4d4b89d5b4e495c870a67cb8d279757/cymem-2.0.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bee2791b3f6fc034ce41268851462bf662ff87e8947e35fb6dd0115b4644a61f", size = 43139, upload-time = "2025-11-14T14:57:23.363Z" },
- { url = "https://files.pythonhosted.org/packages/32/a6/9345754be51e0479aa387b7b6cffc289d0fd3201aaeb8dade4623abd1e02/cymem-2.0.13-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f3aee3adf16272bca81c5826eed55ba3c938add6d8c9e273f01c6b829ecfde22", size = 245063, upload-time = "2025-11-14T14:57:24.839Z" },
- { url = "https://files.pythonhosted.org/packages/d6/01/6bc654101526fa86e82bf6b05d99b2cd47c30a333cfe8622c26c0592beb2/cymem-2.0.13-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:30c4e75a3a1d809e89106b0b21803eb78e839881aa1f5b9bd27b454bc73afde3", size = 244496, upload-time = "2025-11-14T14:57:26.42Z" },
- { url = "https://files.pythonhosted.org/packages/c4/fb/853b7b021e701a1f41687f3704d5f469aeb2a4f898c3fbb8076806885955/cymem-2.0.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec99efa03cf8ec11c8906aa4d4cc0c47df393bc9095c9dd64b89b9b43e220b04", size = 243287, upload-time = "2025-11-14T14:57:27.542Z" },
- { url = "https://files.pythonhosted.org/packages/d4/2b/0e4664cafc581de2896d75000651fd2ce7094d33263f466185c28ffc96e4/cymem-2.0.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c90a6ecba994a15b17a3f45d7ec74d34081df2f73bd1b090e2adc0317e4e01b6", size = 248287, upload-time = "2025-11-14T14:57:29.055Z" },
- { url = "https://files.pythonhosted.org/packages/21/0f/f94c6950edbfc2aafb81194fc40b6cacc8e994e9359d3cb4328c5705b9b5/cymem-2.0.13-cp311-cp311-win_amd64.whl", hash = "sha256:ce821e6ba59148ed17c4567113b8683a6a0be9c9ac86f14e969919121efb61a5", size = 40116, upload-time = "2025-11-14T14:57:30.592Z" },
- { url = "https://files.pythonhosted.org/packages/00/df/2455eff6ac0381ff165db6883b311f7016e222e3dd62185517f8e8187ed0/cymem-2.0.13-cp311-cp311-win_arm64.whl", hash = "sha256:0dca715e708e545fd1d97693542378a00394b20a37779c1ae2c8bdbb43acef79", size = 36349, upload-time = "2025-11-14T14:57:31.573Z" },
{ url = "https://files.pythonhosted.org/packages/c9/52/478a2911ab5028cb710b4900d64aceba6f4f882fcb13fd8d40a456a1b6dc/cymem-2.0.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8afbc5162a0fe14b6463e1c4e45248a1b2fe2cbcecc8a5b9e511117080da0eb", size = 43745, upload-time = "2025-11-14T14:57:32.52Z" },
{ url = "https://files.pythonhosted.org/packages/f9/71/f0f8adee945524774b16af326bd314a14a478ed369a728a22834e6785a18/cymem-2.0.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9251d889348fe79a75e9b3e4d1b5fa651fca8a64500820685d73a3acc21b6a8", size = 42927, upload-time = "2025-11-14T14:57:33.827Z" },
{ url = "https://files.pythonhosted.org/packages/62/6d/159780fe162ff715d62b809246e5fc20901cef87ca28b67d255a8d741861/cymem-2.0.13-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:742fc19764467a49ed22e56a4d2134c262d73a6c635409584ae3bf9afa092c33", size = 258346, upload-time = "2025-11-14T14:57:34.917Z" },
@@ -2072,17 +1891,6 @@ version = "0.14.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386, upload-time = "2025-10-19T22:42:40.176Z" },
- { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", size = 264569, upload-time = "2025-10-19T22:25:50.977Z" },
- { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366, upload-time = "2025-10-19T22:29:49.166Z" },
- { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", size = 278978, upload-time = "2025-10-19T22:35:41.306Z" },
- { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692, upload-time = "2025-10-19T22:25:36.997Z" },
- { url = "https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384, upload-time = "2025-10-19T22:29:46.578Z" },
- { url = "https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921, upload-time = "2025-10-19T22:36:42.006Z" },
- { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575, upload-time = "2025-10-19T22:28:18.975Z" },
- { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317, upload-time = "2025-10-19T22:25:32.75Z" },
- { url = "https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804, upload-time = "2025-10-19T22:24:15.615Z" },
- { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099, upload-time = "2025-10-19T22:24:31.646Z" },
{ url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164, upload-time = "2025-10-19T22:31:45.635Z" },
{ url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837, upload-time = "2025-10-19T22:38:38.53Z" },
{ url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370, upload-time = "2025-10-19T22:40:26.07Z" },
@@ -2253,22 +2061,6 @@ version = "1.8.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" },
- { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" },
- { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" },
- { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" },
- { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" },
- { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" },
- { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" },
- { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" },
- { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" },
- { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" },
- { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" },
- { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" },
- { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" },
- { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" },
- { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" },
- { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" },
{ url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" },
{ url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" },
{ url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" },
@@ -2318,13 +2110,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/9e/48/b3ef2673ffb940f980966694e40d6d32560f3ffa284ecaeb5ea3a90a6d3f/gevent-25.9.1.tar.gz", hash = "sha256:adf9cd552de44a4e6754c51ff2e78d9193b7fa6eab123db9578a210e657235dd", size = 5059025, upload-time = "2025-09-17T16:15:34.528Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/81/86/03f8db0704fed41b0fa830425845f1eb4e20c92efa3f18751ee17809e9c6/gevent-25.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5aff9e8342dc954adb9c9c524db56c2f3557999463445ba3d9cbe3dada7b7", size = 1792418, upload-time = "2025-09-17T15:41:24.384Z" },
- { url = "https://files.pythonhosted.org/packages/5f/35/f6b3a31f0849a62cfa2c64574bcc68a781d5499c3195e296e892a121a3cf/gevent-25.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1cdf6db28f050ee103441caa8b0448ace545364f775059d5e2de089da975c457", size = 1875700, upload-time = "2025-09-17T15:48:59.652Z" },
- { url = "https://files.pythonhosted.org/packages/66/1e/75055950aa9b48f553e061afa9e3728061b5ccecca358cef19166e4ab74a/gevent-25.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:812debe235a8295be3b2a63b136c2474241fa5c58af55e6a0f8cfc29d4936235", size = 1831365, upload-time = "2025-09-17T15:49:19.426Z" },
- { url = "https://files.pythonhosted.org/packages/31/e8/5c1f6968e5547e501cfa03dcb0239dff55e44c3660a37ec534e32a0c008f/gevent-25.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b28b61ff9216a3d73fe8f35669eefcafa957f143ac534faf77e8a19eb9e6883a", size = 2122087, upload-time = "2025-09-17T15:15:12.329Z" },
- { url = "https://files.pythonhosted.org/packages/c0/2c/ebc5d38a7542af9fb7657bfe10932a558bb98c8a94e4748e827d3823fced/gevent-25.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5e4b6278b37373306fc6b1e5f0f1cf56339a1377f67c35972775143d8d7776ff", size = 1808776, upload-time = "2025-09-17T15:52:40.16Z" },
- { url = "https://files.pythonhosted.org/packages/e6/26/e1d7d6c8ffbf76fe1fbb4e77bdb7f47d419206adc391ec40a8ace6ebbbf0/gevent-25.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d99f0cb2ce43c2e8305bf75bee61a8bde06619d21b9d0316ea190fc7a0620a56", size = 2179141, upload-time = "2025-09-17T15:24:09.895Z" },
- { url = "https://files.pythonhosted.org/packages/1d/6c/bb21fd9c095506aeeaa616579a356aa50935165cc0f1e250e1e0575620a7/gevent-25.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:72152517ecf548e2f838c61b4be76637d99279dbaa7e01b3924df040aa996586", size = 1677941, upload-time = "2025-09-17T19:59:50.185Z" },
{ url = "https://files.pythonhosted.org/packages/f7/49/e55930ba5259629eb28ac7ee1abbca971996a9165f902f0249b561602f24/gevent-25.9.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:46b188248c84ffdec18a686fcac5dbb32365d76912e14fda350db5dc0bfd4f86", size = 2955991, upload-time = "2025-09-17T14:52:30.568Z" },
{ url = "https://files.pythonhosted.org/packages/aa/88/63dc9e903980e1da1e16541ec5c70f2b224ec0a8e34088cb42794f1c7f52/gevent-25.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f2b54ea3ca6f0c763281cd3f96010ac7e98c2e267feb1221b5a26e2ca0b9a692", size = 1808503, upload-time = "2025-09-17T15:41:25.59Z" },
{ url = "https://files.pythonhosted.org/packages/7a/8d/7236c3a8f6ef7e94c22e658397009596fa90f24c7d19da11ad7ab3a9248e/gevent-25.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7a834804ac00ed8a92a69d3826342c677be651b1c3cd66cc35df8bc711057aa2", size = 1890001, upload-time = "2025-09-17T15:49:01.227Z" },
@@ -2365,14 +2150,6 @@ version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/57/57/86fd2ed7722cddfc7b1aa87cc768ef89944aa759b019595765aff5ad96a7/gmpy2-2.3.0.tar.gz", hash = "sha256:2d943cc9051fcd6b15b2a09369e2f7e18c526bc04c210782e4da61b62495eb4a", size = 302252, upload-time = "2026-02-08T00:57:42.808Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a3/70/0b5bde5f8e960c25ee18a352eb12bf5078d7fff3367c86d04985371de3f5/gmpy2-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2792ec96b2c4ee5af9f72409cd5b786edaf8277321f7022ce80ddff265815b01", size = 858392, upload-time = "2026-02-08T00:56:06.264Z" },
- { url = "https://files.pythonhosted.org/packages/c7/9b/2b52e92d0f1f36428e93ad7980634156fb5a1c88044984b0c03988951dc7/gmpy2-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3770aa5e44c5650d18232a0b8b8ed3d12db530d8278d4c800e4de5eef24cac5", size = 708753, upload-time = "2026-02-08T00:56:07.539Z" },
- { url = "https://files.pythonhosted.org/packages/e8/74/dac71b2f9f7844c40b38b6e43e3f793193420fd65573258147792cc069ce/gmpy2-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9b4cee1fa3647505f53b81dc3b60ac49034768117f6295a04aaf4d3f216b821", size = 1674005, upload-time = "2026-02-08T00:56:10.932Z" },
- { url = "https://files.pythonhosted.org/packages/2c/29/16548784d70b2a58919720cb976a968b9b14a1b8ccebfe4a21d21647ecec/gmpy2-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd9f4124d7dc39d50896ba08820049a95f9f3952dcd6e072cc3a9d07361b7f1f", size = 1774200, upload-time = "2026-02-08T00:56:13.167Z" },
- { url = "https://files.pythonhosted.org/packages/75/c5/ef9efb075388e91c166f74234cd54897af7a2d3b93c66a9c3a266c796c99/gmpy2-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2f6b38e1b6d2aeb553c936c136c3a12cf983c9f9ce3e211b8632744a15f2bce7", size = 1693346, upload-time = "2026-02-08T00:56:14.999Z" },
- { url = "https://files.pythonhosted.org/packages/13/7e/1a1d6f50bb428434ca6930df0df6d9f8ad914c103106e60574b5df349f36/gmpy2-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:089229ef18b8d804a76fec9bd7e7d653f598a977e8354f7de8850731a48adb37", size = 1731821, upload-time = "2026-02-08T00:56:16.524Z" },
- { url = "https://files.pythonhosted.org/packages/49/47/f1140943bed78da59261edb377b9497b74f6e583d7accc9dc20592753a25/gmpy2-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:f1843f2ca5a1643fac7563a12a6a7d68e539d93de4afe5812355d32fb1613891", size = 1234877, upload-time = "2026-02-08T00:56:17.919Z" },
- { url = "https://files.pythonhosted.org/packages/64/44/a19e4a1628067bf7d27eeda2a1a874b1a5e750e2f5847cc2c49e90946eb5/gmpy2-2.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:cd5b92fa675dde5151ebe8d89814c78d573e5210cdc162016080782778f15654", size = 855570, upload-time = "2026-02-08T00:56:19.415Z" },
{ url = "https://files.pythonhosted.org/packages/5c/e0/f70385e41b265b4f3534c7f41e78eefcf78dfe3a0d490816c697bb0703a9/gmpy2-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f35d6b1a8f067323a0a0d7034699284baebef498b030bbb29ab31d2ec13d1068", size = 857355, upload-time = "2026-02-08T00:56:20.674Z" },
{ url = "https://files.pythonhosted.org/packages/52/31/637015bd02bc74c6d854fc92ca1c24109a91691df07bc5e10bd14e09fd15/gmpy2-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:392d0560526dfa377c54c5c001d507fbbdea6cf54574895b90a97fc3587fa51e", size = 708996, upload-time = "2026-02-08T00:56:22.058Z" },
{ url = "https://files.pythonhosted.org/packages/f4/21/7f8bf79c486cff140aca76d958cdecfd1986cf989d28e14791a6e09004d8/gmpy2-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e900f41cc46700a5f49a4fbdcd5cd895e00bd0c2b9889fb2504ac1d594c21ac2", size = 1667404, upload-time = "2026-02-08T00:56:25.199Z" },
@@ -2381,11 +2158,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0f/02/1644480dc9f499f510979033a09069bb5a4fb3e75cf8f79c894d4ba17eed/gmpy2-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d135dcef824e26e1b3af544004d8f98564d090e7cf1001c50cc93d9dc1dc047", size = 1722019, upload-time = "2026-02-08T00:56:29.973Z" },
{ url = "https://files.pythonhosted.org/packages/5a/3f/5a74a2c9ac2e6076819649707293e16fd0384bee9f065f097d0f2fb89b0c/gmpy2-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:9dcbb628f9c806f0e6789f2c5e056e67e949b317af0e9ea0c3f0e0488c56e2a8", size = 1236149, upload-time = "2026-02-08T00:56:31.734Z" },
{ url = "https://files.pythonhosted.org/packages/59/34/e9157d26278462feca182515fd58de1e7a2bb5da0ee7ba80aeed0363776c/gmpy2-2.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:19022e0103aa76803b666720f107d8ab1941c597fd3fe70fadf7c49bac82a097", size = 856534, upload-time = "2026-02-08T00:56:33.059Z" },
- { url = "https://files.pythonhosted.org/packages/a1/10/f95d0103be9c1c458d5d92a72cca341a4ce0f1ca3ae6f79839d0f171f7ea/gmpy2-2.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:71dc3734104fa1f300d35ac6f55c7e98f7b0e1c7fd96f27b409110ed1c0c47d2", size = 840903, upload-time = "2026-02-08T00:57:34.192Z" },
- { url = "https://files.pythonhosted.org/packages/5b/50/677daeb75c038cdd773d575eefd34e96dbdd7b03c91166e56e6f8ed7acc2/gmpy2-2.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4623e700423396ef3d1658efa83b6feb0615fb68cb0b850e9ac0cba966db34c8", size = 691637, upload-time = "2026-02-08T00:57:35.495Z" },
- { url = "https://files.pythonhosted.org/packages/bd/cf/f1eb022f61c7bcc2dc428d345a7c012f0fabe1acb8db0d8216f23a46a915/gmpy2-2.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:692289a37442468856328986e0fab7e7e71c514bc470e1abae82d3bc54ca4cd2", size = 939209, upload-time = "2026-02-08T00:57:37.19Z" },
- { url = "https://files.pythonhosted.org/packages/db/ae/c651b8d903f4d8a65e4f959e2fd39c963d36cb2c6bfc452aa6d7db0fc5b3/gmpy2-2.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb379412033b52c3ec6bc44c6eaa134c88a068b6f1f360e6c13ca962082478ee", size = 1039433, upload-time = "2026-02-08T00:57:38.841Z" },
- { url = "https://files.pythonhosted.org/packages/53/1a/72844930f855d50b831a899f53365404ec81c165a68dea6ea3fa1668ba46/gmpy2-2.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8d087b262a0356c318a56fbb5c718e4e56762d861b2f9d581adc90a180264db9", size = 1233930, upload-time = "2026-02-08T00:57:40.228Z" },
]
[[package]]
@@ -2563,18 +2335,11 @@ version = "1.7.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495, upload-time = "2025-03-26T14:29:13.32Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468, upload-time = "2025-03-26T14:32:52.215Z" },
- { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313, upload-time = "2025-03-26T14:57:38.758Z" },
- { url = "https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048, upload-time = "2025-03-26T14:41:30.679Z" },
- { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669, upload-time = "2025-03-26T14:41:31.432Z" },
- { url = "https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476, upload-time = "2025-03-26T14:29:10.211Z" },
{ url = "https://files.pythonhosted.org/packages/dd/b7/787e2453cf8639c94b3d06c9d61f512234a82e1d12d13d18584bd3049904/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194", size = 30470, upload-time = "2025-03-26T14:34:31.655Z" },
{ url = "https://files.pythonhosted.org/packages/ed/b4/6042c2b0cbac3ec3a69bb4c49b28d2f517b7a0f4a0232603c42c58e22b44/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e", size = 30315, upload-time = "2025-03-26T15:01:54.634Z" },
{ url = "https://files.pythonhosted.org/packages/29/ad/01e7a61a5d059bc57b702d9ff6a18b2585ad97f720bd0a0dbe215df1ab0e/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337", size = 33180, upload-time = "2025-03-26T14:41:32.168Z" },
{ url = "https://files.pythonhosted.org/packages/3b/a5/7279055cf004561894ed3a7bfdf5bf90a53f28fadd01af7cd166e88ddf16/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65", size = 32794, upload-time = "2025-03-26T14:41:33.264Z" },
{ url = "https://files.pythonhosted.org/packages/0f/d6/77060dbd140c624e42ae3ece3df53b9d811000729a5c821b9fd671ceaac6/google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6", size = 33477, upload-time = "2025-03-26T14:29:10.94Z" },
- { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241, upload-time = "2025-03-26T14:41:45.898Z" },
- { url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048, upload-time = "2025-03-26T14:41:46.696Z" },
]
[[package]]
@@ -2703,17 +2468,6 @@ version = "3.2.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" },
- { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" },
- { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" },
- { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" },
- { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" },
- { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" },
- { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" },
- { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" },
- { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" },
- { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" },
- { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" },
{ url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" },
{ url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" },
{ url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" },
@@ -2736,20 +2490,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/63/46/79764cfb61a3ac80dadae5d94fb10acdb7800e31fecf4113cf3d345e4952/grimp-3.14.tar.gz", hash = "sha256:645fbd835983901042dae4e1b24fde3a89bf7ac152f9272dd17a97e55cb4f871", size = 830882, upload-time = "2025-12-10T17:55:01.287Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/25/31/d4a86207c38954b6c3d859a1fc740a80b04bbe6e3b8a39f4e66f9633dfa4/grimp-3.14-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f1c91e3fa48c2196bf62e3c71492140d227b2bfcd6d15e735cbc0b3e2d5308e0", size = 2185572, upload-time = "2025-12-10T17:53:41.287Z" },
- { url = "https://files.pythonhosted.org/packages/f5/61/ed4cba5bd75d37fe46e17a602f616619a9e4f74ad8adfcf560ce4b2a1697/grimp-3.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6291c8f1690a9fe21b70923c60b075f4a89676541999e3d33084cbc69ac06a1", size = 2118002, upload-time = "2025-12-10T17:53:18.546Z" },
- { url = "https://files.pythonhosted.org/packages/77/6a/688f6144d0b207d7845bd8ab403820a83630ce3c9420cbbc7c9e9282f9c0/grimp-3.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ec312383935c2d09e4085c8435780ada2e13ebef14e105609c2988a02a5b2ce", size = 2283939, upload-time = "2025-12-10T17:52:06.228Z" },
- { url = "https://files.pythonhosted.org/packages/a5/98/4c540de151bf3fd58d6d7b3fe2269b6a6af6c61c915de1bc991802bfaff8/grimp-3.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f43cbf640e73ee703ad91639591046828d20103a1c363a02516e77a66a4ac07", size = 2233693, upload-time = "2025-12-10T17:52:18.938Z" },
- { url = "https://files.pythonhosted.org/packages/3e/7b/84b4b52b6c6dd5bf083cb1a72945748f56ea2e61768bbebf87e8d9d0ef75/grimp-3.14-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a93c9fddccb9ff16f5c6b5fca44227f5f86cba7cffc145d2176119603d2d7c7", size = 2389745, upload-time = "2025-12-10T17:53:00.659Z" },
- { url = "https://files.pythonhosted.org/packages/a7/33/31b96907c7dd78953df5e1ce67c558bd6057220fa1203d28d52566315a2e/grimp-3.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5653a2769fdc062cb7598d12200352069c9c6559b6643af6ada3639edb98fcc3", size = 2569055, upload-time = "2025-12-10T17:52:33.556Z" },
- { url = "https://files.pythonhosted.org/packages/b2/24/ce1a8110f3d5b178153b903aafe54b6a9216588b5bff3656e30af43e9c29/grimp-3.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:071c7ddf5e5bb7b2fdf79aefdf6e1c237cd81c095d6d0a19620e777e85bf103c", size = 2358044, upload-time = "2025-12-10T17:52:47.545Z" },
- { url = "https://files.pythonhosted.org/packages/05/7f/16d98c02287bc99884843478b9a68b04a2ef13b5cb8b9f36a9ca7daea75b/grimp-3.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e01b7a4419f535b667dfdcb556d3815b52981474f791fb40d72607228389a31", size = 2310304, upload-time = "2025-12-10T17:53:09.679Z" },
- { url = "https://files.pythonhosted.org/packages/a5/8c/0fde9781b0f6b4f9227d485685f48f6bcc70b95af22e2f85ff7f416cbfc1/grimp-3.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c29682f336151d1d018d0c3aa9eeaa35734b970e4593fa396b901edca7ef5c79", size = 2463682, upload-time = "2025-12-10T17:53:49.185Z" },
- { url = "https://files.pythonhosted.org/packages/51/cb/2baff301c2c2cc2792b6e225ea0784793ca587c81b97572be0bad122cfc8/grimp-3.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a5c4fd71f363ea39e8aab0630010ced77a8de9789f27c0acdd0d7e6269d4a8ef", size = 2500573, upload-time = "2025-12-10T17:54:03.899Z" },
- { url = "https://files.pythonhosted.org/packages/96/69/797e4242f42d6665da5fe22cb250cae3f14ece4cb22ad153e9cd97158179/grimp-3.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:766911e3ba0b13d833fdd03ad1f217523a8a2b2527b5507335f71dca1153183d", size = 2503005, upload-time = "2025-12-10T17:54:32.993Z" },
- { url = "https://files.pythonhosted.org/packages/fd/45/da1a27a6377807ca427cd56534231f0920e1895e16630204f382a0df14c5/grimp-3.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:154e84a2053e9f858ae48743de23a5ad4eb994007518c29371276f59b8419036", size = 2515776, upload-time = "2025-12-10T17:54:47.962Z" },
- { url = "https://files.pythonhosted.org/packages/4f/8d/b918a29ce98029cd7a9e33a584be43a93288d5283fb7ccef5b6b2ba39ede/grimp-3.14-cp311-cp311-win32.whl", hash = "sha256:3189c86c3e73016a1907ee3ba9f7a6ca037e3601ad09e60ce9bf12b88877f812", size = 1873189, upload-time = "2025-12-10T17:55:11.872Z" },
- { url = "https://files.pythonhosted.org/packages/90/d7/2327c203f83a25766fbd62b0df3b24230d422b6e53518ff4d1c5e69793f1/grimp-3.14-cp311-cp311-win_amd64.whl", hash = "sha256:201f46a6a4e5ee9dfba4a2f7d043f7deab080d1d84233f4a1aee812678c25307", size = 2014277, upload-time = "2025-12-10T17:55:04.144Z" },
{ url = "https://files.pythonhosted.org/packages/75/d6/a35ff62f35aa5fd148053506eddd7a8f2f6afaed31870dc608dd0eb38e4f/grimp-3.14-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ffabc6940301214753bad89ec0bfe275892fa1f64b999e9a101f6cebfc777133", size = 2178573, upload-time = "2025-12-10T17:53:42.836Z" },
{ url = "https://files.pythonhosted.org/packages/93/e2/bd2e80273da4d46110969fc62252e5372e0249feb872bc7fe76fdc7f1818/grimp-3.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:075d9a1c78d607792d0ed8d4d3d7754a621ef04c8a95eaebf634930dc9232bb2", size = 2110452, upload-time = "2025-12-10T17:53:19.831Z" },
{ url = "https://files.pythonhosted.org/packages/44/c3/7307249c657d34dca9d250d73ba027d6cfe15a98fb3119b6e5210bc388b7/grimp-3.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06ff52addeb20955a4d6aa097bee910573ffc9ef0d3c8a860844f267ad958156", size = 2283064, upload-time = "2025-12-10T17:52:07.673Z" },
@@ -2764,16 +2504,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/e6/23bed3da9206138d36d01890b656c7fb7adfb3a37daac8842d84d8777ade/grimp-3.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce8352a8ea0e27b143136ea086582fc6653419aa8a7c15e28ed08c898c42b185", size = 2514751, upload-time = "2025-12-10T17:54:49.384Z" },
{ url = "https://files.pythonhosted.org/packages/eb/45/6f1f55c97ee982f133ec5ccb22fc99bf5335aee70c208f4fb86cd833b8d5/grimp-3.14-cp312-cp312-win32.whl", hash = "sha256:3fc0f98b3c60d88e9ffa08faff3200f36604930972f8b29155f323b76ea25a06", size = 1875041, upload-time = "2025-12-10T17:55:13.326Z" },
{ url = "https://files.pythonhosted.org/packages/cf/cf/03ba01288e2a41a948bc8526f32c2eeaddd683ed34be1b895e31658d5a4c/grimp-3.14-cp312-cp312-win_amd64.whl", hash = "sha256:6bca77d1d50c8dc402c96af21f4e28e2f1e9938eeabd7417592a22bd83cde3c3", size = 2013868, upload-time = "2025-12-10T17:55:05.907Z" },
- { url = "https://files.pythonhosted.org/packages/65/cc/dbc00210d0324b8fc1242d8e857757c7e0b62ff0fc0c1bc8dcc42342da85/grimp-3.14-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c8a8aab9b4310a7e69d7d845cac21cf14563aa0520ea322b948eadeae56d303", size = 2284804, upload-time = "2025-12-10T17:52:16.379Z" },
- { url = "https://files.pythonhosted.org/packages/80/89/851d3d345342e9bcec3fe85d3997db29501fa59f958c1566bf3e24d9d7d9/grimp-3.14-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d781943b27e5875a41c8f9cfc80f8f0a349f864379192b8c3faa0e6a22593313", size = 2235176, upload-time = "2025-12-10T17:52:30.795Z" },
- { url = "https://files.pythonhosted.org/packages/58/78/5f94702a8d5c121cafcdc9664de34c34f19d0d91a1127bf3946a2631f7a3/grimp-3.14-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9630d4633607aff94d0ac84b9c64fef1382cdb05b00d9acbde47f8745e264871", size = 2391258, upload-time = "2025-12-10T17:53:06.906Z" },
- { url = "https://files.pythonhosted.org/packages/e9/a2/df8c79de5c9e227856d048cc1551c4742a5f97660c40304ac278bd48607f/grimp-3.14-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cb00e1bcca583668554a8e9e1e4229a1d11b0620969310aae40148829ff6a32", size = 2571443, upload-time = "2025-12-10T17:52:43.853Z" },
- { url = "https://files.pythonhosted.org/packages/f0/21/747b7ed9572bbdc34a76dfec12ce510e80164b1aa06d3b21b34994e5f567/grimp-3.14-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3389da4ceaaa7f7de24a668c0afc307a9f95997bd90f81ec359a828a9bd1d270", size = 2357767, upload-time = "2025-12-10T17:52:57.84Z" },
- { url = "https://files.pythonhosted.org/packages/0c/e6/485c5e3b64933e71f72f0cc45b0d7130418a6a5a13cedc2e8411bd76f290/grimp-3.14-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd7a32970ef97e42d4e7369397c7795287d84a736d788ccb90b6c14f0561d975", size = 2309069, upload-time = "2025-12-10T17:53:15.203Z" },
- { url = "https://files.pythonhosted.org/packages/31/bd/12024a8cba1c77facc1422a7b48cd0d04c252fc9178fd6f99dc05a8af57b/grimp-3.14-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:fd1278623fa09f62abc0fd8a6500f31b421a1fd479980f44c2926020a0becf02", size = 2466429, upload-time = "2025-12-10T17:54:00.286Z" },
- { url = "https://files.pythonhosted.org/packages/ee/7f/0e5977887e1c8f00f84bb4125217534806ffdcef9cf52f3580aa3b151f4b/grimp-3.14-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:9cfa52c89333d3d8fe9dc782529e888270d060231c3783e036d424044671dde0", size = 2501190, upload-time = "2025-12-10T17:54:30.107Z" },
- { url = "https://files.pythonhosted.org/packages/42/6b/06acb94b6d0d8c7277bb3e33f93224aa3be5b04643f853479d3bf7b23ace/grimp-3.14-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:48a5be4a12fca6587e6885b4fc13b9e242ab8bf874519292f0f13814aecf52cc", size = 2503440, upload-time = "2025-12-10T17:54:44.444Z" },
- { url = "https://files.pythonhosted.org/packages/5b/4d/2e531370d12e7a564f67f680234710bbc08554238a54991cd244feb61fb6/grimp-3.14-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3fcc332466783a12a42cd317fd344c30fe734ba4fa2362efff132dc3f8d36da7", size = 2516525, upload-time = "2025-12-10T17:54:58.987Z" },
]
[[package]]
@@ -2799,16 +2529,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" },
- { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" },
- { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" },
- { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" },
- { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" },
- { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" },
- { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" },
- { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" },
- { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" },
- { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" },
{ url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" },
{ url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" },
{ url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" },
@@ -2846,16 +2566,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/9a/edfefb47f11ef6b0f39eea4d8f022c5bb05ac1d14fcc7058e84a51305b73/grpcio_tools-1.71.2.tar.gz", hash = "sha256:b5304d65c7569b21270b568e404a5a843cf027c66552a6a0978b23f137679c09", size = 5330655, upload-time = "2025-06-28T04:22:00.308Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/17/e4/0568d38b8da6237ea8ea15abb960fb7ab83eb7bb51e0ea5926dab3d865b1/grpcio_tools-1.71.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:0acb8151ea866be5b35233877fbee6445c36644c0aa77e230c9d1b46bf34b18b", size = 2385557, upload-time = "2025-06-28T04:20:54.323Z" },
- { url = "https://files.pythonhosted.org/packages/76/fb/700d46f72b0f636cf0e625f3c18a4f74543ff127471377e49a071f64f1e7/grpcio_tools-1.71.2-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:b28f8606f4123edb4e6da281547465d6e449e89f0c943c376d1732dc65e6d8b3", size = 5447590, upload-time = "2025-06-28T04:20:55.836Z" },
- { url = "https://files.pythonhosted.org/packages/12/69/d9bb2aec3de305162b23c5c884b9f79b1a195d42b1e6dabcc084cc9d0804/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:cbae6f849ad2d1f5e26cd55448b9828e678cb947fa32c8729d01998238266a6a", size = 2348495, upload-time = "2025-06-28T04:20:57.33Z" },
- { url = "https://files.pythonhosted.org/packages/d5/83/f840aba1690461b65330efbca96170893ee02fae66651bcc75f28b33a46c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4d1027615cfb1e9b1f31f2f384251c847d68c2f3e025697e5f5c72e26ed1316", size = 2742333, upload-time = "2025-06-28T04:20:59.051Z" },
- { url = "https://files.pythonhosted.org/packages/30/34/c02cd9b37de26045190ba665ee6ab8597d47f033d098968f812d253bbf8c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bac95662dc69338edb9eb727cc3dd92342131b84b12b3e8ec6abe973d4cbf1b", size = 2473490, upload-time = "2025-06-28T04:21:00.614Z" },
- { url = "https://files.pythonhosted.org/packages/4d/c7/375718ae091c8f5776828ce97bdcb014ca26244296f8b7f70af1a803ed2f/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c50250c7248055040f89eb29ecad39d3a260a4b6d3696af1575945f7a8d5dcdc", size = 2850333, upload-time = "2025-06-28T04:21:01.95Z" },
- { url = "https://files.pythonhosted.org/packages/19/37/efc69345bd92a73b2bc80f4f9e53d42dfdc234b2491ae58c87da20ca0ea5/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6ab1ad955e69027ef12ace4d700c5fc36341bdc2f420e87881e9d6d02af3d7b8", size = 3300748, upload-time = "2025-06-28T04:21:03.451Z" },
- { url = "https://files.pythonhosted.org/packages/d2/1f/15f787eb25ae42086f55ed3e4260e85f385921c788debf0f7583b34446e3/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd75dde575781262b6b96cc6d0b2ac6002b2f50882bf5e06713f1bf364ee6e09", size = 2913178, upload-time = "2025-06-28T04:21:04.879Z" },
- { url = "https://files.pythonhosted.org/packages/12/aa/69cb3a9dff7d143a05e4021c3c9b5cde07aacb8eb1c892b7c5b9fb4973e3/grpcio_tools-1.71.2-cp311-cp311-win32.whl", hash = "sha256:9a3cb244d2bfe0d187f858c5408d17cb0e76ca60ec9a274c8fd94cc81457c7fc", size = 946256, upload-time = "2025-06-28T04:21:06.518Z" },
- { url = "https://files.pythonhosted.org/packages/1e/df/fb951c5c87eadb507a832243942e56e67d50d7667b0e5324616ffd51b845/grpcio_tools-1.71.2-cp311-cp311-win_amd64.whl", hash = "sha256:00eb909997fd359a39b789342b476cbe291f4dd9c01ae9887a474f35972a257e", size = 1117661, upload-time = "2025-06-28T04:21:08.18Z" },
{ url = "https://files.pythonhosted.org/packages/9c/d3/3ed30a9c5b2424627b4b8411e2cd6a1a3f997d3812dbc6a8630a78bcfe26/grpcio_tools-1.71.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:bfc0b5d289e383bc7d317f0e64c9dfb59dc4bef078ecd23afa1a816358fb1473", size = 2385479, upload-time = "2025-06-28T04:21:10.413Z" },
{ url = "https://files.pythonhosted.org/packages/54/61/e0b7295456c7e21ef777eae60403c06835160c8d0e1e58ebfc7d024c51d3/grpcio_tools-1.71.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b4669827716355fa913b1376b1b985855d5cfdb63443f8d18faf210180199006", size = 5431521, upload-time = "2025-06-28T04:21:12.261Z" },
{ url = "https://files.pythonhosted.org/packages/75/d7/7bcad6bcc5f5b7fab53e6bce5db87041f38ef3e740b1ec2d8c49534fa286/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d4071f9b44564e3f75cdf0f05b10b3e8c7ea0ca5220acbf4dc50b148552eef2f", size = 2350289, upload-time = "2025-06-28T04:21:13.625Z" },
@@ -2924,19 +2634,6 @@ version = "3.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/65/82/d2817ce0653628e0a0cb128533f6af0dd6318a49f3f3a6a7bd1f2f2154af/hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685", size = 89048, upload-time = "2025-10-14T16:33:34.263Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/34/0c/be3b1093f93a7c823ca16fbfbb83d3a1de671bbd2add8da1fe2bcfccb2b8/hiredis-3.3.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:63ee6c1ae6a2462a2439eb93c38ab0315cd5f4b6d769c6a34903058ba538b5d6", size = 81813, upload-time = "2025-10-14T16:32:00.576Z" },
- { url = "https://files.pythonhosted.org/packages/95/2b/ed722d392ac59a7eee548d752506ef32c06ffdd0bce9cf91125a74b8edf9/hiredis-3.3.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:31eda3526e2065268a8f97fbe3d0e9a64ad26f1d89309e953c80885c511ea2ae", size = 46049, upload-time = "2025-10-14T16:32:01.319Z" },
- { url = "https://files.pythonhosted.org/packages/e5/61/8ace8027d5b3f6b28e1dc55f4a504be038ba8aa8bf71882b703e8f874c91/hiredis-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a26bae1b61b7bcafe3d0d0c7d012fb66ab3c95f2121dbea336df67e344e39089", size = 41814, upload-time = "2025-10-14T16:32:02.076Z" },
- { url = "https://files.pythonhosted.org/packages/23/0e/380ade1ffb21034976663a5128f0383533f35caccdba13ff0537dd5ace79/hiredis-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9546079f7fd5c50fbff9c791710049b32eebe7f9b94debec1e8b9f4c048cba2", size = 167572, upload-time = "2025-10-14T16:32:03.125Z" },
- { url = "https://files.pythonhosted.org/packages/ca/60/b4a8d2177575b896730f73e6890644591aa56790a75c2b6d6f2302a1dae6/hiredis-3.3.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ae327fc13b1157b694d53f92d50920c0051e30b0c245f980a7036e299d039ab4", size = 179373, upload-time = "2025-10-14T16:32:04.04Z" },
- { url = "https://files.pythonhosted.org/packages/31/53/a473a18d27cfe8afda7772ff9adfba1718fd31d5e9c224589dc17774fa0b/hiredis-3.3.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4016e50a8be5740a59c5af5252e5ad16c395021a999ad24c6604f0d9faf4d346", size = 177504, upload-time = "2025-10-14T16:32:04.934Z" },
- { url = "https://files.pythonhosted.org/packages/7e/0f/f6ee4c26b149063dbf5b1b6894b4a7a1f00a50e3d0cfd30a22d4c3479db3/hiredis-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17b473f273465a3d2168a57a5b43846165105ac217d5652a005e14068589ddc", size = 169449, upload-time = "2025-10-14T16:32:05.808Z" },
- { url = "https://files.pythonhosted.org/packages/64/38/e3e113172289e1261ccd43e387a577dd268b0b9270721b5678735803416c/hiredis-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9ecd9b09b11bd0b8af87d29c3f5da628d2bdc2a6c23d2dd264d2da082bd4bf32", size = 164010, upload-time = "2025-10-14T16:32:06.695Z" },
- { url = "https://files.pythonhosted.org/packages/8d/9a/ccf4999365691ea73d0dd2ee95ee6ef23ebc9a835a7417f81765bc49eade/hiredis-3.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:00fb04eac208cd575d14f246e74a468561081ce235937ab17d77cde73aefc66c", size = 174623, upload-time = "2025-10-14T16:32:07.627Z" },
- { url = "https://files.pythonhosted.org/packages/ed/c7/ee55fa2ade078b7c4f17e8ddc9bc28881d0b71b794ebf9db4cfe4c8f0623/hiredis-3.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:60814a7d0b718adf3bfe2c32c6878b0e00d6ae290ad8e47f60d7bba3941234a6", size = 167650, upload-time = "2025-10-14T16:32:08.615Z" },
- { url = "https://files.pythonhosted.org/packages/bf/06/f6cd90275dcb0ba03f69767805151eb60b602bc25830648bd607660e1f97/hiredis-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fcbd1a15e935aa323b5b2534b38419511b7909b4b8ee548e42b59090a1b37bb1", size = 165452, upload-time = "2025-10-14T16:32:09.561Z" },
- { url = "https://files.pythonhosted.org/packages/c3/10/895177164a6c4409a07717b5ae058d84a908e1ab629f0401110b02aaadda/hiredis-3.3.0-cp311-cp311-win32.whl", hash = "sha256:73679607c5a19f4bcfc9cf6eb54480bcd26617b68708ac8b1079da9721be5449", size = 20394, upload-time = "2025-10-14T16:32:10.469Z" },
- { url = "https://files.pythonhosted.org/packages/3c/c7/1e8416ae4d4134cb62092c61cabd76b3d720507ee08edd19836cdeea4c7a/hiredis-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:30a4df3d48f32538de50648d44146231dde5ad7f84f8f08818820f426840ae97", size = 22336, upload-time = "2025-10-14T16:32:11.221Z" },
{ url = "https://files.pythonhosted.org/packages/48/1c/ed28ae5d704f5c7e85b946fa327f30d269e6272c847fef7e91ba5fc86193/hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f", size = 82026, upload-time = "2025-10-14T16:32:12.004Z" },
{ url = "https://files.pythonhosted.org/packages/f4/9b/79f30c5c40e248291023b7412bfdef4ad9a8a92d9e9285d65d600817dac7/hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783", size = 46217, upload-time = "2025-10-14T16:32:13.133Z" },
{ url = "https://files.pythonhosted.org/packages/e7/c3/02b9ed430ad9087aadd8afcdf616717452d16271b701fa47edfe257b681e/hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271", size = 41858, upload-time = "2025-10-14T16:32:13.98Z" },
@@ -3019,13 +2716,6 @@ version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" },
- { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" },
- { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" },
- { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" },
- { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" },
- { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" },
- { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" },
{ url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" },
{ url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" },
{ url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" },
@@ -3246,19 +2936,6 @@ version = "0.12.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/32/f9/eaca4633486b527ebe7e681c431f529b63fe2709e7c5242fc0f43f77ce63/jiter-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8f8a7e317190b2c2d60eb2e8aa835270b008139562d70fe732e1c0020ec53c9", size = 316435, upload-time = "2025-11-09T20:47:02.087Z" },
- { url = "https://files.pythonhosted.org/packages/10/c1/40c9f7c22f5e6ff715f28113ebaba27ab85f9af2660ad6e1dd6425d14c19/jiter-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2218228a077e784c6c8f1a8e5d6b8cb1dea62ce25811c356364848554b2056cd", size = 320548, upload-time = "2025-11-09T20:47:03.409Z" },
- { url = "https://files.pythonhosted.org/packages/6b/1b/efbb68fe87e7711b00d2cfd1f26bb4bfc25a10539aefeaa7727329ffb9cb/jiter-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9354ccaa2982bf2188fd5f57f79f800ef622ec67beb8329903abf6b10da7d423", size = 351915, upload-time = "2025-11-09T20:47:05.171Z" },
- { url = "https://files.pythonhosted.org/packages/15/2d/c06e659888c128ad1e838123d0638f0efad90cc30860cb5f74dd3f2fc0b3/jiter-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f2607185ea89b4af9a604d4c7ec40e45d3ad03ee66998b031134bc510232bb7", size = 368966, upload-time = "2025-11-09T20:47:06.508Z" },
- { url = "https://files.pythonhosted.org/packages/6b/20/058db4ae5fb07cf6a4ab2e9b9294416f606d8e467fb74c2184b2a1eeacba/jiter-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a585a5e42d25f2e71db5f10b171f5e5ea641d3aa44f7df745aa965606111cc2", size = 482047, upload-time = "2025-11-09T20:47:08.382Z" },
- { url = "https://files.pythonhosted.org/packages/49/bb/dc2b1c122275e1de2eb12905015d61e8316b2f888bdaac34221c301495d6/jiter-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9e21d34edff5a663c631f850edcb786719c960ce887a5661e9c828a53a95d9", size = 380835, upload-time = "2025-11-09T20:47:09.81Z" },
- { url = "https://files.pythonhosted.org/packages/23/7d/38f9cd337575349de16da575ee57ddb2d5a64d425c9367f5ef9e4612e32e/jiter-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a612534770470686cd5431478dc5a1b660eceb410abade6b1b74e320ca98de6", size = 364587, upload-time = "2025-11-09T20:47:11.529Z" },
- { url = "https://files.pythonhosted.org/packages/f0/a3/b13e8e61e70f0bb06085099c4e2462647f53cc2ca97614f7fedcaa2bb9f3/jiter-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3985aea37d40a908f887b34d05111e0aae822943796ebf8338877fee2ab67725", size = 390492, upload-time = "2025-11-09T20:47:12.993Z" },
- { url = "https://files.pythonhosted.org/packages/07/71/e0d11422ed027e21422f7bc1883c61deba2d9752b720538430c1deadfbca/jiter-0.12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b1207af186495f48f72529f8d86671903c8c10127cac6381b11dddc4aaa52df6", size = 522046, upload-time = "2025-11-09T20:47:14.6Z" },
- { url = "https://files.pythonhosted.org/packages/9f/59/b968a9aa7102a8375dbbdfbd2aeebe563c7e5dddf0f47c9ef1588a97e224/jiter-0.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef2fb241de583934c9915a33120ecc06d94aa3381a134570f59eed784e87001e", size = 513392, upload-time = "2025-11-09T20:47:16.011Z" },
- { url = "https://files.pythonhosted.org/packages/ca/e4/7df62002499080dbd61b505c5cb351aa09e9959d176cac2aa8da6f93b13b/jiter-0.12.0-cp311-cp311-win32.whl", hash = "sha256:453b6035672fecce8007465896a25b28a6b59cfe8fbc974b2563a92f5a92a67c", size = 206096, upload-time = "2025-11-09T20:47:17.344Z" },
- { url = "https://files.pythonhosted.org/packages/bb/60/1032b30ae0572196b0de0e87dce3b6c26a1eff71aad5fe43dee3082d32e0/jiter-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:ca264b9603973c2ad9435c71a8ec8b49f8f715ab5ba421c85a51cde9887e421f", size = 204899, upload-time = "2025-11-09T20:47:19.365Z" },
- { url = "https://files.pythonhosted.org/packages/49/d5/c145e526fccdb834063fb45c071df78b0cc426bbaf6de38b0781f45d956f/jiter-0.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:cb00ef392e7d684f2754598c02c409f376ddcef857aae796d559e6cacc2d78a5", size = 188070, upload-time = "2025-11-09T20:47:20.75Z" },
{ url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" },
{ url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" },
{ url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" },
@@ -3272,10 +2949,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" },
{ url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" },
{ url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" },
- { url = "https://files.pythonhosted.org/packages/fe/54/5339ef1ecaa881c6948669956567a64d2670941925f245c434f494ffb0e5/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:4739a4657179ebf08f85914ce50332495811004cc1747852e8b2041ed2aab9b8", size = 311144, upload-time = "2025-11-09T20:49:10.503Z" },
- { url = "https://files.pythonhosted.org/packages/27/74/3446c652bffbd5e81ab354e388b1b5fc1d20daac34ee0ed11ff096b1b01a/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:41da8def934bf7bec16cb24bd33c0ca62126d2d45d81d17b864bd5ad721393c3", size = 305877, upload-time = "2025-11-09T20:49:12.269Z" },
- { url = "https://files.pythonhosted.org/packages/a1/f4/ed76ef9043450f57aac2d4fbeb27175aa0eb9c38f833be6ef6379b3b9a86/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c44ee814f499c082e69872d426b624987dbc5943ab06e9bbaa4f81989fdb79e", size = 340419, upload-time = "2025-11-09T20:49:13.803Z" },
- { url = "https://files.pythonhosted.org/packages/21/01/857d4608f5edb0664aa791a3d45702e1a5bcfff9934da74035e7b9803846/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd2097de91cf03eaa27b3cbdb969addf83f0179c6afc41bbc4513705e013c65d", size = 347212, upload-time = "2025-11-09T20:49:15.643Z" },
{ url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" },
{ url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" },
{ url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" },
@@ -3446,19 +3119,6 @@ version = "0.8.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/1d/01/0e748af5e4fee180cf7cd12bd12b0513ad23b045dccb2a83191bde82d168/librt-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:681dc2451d6d846794a828c16c22dc452d924e9f700a485b7ecb887a30aad1fd", size = 65315, upload-time = "2026-02-17T16:11:25.152Z" },
- { url = "https://files.pythonhosted.org/packages/9d/4d/7184806efda571887c798d573ca4134c80ac8642dcdd32f12c31b939c595/librt-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3b4350b13cc0e6f5bec8fa7caf29a8fb8cdc051a3bae45cfbfd7ce64f009965", size = 68021, upload-time = "2026-02-17T16:11:26.129Z" },
- { url = "https://files.pythonhosted.org/packages/ae/88/c3c52d2a5d5101f28d3dc89298444626e7874aa904eed498464c2af17627/librt-0.8.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ac1e7817fd0ed3d14fd7c5df91daed84c48e4c2a11ee99c0547f9f62fdae13da", size = 194500, upload-time = "2026-02-17T16:11:27.177Z" },
- { url = "https://files.pythonhosted.org/packages/d6/5d/6fb0a25b6a8906e85b2c3b87bee1d6ed31510be7605b06772f9374ca5cb3/librt-0.8.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:747328be0c5b7075cde86a0e09d7a9196029800ba75a1689332348e998fb85c0", size = 205622, upload-time = "2026-02-17T16:11:28.242Z" },
- { url = "https://files.pythonhosted.org/packages/b2/a6/8006ae81227105476a45691f5831499e4d936b1c049b0c1feb17c11b02d1/librt-0.8.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0af2bd2bc204fa27f3d6711d0f360e6b8c684a035206257a81673ab924aa11e", size = 218304, upload-time = "2026-02-17T16:11:29.344Z" },
- { url = "https://files.pythonhosted.org/packages/ee/19/60e07886ad16670aae57ef44dada41912c90906a6fe9f2b9abac21374748/librt-0.8.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d480de377f5b687b6b1bc0c0407426da556e2a757633cc7e4d2e1a057aa688f3", size = 211493, upload-time = "2026-02-17T16:11:30.445Z" },
- { url = "https://files.pythonhosted.org/packages/9c/cf/f666c89d0e861d05600438213feeb818c7514d3315bae3648b1fc145d2b6/librt-0.8.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d0ee06b5b5291f609ddb37b9750985b27bc567791bc87c76a569b3feed8481ac", size = 219129, upload-time = "2026-02-17T16:11:32.021Z" },
- { url = "https://files.pythonhosted.org/packages/8f/ef/f1bea01e40b4a879364c031476c82a0dc69ce068daad67ab96302fed2d45/librt-0.8.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e2c6f77b9ad48ce5603b83b7da9ee3e36b3ab425353f695cba13200c5d96596", size = 213113, upload-time = "2026-02-17T16:11:33.192Z" },
- { url = "https://files.pythonhosted.org/packages/9b/80/cdab544370cc6bc1b72ea369525f547a59e6938ef6863a11ab3cd24759af/librt-0.8.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:439352ba9373f11cb8e1933da194dcc6206daf779ff8df0ed69c5e39113e6a99", size = 212269, upload-time = "2026-02-17T16:11:34.373Z" },
- { url = "https://files.pythonhosted.org/packages/9d/9c/48d6ed8dac595654f15eceab2035131c136d1ae9a1e3548e777bb6dbb95d/librt-0.8.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:82210adabbc331dbb65d7868b105185464ef13f56f7f76688565ad79f648b0fe", size = 234673, upload-time = "2026-02-17T16:11:36.063Z" },
- { url = "https://files.pythonhosted.org/packages/16/01/35b68b1db517f27a01be4467593292eb5315def8900afad29fabf56304ba/librt-0.8.1-cp311-cp311-win32.whl", hash = "sha256:52c224e14614b750c0a6d97368e16804a98c684657c7518752c356834fff83bb", size = 54597, upload-time = "2026-02-17T16:11:37.544Z" },
- { url = "https://files.pythonhosted.org/packages/71/02/796fe8f02822235966693f257bf2c79f40e11337337a657a8cfebba5febc/librt-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:c00e5c884f528c9932d278d5c9cbbea38a6b81eb62c02e06ae53751a83a4d52b", size = 61733, upload-time = "2026-02-17T16:11:38.691Z" },
- { url = "https://files.pythonhosted.org/packages/28/ad/232e13d61f879a42a4e7117d65e4984bb28371a34bb6fb9ca54ec2c8f54e/librt-0.8.1-cp311-cp311-win_arm64.whl", hash = "sha256:f7cdf7f26c2286ffb02e46d7bac56c94655540b26347673bea15fa52a6af17e9", size = 52273, upload-time = "2026-02-17T16:11:40.308Z" },
{ url = "https://files.pythonhosted.org/packages/95/21/d39b0a87ac52fc98f621fb6f8060efb017a767ebbbac2f99fbcbc9ddc0d7/librt-0.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a28f2612ab566b17f3698b0da021ff9960610301607c9a5e8eaca62f5e1c350a", size = 66516, upload-time = "2026-02-17T16:11:41.604Z" },
{ url = "https://files.pythonhosted.org/packages/69/f1/46375e71441c43e8ae335905e069f1c54febee63a146278bcee8782c84fd/librt-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:60a78b694c9aee2a0f1aaeaa7d101cf713e92e8423a941d2897f4fa37908dab9", size = 68634, upload-time = "2026-02-17T16:11:43.268Z" },
{ url = "https://files.pythonhosted.org/packages/0a/33/c510de7f93bf1fa19e13423a606d8189a02624a800710f6e6a0a0f0784b3/librt-0.8.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:758509ea3f1eba2a57558e7e98f4659d0ea7670bff49673b0dde18a3c7e6c0eb", size = 198941, upload-time = "2026-02-17T16:11:44.28Z" },
@@ -3503,11 +3163,6 @@ version = "0.45.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/99/8d/5baf1cef7f9c084fb35a8afbde88074f0d6a727bc63ef764fe0e7543ba40/llvmlite-0.45.1.tar.gz", hash = "sha256:09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32", size = 185600, upload-time = "2025-10-01T17:59:52.046Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/04/ad/9bdc87b2eb34642c1cfe6bcb4f5db64c21f91f26b010f263e7467e7536a3/llvmlite-0.45.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:60f92868d5d3af30b4239b50e1717cb4e4e54f6ac1c361a27903b318d0f07f42", size = 43043526, upload-time = "2025-10-01T18:03:15.051Z" },
- { url = "https://files.pythonhosted.org/packages/a5/ea/c25c6382f452a943b4082da5e8c1665ce29a62884e2ec80608533e8e82d5/llvmlite-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98baab513e19beb210f1ef39066288784839a44cd504e24fff5d17f1b3cf0860", size = 37253118, upload-time = "2025-10-01T18:04:06.783Z" },
- { url = "https://files.pythonhosted.org/packages/fe/af/85fc237de98b181dbbe8647324331238d6c52a3554327ccdc83ced28efba/llvmlite-0.45.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3adc2355694d6a6fbcc024d59bb756677e7de506037c878022d7b877e7613a36", size = 56288209, upload-time = "2025-10-01T18:01:00.168Z" },
- { url = "https://files.pythonhosted.org/packages/0a/df/3daf95302ff49beff4230065e3178cd40e71294968e8d55baf4a9e560814/llvmlite-0.45.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f3377a6db40f563058c9515dedcc8a3e562d8693a106a28f2ddccf2c8fcf6ca", size = 55140958, upload-time = "2025-10-01T18:02:11.199Z" },
- { url = "https://files.pythonhosted.org/packages/a4/56/4c0d503fe03bac820ecdeb14590cf9a248e120f483bcd5c009f2534f23f0/llvmlite-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a", size = 38132232, upload-time = "2025-10-01T18:04:52.181Z" },
{ url = "https://files.pythonhosted.org/packages/e2/7c/82cbd5c656e8991bcc110c69d05913be2229302a92acb96109e166ae31fb/llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e", size = 43043524, upload-time = "2025-10-01T18:03:30.666Z" },
{ url = "https://files.pythonhosted.org/packages/9d/bc/5314005bb2c7ee9f33102c6456c18cc81745d7055155d1218f1624463774/llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f", size = 37253123, upload-time = "2025-10-01T18:04:18.177Z" },
{ url = "https://files.pythonhosted.org/packages/96/76/0f7154952f037cb320b83e1c952ec4a19d5d689cf7d27cb8a26887d7bbc1/llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f", size = 56288211, upload-time = "2025-10-01T18:01:24.079Z" },
@@ -3521,22 +3176,6 @@ version = "6.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" },
- { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" },
- { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" },
- { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" },
- { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" },
- { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" },
- { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" },
- { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" },
- { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" },
- { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" },
- { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" },
- { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" },
- { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" },
- { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" },
- { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" },
- { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" },
{ url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" },
{ url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" },
{ url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" },
@@ -3555,12 +3194,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" },
{ url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" },
{ url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" },
- { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" },
- { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" },
- { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" },
- { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" },
- { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" },
- { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" },
]
[[package]]
@@ -3578,14 +3211,6 @@ version = "4.4.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/57/51/f1b86d93029f418033dddf9b9f79c8d2641e7454080478ee2aab5123173e/lz4-4.4.5.tar.gz", hash = "sha256:5f0b9e53c1e82e88c10d7c180069363980136b9d7a8306c4dca4f760d60c39f0", size = 172886, upload-time = "2025-11-03T13:02:36.061Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/93/5b/6edcd23319d9e28b1bedf32768c3d1fd56eed8223960a2c47dacd2cec2af/lz4-4.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6da84a26b3aa5da13a62e4b89ab36a396e9327de8cd48b436a3467077f8ccd4", size = 207391, upload-time = "2025-11-03T13:01:36.644Z" },
- { url = "https://files.pythonhosted.org/packages/34/36/5f9b772e85b3d5769367a79973b8030afad0d6b724444083bad09becd66f/lz4-4.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61d0ee03e6c616f4a8b69987d03d514e8896c8b1b7cc7598ad029e5c6aedfd43", size = 207146, upload-time = "2025-11-03T13:01:37.928Z" },
- { url = "https://files.pythonhosted.org/packages/04/f4/f66da5647c0d72592081a37c8775feacc3d14d2625bbdaabd6307c274565/lz4-4.4.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:33dd86cea8375d8e5dd001e41f321d0a4b1eb7985f39be1b6a4f466cd480b8a7", size = 1292623, upload-time = "2025-11-03T13:01:39.341Z" },
- { url = "https://files.pythonhosted.org/packages/85/fc/5df0f17467cdda0cad464a9197a447027879197761b55faad7ca29c29a04/lz4-4.4.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:609a69c68e7cfcfa9d894dc06be13f2e00761485b62df4e2472f1b66f7b405fb", size = 1279982, upload-time = "2025-11-03T13:01:40.816Z" },
- { url = "https://files.pythonhosted.org/packages/25/3b/b55cb577aa148ed4e383e9700c36f70b651cd434e1c07568f0a86c9d5fbb/lz4-4.4.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75419bb1a559af00250b8f1360d508444e80ed4b26d9d40ec5b09fe7875cb989", size = 1368674, upload-time = "2025-11-03T13:01:42.118Z" },
- { url = "https://files.pythonhosted.org/packages/fb/31/e97e8c74c59ea479598e5c55cbe0b1334f03ee74ca97726e872944ed42df/lz4-4.4.5-cp311-cp311-win32.whl", hash = "sha256:12233624f1bc2cebc414f9efb3113a03e89acce3ab6f72035577bc61b270d24d", size = 88168, upload-time = "2025-11-03T13:01:43.282Z" },
- { url = "https://files.pythonhosted.org/packages/18/47/715865a6c7071f417bef9b57c8644f29cb7a55b77742bd5d93a609274e7e/lz4-4.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:8a842ead8ca7c0ee2f396ca5d878c4c40439a527ebad2b996b0444f0074ed004", size = 99491, upload-time = "2025-11-03T13:01:44.167Z" },
- { url = "https://files.pythonhosted.org/packages/14/e7/ac120c2ca8caec5c945e6356ada2aa5cfabd83a01e3170f264a5c42c8231/lz4-4.4.5-cp311-cp311-win_arm64.whl", hash = "sha256:83bc23ef65b6ae44f3287c38cbf82c269e2e96a26e560aa551735883388dcc4b", size = 91271, upload-time = "2025-11-03T13:01:45.016Z" },
{ url = "https://files.pythonhosted.org/packages/1b/ac/016e4f6de37d806f7cc8f13add0a46c9a7cfc41a5ddc2bc831d7954cf1ce/lz4-4.4.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:df5aa4cead2044bab83e0ebae56e0944cc7fcc1505c7787e9e1057d6d549897e", size = 207163, upload-time = "2025-11-03T13:01:45.895Z" },
{ url = "https://files.pythonhosted.org/packages/8d/df/0fadac6e5bd31b6f34a1a8dbd4db6a7606e70715387c27368586455b7fc9/lz4-4.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d0bf51e7745484d2092b3a51ae6eb58c3bd3ce0300cf2b2c14f76c536d5697a", size = 207150, upload-time = "2025-11-03T13:01:47.205Z" },
{ url = "https://files.pythonhosted.org/packages/b7/17/34e36cc49bb16ca73fb57fbd4c5eaa61760c6b64bce91fcb4e0f4a97f852/lz4-4.4.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7b62f94b523c251cf32aa4ab555f14d39bd1a9df385b72443fd76d7c7fb051f5", size = 1292045, upload-time = "2025-11-03T13:01:48.667Z" },
@@ -3635,17 +3260,6 @@ version = "3.0.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" },
- { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" },
- { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" },
- { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" },
- { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" },
- { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" },
- { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" },
- { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" },
- { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" },
- { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" },
- { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" },
{ url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
{ url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
{ url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
@@ -3704,22 +3318,6 @@ version = "5.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a7/af/f28c2c2f51f31abb4725f9a64bc7863d5f491f6539bd26aee2a1d21a649e/mmh3-5.2.0.tar.gz", hash = "sha256:1efc8fec8478e9243a78bb993422cf79f8ff85cb4cf6b79647480a31e0d950a8", size = 33582, upload-time = "2025-07-29T07:43:48.49Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f7/87/399567b3796e134352e11a8b973cd470c06b2ecfad5468fe580833be442b/mmh3-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7901c893e704ee3c65f92d39b951f8f34ccf8e8566768c58103fb10e55afb8c1", size = 56107, upload-time = "2025-07-29T07:41:57.07Z" },
- { url = "https://files.pythonhosted.org/packages/c3/09/830af30adf8678955b247d97d3d9543dd2fd95684f3cd41c0cd9d291da9f/mmh3-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5f5536b1cbfa72318ab3bfc8a8188b949260baed186b75f0abc75b95d8c051", size = 40635, upload-time = "2025-07-29T07:41:57.903Z" },
- { url = "https://files.pythonhosted.org/packages/07/14/eaba79eef55b40d653321765ac5e8f6c9ac38780b8a7c2a2f8df8ee0fb72/mmh3-5.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cedac4f4054b8f7859e5aed41aaa31ad03fce6851901a7fdc2af0275ac533c10", size = 40078, upload-time = "2025-07-29T07:41:58.772Z" },
- { url = "https://files.pythonhosted.org/packages/bb/26/83a0f852e763f81b2265d446b13ed6d49ee49e1fc0c47b9655977e6f3d81/mmh3-5.2.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eb756caf8975882630ce4e9fbbeb9d3401242a72528230422c9ab3a0d278e60c", size = 97262, upload-time = "2025-07-29T07:41:59.678Z" },
- { url = "https://files.pythonhosted.org/packages/00/7d/b7133b10d12239aeaebf6878d7eaf0bf7d3738c44b4aba3c564588f6d802/mmh3-5.2.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:097e13c8b8a66c5753c6968b7640faefe85d8e38992703c1f666eda6ef4c3762", size = 103118, upload-time = "2025-07-29T07:42:01.197Z" },
- { url = "https://files.pythonhosted.org/packages/7b/3e/62f0b5dce2e22fd5b7d092aba285abd7959ea2b17148641e029f2eab1ffa/mmh3-5.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7c0c7845566b9686480e6a7e9044db4afb60038d5fabd19227443f0104eeee4", size = 106072, upload-time = "2025-07-29T07:42:02.601Z" },
- { url = "https://files.pythonhosted.org/packages/66/84/ea88bb816edfe65052c757a1c3408d65c4201ddbd769d4a287b0f1a628b2/mmh3-5.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:61ac226af521a572700f863d6ecddc6ece97220ce7174e311948ff8c8919a363", size = 112925, upload-time = "2025-07-29T07:42:03.632Z" },
- { url = "https://files.pythonhosted.org/packages/2e/13/c9b1c022807db575fe4db806f442d5b5784547e2e82cff36133e58ea31c7/mmh3-5.2.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:582f9dbeefe15c32a5fa528b79b088b599a1dfe290a4436351c6090f90ddebb8", size = 120583, upload-time = "2025-07-29T07:42:04.991Z" },
- { url = "https://files.pythonhosted.org/packages/8a/5f/0e2dfe1a38f6a78788b7eb2b23432cee24623aeabbc907fed07fc17d6935/mmh3-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2ebfc46b39168ab1cd44670a32ea5489bcbc74a25795c61b6d888c5c2cf654ed", size = 99127, upload-time = "2025-07-29T07:42:05.929Z" },
- { url = "https://files.pythonhosted.org/packages/77/27/aefb7d663b67e6a0c4d61a513c83e39ba2237e8e4557fa7122a742a23de5/mmh3-5.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1556e31e4bd0ac0c17eaf220be17a09c171d7396919c3794274cb3415a9d3646", size = 98544, upload-time = "2025-07-29T07:42:06.87Z" },
- { url = "https://files.pythonhosted.org/packages/ab/97/a21cc9b1a7c6e92205a1b5fa030cdf62277d177570c06a239eca7bd6dd32/mmh3-5.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81df0dae22cd0da87f1c978602750f33d17fb3d21fb0f326c89dc89834fea79b", size = 106262, upload-time = "2025-07-29T07:42:07.804Z" },
- { url = "https://files.pythonhosted.org/packages/43/18/db19ae82ea63c8922a880e1498a75342311f8aa0c581c4dd07711473b5f7/mmh3-5.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:eba01ec3bd4a49b9ac5ca2bc6a73ff5f3af53374b8556fcc2966dd2af9eb7779", size = 109824, upload-time = "2025-07-29T07:42:08.735Z" },
- { url = "https://files.pythonhosted.org/packages/9f/f5/41dcf0d1969125fc6f61d8618b107c79130b5af50b18a4651210ea52ab40/mmh3-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9a011469b47b752e7d20de296bb34591cdfcbe76c99c2e863ceaa2aa61113d2", size = 97255, upload-time = "2025-07-29T07:42:09.706Z" },
- { url = "https://files.pythonhosted.org/packages/32/b3/cce9eaa0efac1f0e735bb178ef9d1d2887b4927fe0ec16609d5acd492dda/mmh3-5.2.0-cp311-cp311-win32.whl", hash = "sha256:bc44fc2b886243d7c0d8daeb37864e16f232e5b56aaec27cc781d848264cfd28", size = 40779, upload-time = "2025-07-29T07:42:10.546Z" },
- { url = "https://files.pythonhosted.org/packages/7c/e9/3fa0290122e6d5a7041b50ae500b8a9f4932478a51e48f209a3879fe0b9b/mmh3-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ebf241072cf2777a492d0e09252f8cc2b3edd07dfdb9404b9757bffeb4f2cee", size = 41549, upload-time = "2025-07-29T07:42:11.399Z" },
- { url = "https://files.pythonhosted.org/packages/3a/54/c277475b4102588e6f06b2e9095ee758dfe31a149312cdbf62d39a9f5c30/mmh3-5.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:b5f317a727bba0e633a12e71228bc6a4acb4f471a98b1c003163b917311ea9a9", size = 39336, upload-time = "2025-07-29T07:42:12.209Z" },
{ url = "https://files.pythonhosted.org/packages/bf/6a/d5aa7edb5c08e0bd24286c7d08341a0446f9a2fbbb97d96a8a6dd81935ee/mmh3-5.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:384eda9361a7bf83a85e09447e1feafe081034af9dd428893701b959230d84be", size = 56141, upload-time = "2025-07-29T07:42:13.456Z" },
{ url = "https://files.pythonhosted.org/packages/08/49/131d0fae6447bc4a7299ebdb1a6fb9d08c9f8dcf97d75ea93e8152ddf7ab/mmh3-5.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c9da0d568569cc87315cb063486d761e38458b8ad513fedd3dc9263e1b81bcd", size = 40681, upload-time = "2025-07-29T07:42:14.306Z" },
{ url = "https://files.pythonhosted.org/packages/8f/6f/9221445a6bcc962b7f5ff3ba18ad55bba624bacdc7aa3fc0a518db7da8ec/mmh3-5.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86d1be5d63232e6eb93c50881aea55ff06eb86d8e08f9b5417c8c9b10db9db96", size = 40062, upload-time = "2025-07-29T07:42:15.08Z" },
@@ -3793,24 +3391,6 @@ version = "6.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" },
- { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" },
- { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" },
- { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" },
- { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" },
- { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" },
- { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" },
- { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" },
- { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" },
- { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" },
- { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" },
- { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" },
- { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" },
- { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" },
- { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" },
- { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" },
- { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" },
- { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" },
{ url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" },
{ url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" },
{ url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" },
@@ -3838,14 +3418,6 @@ version = "1.0.15"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/23/2e/88c147931ea9725d634840d538622e94122bceaf346233349b7b5c62964b/murmurhash-1.0.15.tar.gz", hash = "sha256:58e2b27b7847f9e2a6edf10b47a8c8dd70a4705f45dccb7bf76aeadacf56ba01", size = 13291, upload-time = "2025-11-14T09:51:15.272Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/6b/ca/77d3e69924a8eb4508bb4f0ad34e46adbeedeb93616a71080e61e53dad71/murmurhash-1.0.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f32307fb9347680bb4fe1cbef6362fb39bd994f1b59abd8c09ca174e44199081", size = 27397, upload-time = "2025-11-14T09:50:03.077Z" },
- { url = "https://files.pythonhosted.org/packages/e6/53/a936f577d35b245d47b310f29e5e9f09fcac776c8c992f1ab51a9fb0cee2/murmurhash-1.0.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:539d8405885d1d19c005f3a2313b47e8e54b0ee89915eb8dfbb430b194328e6c", size = 27692, upload-time = "2025-11-14T09:50:04.144Z" },
- { url = "https://files.pythonhosted.org/packages/4d/64/5f8cfd1fd9cbeb43fcff96672f5bd9e7e1598d1c970f808ecd915490dc20/murmurhash-1.0.15-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c4cd739a00f5a4602201b74568ddabae46ec304719d9be752fd8f534a9464b5e", size = 128396, upload-time = "2025-11-14T09:50:05.268Z" },
- { url = "https://files.pythonhosted.org/packages/ac/10/d9ce29d559a75db0d8a3f13ea12c7f541ec9de2afca38dc70418b890eedb/murmurhash-1.0.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44d211bcc3ec203c47dac06f48ee871093fcbdffa6652a6cc5ea7180306680a8", size = 128687, upload-time = "2025-11-14T09:50:06.527Z" },
- { url = "https://files.pythonhosted.org/packages/48/cd/dc97ab7e68cdfa1537a56e36dbc846c5a66701cc39ecee2d4399fe61996c/murmurhash-1.0.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f9bf47101354fb1dc4b2e313192566f04ba295c28a37e2f71c692759acc1ba3c", size = 128198, upload-time = "2025-11-14T09:50:08.062Z" },
- { url = "https://files.pythonhosted.org/packages/53/73/32f2aaa22c1e4afae337106baf0c938abf36a6cc879cfee83a00461bbbf7/murmurhash-1.0.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c69b4d3bcd6233782a78907fe10b9b7a796bdc5d28060cf097d067bec280a5d", size = 127214, upload-time = "2025-11-14T09:50:09.265Z" },
- { url = "https://files.pythonhosted.org/packages/82/ed/812103a7f353eba2d83655b08205e13a38c93b4db0692f94756e1eb44516/murmurhash-1.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:e43a69496342ce530bdd670264cb7c8f45490b296e4764c837ce577e3c7ebd53", size = 25241, upload-time = "2025-11-14T09:50:10.373Z" },
- { url = "https://files.pythonhosted.org/packages/eb/5f/2c511bdd28f7c24da37a00116ffd0432b65669d098f0d0260c66ac0ffdc2/murmurhash-1.0.15-cp311-cp311-win_arm64.whl", hash = "sha256:f3e99a6ee36ef5372df5f138e3d9c801420776d3641a34a49e5c2555f44edba7", size = 23216, upload-time = "2025-11-14T09:50:11.651Z" },
{ url = "https://files.pythonhosted.org/packages/b6/46/be8522d3456fdccf1b8b049c6d82e7a3c1114c4fc2cfe14b04cba4b3e701/murmurhash-1.0.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d37e3ae44746bca80b1a917c2ea625cf216913564ed43f69d2888e5df97db0cb", size = 27884, upload-time = "2025-11-14T09:50:13.133Z" },
{ url = "https://files.pythonhosted.org/packages/ed/cc/630449bf4f6178d7daf948ce46ad00b25d279065fc30abd8d706be3d87e0/murmurhash-1.0.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0861cb11039409eaf46878456b7d985ef17b6b484103a6fc367b2ecec846891d", size = 27855, upload-time = "2025-11-14T09:50:14.859Z" },
{ url = "https://files.pythonhosted.org/packages/ff/30/ea8f601a9bf44db99468696efd59eb9cff1157cd55cb586d67116697583f/murmurhash-1.0.15-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5a301decfaccfec70fe55cb01dde2a012c3014a874542eaa7cc73477bb749616", size = 134088, upload-time = "2025-11-14T09:50:15.958Z" },
@@ -3868,12 +3440,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" },
- { url = "https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" },
- { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" },
- { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" },
- { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" },
- { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" },
{ url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" },
{ url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" },
{ url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" },
@@ -3887,9 +3453,6 @@ wheels = [
name = "mypy-boto3-bedrock-runtime"
version = "1.42.42"
source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "typing-extensions", marker = "python_full_version < '3.12'" },
-]
sdist = { url = "https://files.pythonhosted.org/packages/46/bb/65dc1b2c5796a6ab5f60bdb57343bd6c3ecb82251c580eca415c8548333e/mypy_boto3_bedrock_runtime-1.42.42.tar.gz", hash = "sha256:3a4088218478b6fbbc26055c03c95bee4fc04624a801090b3cce3037e8275c8d", size = 29840, upload-time = "2026-02-04T20:53:05.999Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/43/7ea062f2228f47b5779dcfa14dab48d6e29f979b35d1a5102b0ba80b9c1b/mypy_boto3_bedrock_runtime-1.42.42-py3-none-any.whl", hash = "sha256:b2d16eae22607d0685f90796b3a0afc78c0b09d45872e00eafd634a31dd9358f", size = 36077, upload-time = "2026-02-04T20:53:01.768Z" },
@@ -3910,11 +3473,6 @@ version = "9.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6e/c89babc7de3df01467d159854414659c885152579903a8220c8db02a3835/mysql_connector_python-9.6.0.tar.gz", hash = "sha256:c453bb55347174d87504b534246fb10c589daf5d057515bf615627198a3c7ef1", size = 12254999, upload-time = "2026-02-10T12:04:52.63Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/2a/08/0e9bce000736454c2b8bb4c40bded79328887483689487dad7df4cf59fb7/mysql_connector_python-9.6.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:011931f7392a1087e10d305b0303f2a20cc1af2c1c8a15cd5691609aa95dfcbd", size = 17582646, upload-time = "2026-01-21T09:04:48.327Z" },
- { url = "https://files.pythonhosted.org/packages/93/aa/3dd4db039fc6a9bcbdbade83be9914ead6786c0be4918170dfaf89327b76/mysql_connector_python-9.6.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b5212372aff6833473d2560ac87d3df9fb2498d0faacb7ebf231d947175fa36a", size = 18449358, upload-time = "2026-01-21T09:04:50.278Z" },
- { url = "https://files.pythonhosted.org/packages/53/38/ecd6d35382b6265ff5f030464d53b45e51ff2c2523ab88771c277fd84c05/mysql_connector_python-9.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61deca6e243fafbb3cf08ae27bd0c83d0f8188de8456e46aeba0d3db15bb7230", size = 34169309, upload-time = "2026-01-21T09:04:52.402Z" },
- { url = "https://files.pythonhosted.org/packages/18/1d/fe1133eb76089342854d8fbe88e28598f7e06bc684a763d21fc7b23f1d5e/mysql_connector_python-9.6.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:adabbc5e1475cdf5fb6f1902a25edc3bd1e0726fa45f01ab1b8f479ff43b3337", size = 34541101, upload-time = "2026-01-21T09:04:55.897Z" },
- { url = "https://files.pythonhosted.org/packages/3f/99/da0f55beb970ca049fd7d37a6391d686222af89a8b13e636d8e9bbd06536/mysql_connector_python-9.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:8732ca0b7417b45238bcbfc7e64d9c4d62c759672207c6284f0921c366efddc7", size = 16514767, upload-time = "2026-02-10T12:03:50.584Z" },
{ url = "https://files.pythonhosted.org/packages/8f/d9/2a4b4d90b52f4241f0f71618cd4bd8779dd6d18db8058b0a4dd83ec0541c/mysql_connector_python-9.6.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9664e217c72dd6fb700f4c8512af90261f72d2f5d7c00c4e13e4c1e09bfa3d5e", size = 17585672, upload-time = "2026-02-10T12:03:52.955Z" },
{ url = "https://files.pythonhosted.org/packages/33/91/2495835733a054e716a17dc28404748b33f2dc1da1ae4396fb45574adf40/mysql_connector_python-9.6.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:1ed4b5c4761e5333035293e746683890e4ef2e818e515d14023fd80293bc31fa", size = 18452624, upload-time = "2026-02-10T12:03:56.153Z" },
{ url = "https://files.pythonhosted.org/packages/7a/69/e83abbbbf7f8eed855b5a5ff7285bc0afb1199418ac036c7691edf41e154/mysql_connector_python-9.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5095758dcb89a6bce2379f349da336c268c407129002b595c5dba82ce387e2a5", size = 34169154, upload-time = "2026-02-10T12:03:58.831Z" },
@@ -3973,11 +3531,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/a3/20/33dbdbfe60e5fd8e3dbfde299d106279a33d9f8308346022316781368591/numba-0.62.1.tar.gz", hash = "sha256:7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161", size = 2749817, upload-time = "2025-09-29T10:46:31.551Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/dd/5f/8b3491dd849474f55e33c16ef55678ace1455c490555337899c35826836c/numba-0.62.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f43e24b057714e480fe44bc6031de499e7cf8150c63eb461192caa6cc8530bc8", size = 2684279, upload-time = "2025-09-29T10:43:37.213Z" },
- { url = "https://files.pythonhosted.org/packages/bf/18/71969149bfeb65a629e652b752b80167fe8a6a6f6e084f1f2060801f7f31/numba-0.62.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:57cbddc53b9ee02830b828a8428757f5c218831ccc96490a314ef569d8342b7b", size = 2687330, upload-time = "2025-09-29T10:43:59.601Z" },
- { url = "https://files.pythonhosted.org/packages/0e/7d/403be3fecae33088027bc8a95dc80a2fda1e3beff3e0e5fc4374ada3afbe/numba-0.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:604059730c637c7885386521bb1b0ddcbc91fd56131a6dcc54163d6f1804c872", size = 3739727, upload-time = "2025-09-29T10:42:45.922Z" },
- { url = "https://files.pythonhosted.org/packages/e0/c3/3d910d08b659a6d4c62ab3cd8cd93c4d8b7709f55afa0d79a87413027ff6/numba-0.62.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6c540880170bee817011757dc9049dba5a29db0c09b4d2349295991fe3ee55f", size = 3445490, upload-time = "2025-09-29T10:43:12.692Z" },
- { url = "https://files.pythonhosted.org/packages/5b/82/9d425c2f20d9f0a37f7cb955945a553a00fa06a2b025856c3550227c5543/numba-0.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:03de6d691d6b6e2b76660ba0f38f37b81ece8b2cc524a62f2a0cfae2bfb6f9da", size = 2745550, upload-time = "2025-09-29T10:44:20.571Z" },
{ url = "https://files.pythonhosted.org/packages/5e/fa/30fa6873e9f821c0ae755915a3ca444e6ff8d6a7b6860b669a3d33377ac7/numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494", size = 2685346, upload-time = "2025-09-29T10:43:43.677Z" },
{ url = "https://files.pythonhosted.org/packages/a9/d5/504ce8dc46e0dba2790c77e6b878ee65b60fe3e7d6d0006483ef6fde5a97/numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6", size = 2688139, upload-time = "2025-09-29T10:44:04.894Z" },
{ url = "https://files.pythonhosted.org/packages/50/5f/6a802741176c93f2ebe97ad90751894c7b0c922b52ba99a4395e79492205/numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59", size = 3796453, upload-time = "2025-09-29T10:42:52.771Z" },
@@ -3994,14 +3547,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 119400, upload-time = "2025-10-13T16:17:27.351Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/b2/a3/67999bdd1ed1f938d38f3fedd4969632f2f197b090e50505f7cc1fa82510/numexpr-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d03fcb4644a12f70a14d74006f72662824da5b6128bf1bcd10cc3ed80e64c34", size = 163195, upload-time = "2025-10-13T16:16:31.212Z" },
- { url = "https://files.pythonhosted.org/packages/25/95/d64f680ea1fc56d165457287e0851d6708800f9fcea346fc1b9957942ee6/numexpr-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2773ee1133f77009a1fc2f34fe236f3d9823779f5f75450e183137d49f00499f", size = 152088, upload-time = "2025-10-13T16:16:33.186Z" },
- { url = "https://files.pythonhosted.org/packages/0e/7f/3bae417cb13ae08afd86d08bb0301c32440fe0cae4e6262b530e0819aeda/numexpr-2.14.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebe4980f9494b9f94d10d2e526edc29e72516698d3bf95670ba79415492212a4", size = 451126, upload-time = "2025-10-13T16:13:22.248Z" },
- { url = "https://files.pythonhosted.org/packages/4c/1a/edbe839109518364ac0bd9e918cf874c755bb2c128040e920f198c494263/numexpr-2.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a381e5e919a745c9503bcefffc1c7f98c972c04ec58fc8e999ed1a929e01ba6", size = 442012, upload-time = "2025-10-13T16:14:51.416Z" },
- { url = "https://files.pythonhosted.org/packages/66/b1/be4ce99bff769a5003baddac103f34681997b31d4640d5a75c0e8ed59c78/numexpr-2.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d08856cfc1b440eb1caaa60515235369654321995dd68eb9377577392020f6cb", size = 1415975, upload-time = "2025-10-13T16:13:26.088Z" },
- { url = "https://files.pythonhosted.org/packages/e7/33/b33b8fdc032a05d9ebb44a51bfcd4b92c178a2572cd3e6c1b03d8a4b45b2/numexpr-2.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03130afa04edf83a7b590d207444f05a00363c9b9ea5d81c0f53b1ea13fad55a", size = 1464683, upload-time = "2025-10-13T16:14:58.87Z" },
- { url = "https://files.pythonhosted.org/packages/d0/b2/ddcf0ac6cf0a1d605e5aecd4281507fd79a9628a67896795ab2e975de5df/numexpr-2.14.1-cp311-cp311-win32.whl", hash = "sha256:db78fa0c9fcbaded3ae7453faf060bd7a18b0dc10299d7fcd02d9362be1213ed", size = 166838, upload-time = "2025-10-13T16:17:06.765Z" },
- { url = "https://files.pythonhosted.org/packages/64/72/4ca9bd97b2eb6dce9f5e70a3b6acec1a93e1fb9b079cb4cba2cdfbbf295d/numexpr-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e9b2f957798c67a2428be96b04bce85439bed05efe78eb78e4c2ca43737578e7", size = 160069, upload-time = "2025-10-13T16:17:08.752Z" },
{ url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790, upload-time = "2025-10-13T16:16:34.903Z" },
{ url = "https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196, upload-time = "2025-10-13T16:16:36.593Z" },
{ url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468, upload-time = "2025-10-13T16:13:29.531Z" },
@@ -4018,14 +3563,6 @@ version = "1.26.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" },
- { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" },
- { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" },
- { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" },
- { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" },
- { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" },
- { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" },
- { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" },
{ url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" },
{ url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" },
{ url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" },
@@ -4088,11 +3625,6 @@ dependencies = [
{ name = "sympy" },
]
wheels = [
- { url = "https://files.pythonhosted.org/packages/44/be/467b00f09061572f022ffd17e49e49e5a7a789056bad95b54dfd3bee73ff/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:6f91d2c9b0965e86827a5ba01531d5b669770b01775b23199565d6c1f136616c", size = 17196113, upload-time = "2025-10-22T03:47:33.526Z" },
- { url = "https://files.pythonhosted.org/packages/9f/a8/3c23a8f75f93122d2b3410bfb74d06d0f8da4ac663185f91866b03f7da1b/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:87d8b6eaf0fbeb6835a60a4265fde7a3b60157cf1b2764773ac47237b4d48612", size = 19153857, upload-time = "2025-10-22T03:46:37.578Z" },
- { url = "https://files.pythonhosted.org/packages/3f/d8/506eed9af03d86f8db4880a4c47cd0dffee973ef7e4f4cff9f1d4bcf7d22/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbfd2fca76c855317568c1b36a885ddea2272c13cb0e395002c402f2360429a6", size = 15220095, upload-time = "2025-10-22T03:46:24.769Z" },
- { url = "https://files.pythonhosted.org/packages/e9/80/113381ba832d5e777accedc6cb41d10f9eca82321ae31ebb6bcede530cea/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da44b99206e77734c5819aa2142c69e64f3b46edc3bd314f6a45a932defc0b3e", size = 17372080, upload-time = "2025-10-22T03:47:00.265Z" },
- { url = "https://files.pythonhosted.org/packages/3a/db/1b4a62e23183a0c3fe441782462c0ede9a2a65c6bbffb9582fab7c7a0d38/onnxruntime-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:902c756d8b633ce0dedd889b7c08459433fbcf35e9c38d1c03ddc020f0648c6e", size = 13468349, upload-time = "2025-10-22T03:47:25.783Z" },
{ url = "https://files.pythonhosted.org/packages/1b/9e/f748cd64161213adeef83d0cb16cb8ace1e62fa501033acdd9f9341fff57/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b8f029a6b98d3cf5be564d52802bb50a8489ab73409fa9db0bf583eabb7c2321", size = 17195929, upload-time = "2025-10-22T03:47:36.24Z" },
{ url = "https://files.pythonhosted.org/packages/91/9d/a81aafd899b900101988ead7fb14974c8a58695338ab6a0f3d6b0100f30b/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:218295a8acae83905f6f1aed8cacb8e3eb3bd7513a13fe4ba3b2664a19fc4a6b", size = 19157705, upload-time = "2025-10-22T03:46:40.415Z" },
{ url = "https://files.pythonhosted.org/packages/3c/35/4e40f2fba272a6698d62be2cd21ddc3675edfc1a4b9ddefcc4648f115315/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76ff670550dc23e58ea9bc53b5149b99a44e63b34b524f7b8547469aaa0dcb8c", size = 15226915, upload-time = "2025-10-22T03:46:27.773Z" },
@@ -4543,11 +4075,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/f7/02/70a872d1a4a739b4f7371ab8d3d5ed8c6e57e142e2503531aafcb220893c/oracledb-3.4.2.tar.gz", hash = "sha256:46e0f2278ff1fe83fbc33a3b93c72d429323ec7eed47bc9484e217776cd437e5", size = 855467, upload-time = "2026-01-28T17:25:39.91Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/64/80/be263b668ba32b258d07c85f7bfb6967a9677e016c299207b28734f04c4b/oracledb-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b8e4b8a852251cef09038b75f30fce1227010835f4e19cfbd436027acba2697c", size = 4228552, upload-time = "2026-01-28T17:25:54.844Z" },
- { url = "https://files.pythonhosted.org/packages/91/bc/e832a649529da7c60409a81be41f3213b4c7ffda4fe424222b2145e8d43c/oracledb-3.4.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1617a1db020346883455af005efbefd51be2c4d797e43b1b38455a19f8526b48", size = 2421924, upload-time = "2026-01-28T17:25:56.984Z" },
- { url = "https://files.pythonhosted.org/packages/86/21/d867c37e493a63b5521bd248110ad5b97b18253d64a30703e3e8f3d9631e/oracledb-3.4.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed78d7e7079a778062744ccf42141ce4806818c3f4dd6463e4a7edd561c9f86", size = 2599301, upload-time = "2026-01-28T17:25:58.529Z" },
- { url = "https://files.pythonhosted.org/packages/2a/de/9b1843ea27f7791449652d7f340f042c3053336d2c11caf29e59bab86189/oracledb-3.4.2-cp311-cp311-win32.whl", hash = "sha256:0e16fe3d057e0c41a23ad2ae95bfa002401690773376d476be608f79ac74bf05", size = 1492890, upload-time = "2026-01-28T17:26:00.662Z" },
- { url = "https://files.pythonhosted.org/packages/d6/10/cbc8afa2db0cec80530858d3e4574f9734fae8c0b7f1df261398aa026c5f/oracledb-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:f93cae08e8ed20f2d5b777a8602a71f9418389c661d2c937e84d94863e7e7011", size = 1843355, upload-time = "2026-01-28T17:26:02.637Z" },
{ url = "https://files.pythonhosted.org/packages/8f/81/2e6154f34b71cd93b4946c73ea13b69d54b8d45a5f6bbffe271793240d21/oracledb-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a7396664e592881225ba66385ee83ce339d864f39003d6e4ca31a894a7e7c552", size = 4220806, upload-time = "2026-01-28T17:26:04.322Z" },
{ url = "https://files.pythonhosted.org/packages/ab/a9/a1d59aaac77d8f727156ec6a3b03399917c90b7da4f02d057f92e5601f56/oracledb-3.4.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f04a2d62073407672f114d02529921de0677c6883ed7c64d8d1a3c04caa3238", size = 2233795, upload-time = "2026-01-28T17:26:05.877Z" },
{ url = "https://files.pythonhosted.org/packages/94/ec/8c4a38020cd251572bd406ddcbde98ca052ec94b5684f9aa9ef1ddfcc68c/oracledb-3.4.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8d75e4f879b908be66cce05ba6c05791a5dbb4a15e39abc01aa25c8a2492bd9", size = 2424756, upload-time = "2026-01-28T17:26:07.35Z" },
@@ -4561,21 +4088,6 @@ version = "3.11.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/70/a3/4e09c61a5f0c521cba0bb433639610ae037437669f1a4cbc93799e731d78/orjson-3.11.6.tar.gz", hash = "sha256:0a54c72259f35299fd033042367df781c2f66d10252955ca1efb7db309b954cb", size = 6175856, upload-time = "2026-01-29T15:13:07.942Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f3/fd/d6b0a36854179b93ed77839f107c4089d91cccc9f9ba1b752b6e3bac5f34/orjson-3.11.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e259e85a81d76d9665f03d6129e09e4435531870de5961ddcd0bf6e3a7fde7d7", size = 250029, upload-time = "2026-01-29T15:11:35.942Z" },
- { url = "https://files.pythonhosted.org/packages/a3/bb/22902619826641cf3b627c24aab62e2ad6b571bdd1d34733abb0dd57f67a/orjson-3.11.6-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:52263949f41b4a4822c6b1353bcc5ee2f7109d53a3b493501d3369d6d0e7937a", size = 134518, upload-time = "2026-01-29T15:11:37.347Z" },
- { url = "https://files.pythonhosted.org/packages/72/90/7a818da4bba1de711a9653c420749c0ac95ef8f8651cbc1dca551f462fe0/orjson-3.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6439e742fa7834a24698d358a27346bb203bff356ae0402e7f5df8f749c621a8", size = 137917, upload-time = "2026-01-29T15:11:38.511Z" },
- { url = "https://files.pythonhosted.org/packages/59/0f/02846c1cac8e205cb3822dd8aa8f9114acda216f41fd1999ace6b543418d/orjson-3.11.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b81ffd68f084b4e993e3867acb554a049fa7787cc8710bbcc1e26965580d99be", size = 134923, upload-time = "2026-01-29T15:11:39.711Z" },
- { url = "https://files.pythonhosted.org/packages/94/cf/aeaf683001b474bb3c3c757073a4231dfdfe8467fceaefa5bfd40902c99f/orjson-3.11.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5a5468e5e60f7ef6d7f9044b06c8f94a3c56ba528c6e4f7f06ae95164b595ec", size = 140752, upload-time = "2026-01-29T15:11:41.347Z" },
- { url = "https://files.pythonhosted.org/packages/fc/fe/dad52d8315a65f084044a0819d74c4c9daf9ebe0681d30f525b0d29a31f0/orjson-3.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72c5005eb45bd2535632d4f3bec7ad392832cfc46b62a3021da3b48a67734b45", size = 144201, upload-time = "2026-01-29T15:11:42.537Z" },
- { url = "https://files.pythonhosted.org/packages/36/bc/ab070dd421565b831801077f1e390c4d4af8bfcecafc110336680a33866b/orjson-3.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b14dd49f3462b014455a28a4d810d3549bf990567653eb43765cd847df09145", size = 142380, upload-time = "2026-01-29T15:11:44.309Z" },
- { url = "https://files.pythonhosted.org/packages/e6/d8/4b581c725c3a308717f28bf45a9fdac210bca08b67e8430143699413ff06/orjson-3.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bb2c1ea30ef302f0f89f9bf3e7f9ab5e2af29dc9f80eb87aa99788e4e2d65", size = 145582, upload-time = "2026-01-29T15:11:45.506Z" },
- { url = "https://files.pythonhosted.org/packages/5b/a2/09aab99b39f9a7f175ea8fa29adb9933a3d01e7d5d603cdee7f1c40c8da2/orjson-3.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:825e0a85d189533c6bff7e2fc417a28f6fcea53d27125c4551979aecd6c9a197", size = 147270, upload-time = "2026-01-29T15:11:46.782Z" },
- { url = "https://files.pythonhosted.org/packages/b8/2f/5ef8eaf7829dc50da3bf497c7775b21ee88437bc8c41f959aa3504ca6631/orjson-3.11.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:b04575417a26530637f6ab4b1f7b4f666eb0433491091da4de38611f97f2fcf3", size = 421222, upload-time = "2026-01-29T15:11:48.106Z" },
- { url = "https://files.pythonhosted.org/packages/3b/b0/dd6b941294c2b5b13da5fdc7e749e58d0c55a5114ab37497155e83050e95/orjson-3.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b83eb2e40e8c4da6d6b340ee6b1d6125f5195eb1b0ebb7eac23c6d9d4f92d224", size = 155562, upload-time = "2026-01-29T15:11:49.408Z" },
- { url = "https://files.pythonhosted.org/packages/8e/09/43924331a847476ae2f9a16bd6d3c9dab301265006212ba0d3d7fd58763a/orjson-3.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1f42da604ee65a6b87eef858c913ce3e5777872b19321d11e6fc6d21de89b64f", size = 147432, upload-time = "2026-01-29T15:11:50.635Z" },
- { url = "https://files.pythonhosted.org/packages/5d/e9/d9865961081816909f6b49d880749dbbd88425afd7c5bbce0549e2290d77/orjson-3.11.6-cp311-cp311-win32.whl", hash = "sha256:5ae45df804f2d344cffb36c43fdf03c82fb6cd247f5faa41e21891b40dfbf733", size = 139623, upload-time = "2026-01-29T15:11:51.82Z" },
- { url = "https://files.pythonhosted.org/packages/b4/f9/6836edb92f76eec1082919101eb1145d2f9c33c8f2c5e6fa399b82a2aaa8/orjson-3.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:f4295948d65ace0a2d8f2c4ccc429668b7eb8af547578ec882e16bf79b0050b2", size = 136647, upload-time = "2026-01-29T15:11:53.454Z" },
- { url = "https://files.pythonhosted.org/packages/b3/0c/4954082eea948c9ae52ee0bcbaa2f99da3216a71bcc314ab129bde22e565/orjson-3.11.6-cp311-cp311-win_arm64.whl", hash = "sha256:314e9c45e0b81b547e3a1cfa3df3e07a815821b3dac9fe8cb75014071d0c16a4", size = 135327, upload-time = "2026-01-29T15:11:56.616Z" },
{ url = "https://files.pythonhosted.org/packages/14/ba/759f2879f41910b7e5e0cdbd9cf82a4f017c527fb0e972e9869ca7fe4c8e/orjson-3.11.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6f03f30cd8953f75f2a439070c743c7336d10ee940da918d71c6f3556af3ddcf", size = 249988, upload-time = "2026-01-29T15:11:58.294Z" },
{ url = "https://files.pythonhosted.org/packages/f0/70/54cecb929e6c8b10104fcf580b0cc7dc551aa193e83787dd6f3daba28bb5/orjson-3.11.6-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:af44baae65ef386ad971469a8557a0673bb042b0b9fd4397becd9c2dfaa02588", size = 134445, upload-time = "2026-01-29T15:11:59.819Z" },
{ url = "https://files.pythonhosted.org/packages/f2/6f/ec0309154457b9ba1ad05f11faa4441f76037152f75e1ac577db3ce7ca96/orjson-3.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c310a48542094e4f7dbb6ac076880994986dda8ca9186a58c3cb70a3514d3231", size = 137708, upload-time = "2026-01-29T15:12:01.488Z" },
@@ -4636,14 +4148,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ff/07/c7087e003ceee9b9a82539b40414ec557aa795b584a1a346e89180853d79/pandas-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de09668c1bf3b925c07e5762291602f0d789eca1b3a781f99c1c78f6cac0e7ea", size = 10323380, upload-time = "2026-02-17T22:18:16.133Z" },
- { url = "https://files.pythonhosted.org/packages/c1/27/90683c7122febeefe84a56f2cde86a9f05f68d53885cebcc473298dfc33e/pandas-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24ba315ba3d6e5806063ac6eb717504e499ce30bd8c236d8693a5fd3f084c796", size = 9923455, upload-time = "2026-02-17T22:18:19.13Z" },
- { url = "https://files.pythonhosted.org/packages/0e/f1/ed17d927f9950643bc7631aa4c99ff0cc83a37864470bc419345b656a41f/pandas-3.0.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:406ce835c55bac912f2a0dcfaf27c06d73c6b04a5dde45f1fd3169ce31337389", size = 10753464, upload-time = "2026-02-17T22:18:21.134Z" },
- { url = "https://files.pythonhosted.org/packages/2e/7c/870c7e7daec2a6c7ff2ac9e33b23317230d4e4e954b35112759ea4a924a7/pandas-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:830994d7e1f31dd7e790045235605ab61cff6c94defc774547e8b7fdfbff3dc7", size = 11255234, upload-time = "2026-02-17T22:18:24.175Z" },
- { url = "https://files.pythonhosted.org/packages/5c/39/3653fe59af68606282b989c23d1a543ceba6e8099cbcc5f1d506a7bae2aa/pandas-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a64ce8b0f2de1d2efd2ae40b0abe7f8ae6b29fbfb3812098ed5a6f8e235ad9bf", size = 11767299, upload-time = "2026-02-17T22:18:26.824Z" },
- { url = "https://files.pythonhosted.org/packages/9b/31/1daf3c0c94a849c7a8dab8a69697b36d313b229918002ba3e409265c7888/pandas-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9832c2c69da24b602c32e0c7b1b508a03949c18ba08d4d9f1c1033426685b447", size = 12333292, upload-time = "2026-02-17T22:18:28.996Z" },
- { url = "https://files.pythonhosted.org/packages/1f/67/af63f83cd6ca603a00fe8530c10a60f0879265b8be00b5930e8e78c5b30b/pandas-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:84f0904a69e7365f79a0c77d3cdfccbfb05bf87847e3a51a41e1426b0edb9c79", size = 9892176, upload-time = "2026-02-17T22:18:31.79Z" },
- { url = "https://files.pythonhosted.org/packages/79/ab/9c776b14ac4b7b4140788eca18468ea39894bc7340a408f1d1e379856a6b/pandas-3.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:4a68773d5a778afb31d12e34f7dd4612ab90de8c6fb1d8ffe5d4a03b955082a1", size = 9151328, upload-time = "2026-02-17T22:18:35.721Z" },
{ url = "https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" },
{ url = "https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" },
{ url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" },
@@ -4743,17 +4247,6 @@ version = "12.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/2b/46/5da1ec4a5171ee7bf1a0efa064aba70ba3d6e0788ce3f5acd1375d23c8c0/pillow-12.1.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e879bb6cd5c73848ef3b2b48b8af9ff08c5b71ecda8048b7dd22d8a33f60be32", size = 5304084, upload-time = "2026-02-11T04:20:27.501Z" },
- { url = "https://files.pythonhosted.org/packages/78/93/a29e9bc02d1cf557a834da780ceccd54e02421627200696fcf805ebdc3fb/pillow-12.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:365b10bb9417dd4498c0e3b128018c4a624dc11c7b97d8cc54effe3b096f4c38", size = 4657866, upload-time = "2026-02-11T04:20:29.827Z" },
- { url = "https://files.pythonhosted.org/packages/13/84/583a4558d492a179d31e4aae32eadce94b9acf49c0337c4ce0b70e0a01f2/pillow-12.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d4ce8e329c93845720cd2014659ca67eac35f6433fd3050393d85f3ecef0dad5", size = 6232148, upload-time = "2026-02-11T04:20:31.329Z" },
- { url = "https://files.pythonhosted.org/packages/d5/e2/53c43334bbbb2d3b938978532fbda8e62bb6e0b23a26ce8592f36bcc4987/pillow-12.1.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc354a04072b765eccf2204f588a7a532c9511e8b9c7f900e1b64e3e33487090", size = 8038007, upload-time = "2026-02-11T04:20:34.225Z" },
- { url = "https://files.pythonhosted.org/packages/b8/a6/3d0e79c8a9d58150dd98e199d7c1c56861027f3829a3a60b3c2784190180/pillow-12.1.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e7976bf1910a8116b523b9f9f58bf410f3e8aa330cd9a2bb2953f9266ab49af", size = 6345418, upload-time = "2026-02-11T04:20:35.858Z" },
- { url = "https://files.pythonhosted.org/packages/a2/c8/46dfeac5825e600579157eea177be43e2f7ff4a99da9d0d0a49533509ac5/pillow-12.1.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:597bd9c8419bc7c6af5604e55847789b69123bbe25d65cc6ad3012b4f3c98d8b", size = 7034590, upload-time = "2026-02-11T04:20:37.91Z" },
- { url = "https://files.pythonhosted.org/packages/af/bf/e6f65d3db8a8bbfeaf9e13cc0417813f6319863a73de934f14b2229ada18/pillow-12.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2c1fc0f2ca5f96a3c8407e41cca26a16e46b21060fe6d5b099d2cb01412222f5", size = 6458655, upload-time = "2026-02-11T04:20:39.496Z" },
- { url = "https://files.pythonhosted.org/packages/f9/c2/66091f3f34a25894ca129362e510b956ef26f8fb67a0e6417bc5744e56f1/pillow-12.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:578510d88c6229d735855e1f278aa305270438d36a05031dfaae5067cc8eb04d", size = 7159286, upload-time = "2026-02-11T04:20:41.139Z" },
- { url = "https://files.pythonhosted.org/packages/7b/5a/24bc8eb526a22f957d0cec6243146744966d40857e3d8deb68f7902ca6c1/pillow-12.1.1-cp311-cp311-win32.whl", hash = "sha256:7311c0a0dcadb89b36b7025dfd8326ecfa36964e29913074d47382706e516a7c", size = 6328663, upload-time = "2026-02-11T04:20:43.184Z" },
- { url = "https://files.pythonhosted.org/packages/31/03/bef822e4f2d8f9d7448c133d0a18185d3cce3e70472774fffefe8b0ed562/pillow-12.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:fbfa2a7c10cc2623f412753cddf391c7f971c52ca40a3f65dc5039b2939e8563", size = 7031448, upload-time = "2026-02-11T04:20:44.696Z" },
- { url = "https://files.pythonhosted.org/packages/49/70/f76296f53610bd17b2e7d31728b8b7825e3ac3b5b3688b51f52eab7c0818/pillow-12.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:b81b5e3511211631b3f672a595e3221252c90af017e399056d0faabb9538aa80", size = 2453651, upload-time = "2026-02-11T04:20:46.243Z" },
{ url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" },
{ url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" },
{ url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" },
@@ -4765,13 +4258,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1f/e7/7c4552d80052337eb28653b617eafdef39adfb137c49dd7e831b8dc13bc5/pillow-12.1.1-cp312-cp312-win32.whl", hash = "sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a", size = 6328823, upload-time = "2026-02-11T04:21:01.385Z" },
{ url = "https://files.pythonhosted.org/packages/3d/17/688626d192d7261bbbf98846fc98995726bddc2c945344b65bec3a29d731/pillow-12.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6", size = 7033367, upload-time = "2026-02-11T04:21:03.536Z" },
{ url = "https://files.pythonhosted.org/packages/ed/fe/a0ef1f73f939b0eca03ee2c108d0043a87468664770612602c63266a43c4/pillow-12.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523", size = 2453811, upload-time = "2026-02-11T04:21:05.116Z" },
- { url = "https://files.pythonhosted.org/packages/56/11/5d43209aa4cb58e0cc80127956ff1796a68b928e6324bbf06ef4db34367b/pillow-12.1.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:600fd103672b925fe62ed08e0d874ea34d692474df6f4bf7ebe148b30f89f39f", size = 5228606, upload-time = "2026-02-11T04:22:52.106Z" },
- { url = "https://files.pythonhosted.org/packages/5f/d5/3b005b4e4fda6698b371fa6c21b097d4707585d7db99e98d9b0b87ac612a/pillow-12.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:665e1b916b043cef294bc54d47bf02d87e13f769bc4bc5fa225a24b3a6c5aca9", size = 4622321, upload-time = "2026-02-11T04:22:53.827Z" },
- { url = "https://files.pythonhosted.org/packages/df/36/ed3ea2d594356fd8037e5a01f6156c74bc8d92dbb0fa60746cc96cabb6e8/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:495c302af3aad1ca67420ddd5c7bd480c8867ad173528767d906428057a11f0e", size = 5247579, upload-time = "2026-02-11T04:22:56.094Z" },
- { url = "https://files.pythonhosted.org/packages/54/9a/9cc3e029683cf6d20ae5085da0dafc63148e3252c2f13328e553aaa13cfb/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fd420ef0c52c88b5a035a0886f367748c72147b2b8f384c9d12656678dfdfa9", size = 6989094, upload-time = "2026-02-11T04:22:58.288Z" },
- { url = "https://files.pythonhosted.org/packages/00/98/fc53ab36da80b88df0967896b6c4b4cd948a0dc5aa40a754266aa3ae48b3/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f975aa7ef9684ce7e2c18a3aa8f8e2106ce1e46b94ab713d156b2898811651d3", size = 5313850, upload-time = "2026-02-11T04:23:00.554Z" },
- { url = "https://files.pythonhosted.org/packages/30/02/00fa585abfd9fe9d73e5f6e554dc36cc2b842898cbfc46d70353dae227f8/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8089c852a56c2966cf18835db62d9b34fef7ba74c726ad943928d494fa7f4735", size = 5963343, upload-time = "2026-02-11T04:23:02.934Z" },
- { url = "https://files.pythonhosted.org/packages/f2/26/c56ce33ca856e358d27fda9676c055395abddb82c35ac0f593877ed4562e/pillow-12.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e", size = 7029880, upload-time = "2026-02-11T04:23:04.783Z" },
]
[[package]]
@@ -4870,14 +4356,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/bf/34/eb4f5f0f678e152a96e826da867d2f41c4b18a2d589e40e1dd3347219e91/preshed-3.0.12.tar.gz", hash = "sha256:b73f9a8b54ee1d44529cc6018356896cff93d48f755f29c134734d9371c0d685", size = 15027, upload-time = "2025-11-17T13:00:33.621Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/1e/54/d1e02d0a0ea348fb6a769506166e366abfe87ee917c2f11f7139c7acbf10/preshed-3.0.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc45fda3fd4ae1ae15c37f18f0777cf389ce9184ef8884b39b18894416fd1341", size = 128439, upload-time = "2025-11-17T12:59:21.317Z" },
- { url = "https://files.pythonhosted.org/packages/8c/cb/685ca57ca6e438345b3f6c20226705a0e056a3de399a5bf8a9ee89b3dd2b/preshed-3.0.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75d6e628bc78c022dbb9267242715718f862c3105927732d166076ff009d65de", size = 124544, upload-time = "2025-11-17T12:59:22.944Z" },
- { url = "https://files.pythonhosted.org/packages/f8/07/018fcd3bf298304e1570065cf80601ac16acd29f799578fd47b715dd3ca2/preshed-3.0.12-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b901cff5c814facf7a864b0a4c14a16d45fa1379899a585b3fb48ee36a2dccdb", size = 824728, upload-time = "2025-11-17T12:59:24.614Z" },
- { url = "https://files.pythonhosted.org/packages/79/dc/d888b328fcedae530df53396d9fc0006026aa8793fec54d7d34f57f31ff5/preshed-3.0.12-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d1099253bf73dd3c39313280bd5331841f769637b27ddb576ff362c4e7bad298", size = 825969, upload-time = "2025-11-17T12:59:26.493Z" },
- { url = "https://files.pythonhosted.org/packages/21/51/f19933301f42ece1ffef1f7f4c370d09f0351c43c528e66fac24560e44d2/preshed-3.0.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1af4a049ffe9d0246e5dc10d6f54820ed064c40e5c3f7b6526127c664008297c", size = 842346, upload-time = "2025-11-17T12:59:28.092Z" },
- { url = "https://files.pythonhosted.org/packages/51/46/025f60fd3d51bf60606a0f8f0cd39c40068b9b5e4d249bca1682e4ff09c3/preshed-3.0.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57159bcedca0cb4c99390f8a6e730f8659fdb663a5a3efcd9c4531e0f54b150e", size = 865504, upload-time = "2025-11-17T12:59:29.648Z" },
- { url = "https://files.pythonhosted.org/packages/88/b5/2e6ee5ab19b03e7983fc5e1850c812fb71dc178dd140d6aca3b45306bdf7/preshed-3.0.12-cp311-cp311-win_amd64.whl", hash = "sha256:8fe9cf1745e203e5aa58b8700436f78da1dcf0f0e2efb0054b467effd9d7d19d", size = 117736, upload-time = "2025-11-17T12:59:30.974Z" },
- { url = "https://files.pythonhosted.org/packages/1e/17/8a0a8f4b01e71b5fb7c5cd4c9fec04d7b852d42f1f9e096b01e7d2b16b17/preshed-3.0.12-cp311-cp311-win_arm64.whl", hash = "sha256:12d880f8786cb6deac34e99b8b07146fb92d22fbca0023208e03325f5944606b", size = 105127, upload-time = "2025-11-17T12:59:32.171Z" },
{ url = "https://files.pythonhosted.org/packages/4b/f7/ff3aca937eeaee19c52c45ddf92979546e52ed0686e58be4bc09c47e7d88/preshed-3.0.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2779861f5d69480493519ed123a622a13012d1182126779036b99d9d989bf7e9", size = 129958, upload-time = "2025-11-17T12:59:33.391Z" },
{ url = "https://files.pythonhosted.org/packages/80/24/fd654a9c0f5f3ed1a9b1d8a392f063ae9ca29ad0b462f0732ae0147f7cee/preshed-3.0.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffe1fd7d92f51ed34383e20d8b734780c814ca869cfdb7e07f2d31651f90cdf4", size = 124550, upload-time = "2025-11-17T12:59:34.688Z" },
{ url = "https://files.pythonhosted.org/packages/71/49/8271c7f680696f4b0880f44357d2a903d649cb9f6e60a1efc97a203104df/preshed-3.0.12-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:91893404858502cc4e856d338fef3d2a4a552135f79a1041c24eb919817c19db", size = 874987, upload-time = "2025-11-17T12:59:36.062Z" },
@@ -4906,21 +4384,6 @@ version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" },
- { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" },
- { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" },
- { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" },
- { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" },
- { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" },
- { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" },
- { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" },
- { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" },
- { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" },
- { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" },
- { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" },
- { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" },
- { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" },
- { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" },
{ url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" },
{ url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" },
{ url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" },
@@ -5010,17 +4473,6 @@ name = "psycopg-binary"
version = "3.3.3"
source = { registry = "https://pypi.org/simple" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/be/c0/b389119dd754483d316805260f3e73cdcad97925839107cc7a296f6132b1/psycopg_binary-3.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a89bb9ee11177b2995d87186b1d9fa892d8ea725e85eab28c6525e4cc14ee048", size = 4609740, upload-time = "2026-02-18T16:47:51.093Z" },
- { url = "https://files.pythonhosted.org/packages/cf/e3/9976eef20f61840285174d360da4c820a311ab39d6b82fa09fbb545be825/psycopg_binary-3.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f7d0cf072c6fbac3795b08c98ef9ea013f11db609659dcfc6b1f6cc31f9e181", size = 4676837, upload-time = "2026-02-18T16:47:55.523Z" },
- { url = "https://files.pythonhosted.org/packages/9f/f2/d28ba2f7404fd7f68d41e8a11df86313bd646258244cb12a8dd83b868a97/psycopg_binary-3.3.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:90eecd93073922f085967f3ed3a98ba8c325cbbc8c1a204e300282abd2369e13", size = 5497070, upload-time = "2026-02-18T16:47:59.929Z" },
- { url = "https://files.pythonhosted.org/packages/de/2f/6c5c54b815edeb30a281cfcea96dc93b3bb6be939aea022f00cab7aa1420/psycopg_binary-3.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dac7ee2f88b4d7bb12837989ca354c38d400eeb21bce3b73dac02622f0a3c8d6", size = 5172410, upload-time = "2026-02-18T16:48:05.665Z" },
- { url = "https://files.pythonhosted.org/packages/51/75/8206c7008b57de03c1ada46bd3110cc3743f3fd9ed52031c4601401d766d/psycopg_binary-3.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b62cf8784eb6d35beaee1056d54caf94ec6ecf2b7552395e305518ab61eb8fd2", size = 6763408, upload-time = "2026-02-18T16:48:13.541Z" },
- { url = "https://files.pythonhosted.org/packages/d4/5a/ea1641a1e6c8c8b3454b0fcb43c3045133a8b703e6e824fae134088e63bd/psycopg_binary-3.3.3-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a39f34c9b18e8f6794cca17bfbcd64572ca2482318db644268049f8c738f35a6", size = 5006255, upload-time = "2026-02-18T16:48:22.176Z" },
- { url = "https://files.pythonhosted.org/packages/aa/fb/538df099bf55ae1637d52d7ccb6b9620b535a40f4c733897ac2b7bb9e14c/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:883d68d48ca9ff3cb3d10c5fdebea02c79b48eecacdddbf7cce6e7cdbdc216b8", size = 4532694, upload-time = "2026-02-18T16:48:27.338Z" },
- { url = "https://files.pythonhosted.org/packages/a1/d1/00780c0e187ea3c13dfc53bd7060654b2232cd30df562aac91a5f1c545ac/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:cab7bc3d288d37a80aa8c0820033250c95e40b1c2b5c57cf59827b19c2a8b69d", size = 4222833, upload-time = "2026-02-18T16:48:31.221Z" },
- { url = "https://files.pythonhosted.org/packages/7a/34/a07f1ff713c51d64dc9f19f2c32be80299a2055d5d109d5853662b922cb4/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:56c767007ca959ca32f796b42379fc7e1ae2ed085d29f20b05b3fc394f3715cc", size = 3952818, upload-time = "2026-02-18T16:48:35.869Z" },
- { url = "https://files.pythonhosted.org/packages/d3/67/d33f268a7759b4445f3c9b5a181039b01af8c8263c865c1be7a6444d4749/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da2f331a01af232259a21573a01338530c6016dcfad74626c01330535bcd8628", size = 4258061, upload-time = "2026-02-18T16:48:41.365Z" },
- { url = "https://files.pythonhosted.org/packages/b4/3b/0d8d2c5e8e29ccc07d28c8af38445d9d9abcd238d590186cac82ee71fc84/psycopg_binary-3.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:19f93235ece6dbfc4036b5e4f6d8b13f0b8f2b3eeb8b0bd2936d406991bcdd40", size = 3558915, upload-time = "2026-02-18T16:48:46.679Z" },
{ url = "https://files.pythonhosted.org/packages/90/15/021be5c0cbc5b7c1ab46e91cc3434eb42569f79a0592e67b8d25e66d844d/psycopg_binary-3.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6698dbab5bcef8fdb570fc9d35fd9ac52041771bfcfe6fd0fc5f5c4e36f1e99d", size = 4591170, upload-time = "2026-02-18T16:48:55.594Z" },
{ url = "https://files.pythonhosted.org/packages/f1/54/a60211c346c9a2f8c6b272b5f2bbe21f6e11800ce7f61e99ba75cf8b63e1/psycopg_binary-3.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:329ff393441e75f10b673ae99ab45276887993d49e65f141da20d915c05aafd8", size = 4670009, upload-time = "2026-02-18T16:49:03.608Z" },
{ url = "https://files.pythonhosted.org/packages/c1/53/ac7c18671347c553362aadbf65f92786eef9540676ca24114cc02f5be405/psycopg_binary-3.3.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:eb072949b8ebf4082ae24289a2b0fd724da9adc8f22743409d6fd718ddb379df", size = 5469735, upload-time = "2026-02-18T16:49:10.128Z" },
@@ -5040,17 +4492,6 @@ version = "2.9.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" },
- { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" },
- { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" },
- { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" },
- { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" },
- { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" },
- { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" },
- { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" },
- { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" },
- { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" },
- { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" },
{ url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" },
{ url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" },
{ url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" },
@@ -5091,13 +4532,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/d7/8b/d18b7eb6fb22e5ed6ffcbc073c85dae635778dbd1270a6cf5d750b031e84/pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025", size = 1063645, upload-time = "2023-12-18T15:43:41.625Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/94/8a/411ef0b05483076b7f548c74ccaa0f90c1e60d3875db71a821f6ffa8cf42/pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b", size = 26904455, upload-time = "2023-12-18T15:40:43.477Z" },
- { url = "https://files.pythonhosted.org/packages/6c/6c/882a57798877e3a49ba54d8e0540bea24aed78fb42e1d860f08c3449c75e/pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23", size = 23997116, upload-time = "2023-12-18T15:40:48.533Z" },
- { url = "https://files.pythonhosted.org/packages/ec/3f/ef47fe6192ce4d82803a073db449b5292135406c364a7fc49dfbcd34c987/pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200", size = 35944575, upload-time = "2023-12-18T15:40:55.128Z" },
- { url = "https://files.pythonhosted.org/packages/1a/90/2021e529d7f234a3909f419d4341d53382541ef77d957fa274a99c533b18/pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696", size = 38079719, upload-time = "2023-12-18T15:41:02.565Z" },
- { url = "https://files.pythonhosted.org/packages/30/a9/474caf5fd54a6d5315aaf9284c6e8f5d071ca825325ad64c53137b646e1f/pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a", size = 35429706, upload-time = "2023-12-18T15:41:09.955Z" },
- { url = "https://files.pythonhosted.org/packages/d9/f8/cfba56f5353e51c19b0c240380ce39483f4c76e5c4aee5a000f3d75b72da/pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02", size = 38001476, upload-time = "2023-12-18T15:41:16.372Z" },
- { url = "https://files.pythonhosted.org/packages/43/3f/7bdf7dc3b3b0cfdcc60760e7880954ba99ccd0bc1e0df806f3dd61bc01cd/pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b", size = 24576230, upload-time = "2023-12-18T15:41:22.561Z" },
{ url = "https://files.pythonhosted.org/packages/69/5b/d8ab6c20c43b598228710e4e4a6cba03a01f6faa3d08afff9ce76fd0fd47/pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944", size = 26819585, upload-time = "2023-12-18T15:41:27.59Z" },
{ url = "https://files.pythonhosted.org/packages/2d/29/bed2643d0dd5e9570405244a61f6db66c7f4704a6e9ce313f84fa5a3675a/pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5", size = 23965222, upload-time = "2023-12-18T15:41:32.449Z" },
{ url = "https://files.pythonhosted.org/packages/2a/34/da464632e59a8cdd083370d69e6c14eae30221acb284f671c6bc9273fadd/pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422", size = 35942036, upload-time = "2023-12-18T15:41:38.767Z" },
@@ -5180,20 +4614,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" },
- { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" },
- { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" },
- { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" },
- { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" },
- { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" },
- { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" },
- { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" },
- { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" },
- { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" },
- { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" },
- { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" },
- { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" },
- { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" },
{ url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
{ url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
{ url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
@@ -5208,22 +4628,10 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
{ url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
{ url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
- { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" },
- { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" },
- { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" },
- { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" },
{ url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
{ url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
{ url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
{ url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
- { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" },
- { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" },
- { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" },
- { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" },
- { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" },
- { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" },
- { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" },
- { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" },
]
[[package]]
@@ -5255,11 +4663,11 @@ wheels = [
[[package]]
name = "pygments"
-version = "2.19.2"
+version = "2.20.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" },
]
[[package]]
@@ -5479,7 +4887,7 @@ name = "pytest-cov"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "coverage", extra = ["toml"] },
+ { name = "coverage" },
{ name = "pluggy" },
{ name = "pytest" },
]
@@ -5543,20 +4951,6 @@ name = "python-calamine"
version = "0.5.4"
source = { registry = "https://pypi.org/simple" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/25/1a/ff59788a7e8bfeded91a501abdd068dc7e2f5865ee1a55432133b0f7f08c/python_calamine-0.5.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:944bcc072aca29d346456b4e42675c4831c52c25641db3e976c6013cdd07d4cd", size = 854308, upload-time = "2025-10-21T07:10:55.17Z" },
- { url = "https://files.pythonhosted.org/packages/24/7d/33fc441a70b771093d10fa5086831be289766535cbcb2b443ff1d5e549d8/python_calamine-0.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e637382e50cabc263a37eda7a3cd33f054271e4391a304f68cecb2e490827533", size = 830841, upload-time = "2025-10-21T07:10:57.353Z" },
- { url = "https://files.pythonhosted.org/packages/0f/38/b5b25e6ce0a983c9751fb026bd8c5d77eb81a775948cc3d9ce2b18b2fc91/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b2a31d1e711c5661b4f04efd89975d311788bd9a43a111beff74d7c4c8f8d7a", size = 898287, upload-time = "2025-10-21T07:10:58.977Z" },
- { url = "https://files.pythonhosted.org/packages/0f/e9/ab288cd489999f962f791d6c8544803c29dcf24e9b6dde24634c41ec09dd/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2078ede35cbd26cf7186673405ff13321caacd9e45a5e57b54ce7b3ef0eec2ff", size = 886960, upload-time = "2025-10-21T07:11:00.462Z" },
- { url = "https://files.pythonhosted.org/packages/f0/4d/2a261f2ccde7128a683cdb20733f9bc030ab37a90803d8de836bf6113e5b/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:faab9f59bb9cedba2b35c6e1f5dc72461d8f2837e8f6ab24fafff0d054ddc4b5", size = 1044123, upload-time = "2025-10-21T07:11:02.153Z" },
- { url = "https://files.pythonhosted.org/packages/20/dc/a84c5a5a2c38816570bcc96ae4c9c89d35054e59c4199d3caef9c60b65cf/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300d8d5e6c63bdecf79268d3b6d2a84078cda39cb3394ed09c5c00a61ce9ff32", size = 941997, upload-time = "2025-10-21T07:11:03.537Z" },
- { url = "https://files.pythonhosted.org/packages/dd/92/b970d8316c54f274d9060e7c804b79dbfa250edeb6390cd94f5fcfeb5f87/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0019a74f1c0b1cbf08fee9ece114d310522837cdf63660a46fe46d3688f215ea", size = 905881, upload-time = "2025-10-21T07:11:05.228Z" },
- { url = "https://files.pythonhosted.org/packages/ac/88/9186ac8d3241fc6f90995cc7539bdbd75b770d2dab20978a702c36fbce5f/python_calamine-0.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:30b40ffb374f7fb9ce20ca87f43a609288f568e41872f8a72e5af313a9e20af0", size = 947224, upload-time = "2025-10-21T07:11:06.618Z" },
- { url = "https://files.pythonhosted.org/packages/ee/ec/6ac1882dc6b6fa829e2d1d94ffa58bd0c67df3dba074b2e2f3134d7f573a/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:206242690a5a5dff73a193fb1a1ca3c7a8aed95e2f9f10c875dece5a22068801", size = 1078351, upload-time = "2025-10-21T07:11:08.368Z" },
- { url = "https://files.pythonhosted.org/packages/3e/f1/07aff6966b04b7452c41a802b37199d9e9ac656d66d6092b83ab0937e212/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:88628e1a17a6f352d6433b0abf6edc4cb2295b8fbb3451392390f3a6a7a8cada", size = 1150148, upload-time = "2025-10-21T07:11:10.18Z" },
- { url = "https://files.pythonhosted.org/packages/4e/be/90aedeb0b77ea592a698a20db09014a5217ce46a55b699121849e239c8e7/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:22524cfb7720d15894a02392bbd49f8e7a8c173493f0628a45814d78e4243fff", size = 1080101, upload-time = "2025-10-21T07:11:11.489Z" },
- { url = "https://files.pythonhosted.org/packages/30/89/1fadd511d132d5ea9326c003c8753b6d234d61d9a72775fb1632cc94beb9/python_calamine-0.5.4-cp311-cp311-win32.whl", hash = "sha256:d159e98ef3475965555b67354f687257648f5c3686ed08e7faa34d54cc9274e1", size = 679593, upload-time = "2025-10-21T07:11:12.758Z" },
- { url = "https://files.pythonhosted.org/packages/e9/ba/d7324400a02491549ef30e0e480561a3a841aa073ac7c096313bc2cea555/python_calamine-0.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:0d019b082f9a114cf1e130dc52b77f9f881325ab13dc31485d7b4563ad9e0812", size = 721570, upload-time = "2025-10-21T07:11:14.336Z" },
- { url = "https://files.pythonhosted.org/packages/4f/15/8c7895e603b4ae63ff279aae4aa6120658a15f805750ccdb5d8b311df616/python_calamine-0.5.4-cp311-cp311-win_arm64.whl", hash = "sha256:bb20875776e5b4c85134c2bf49fea12288e64448ed49f1d89a3a83f5bb16bd59", size = 685789, upload-time = "2025-10-21T07:11:15.646Z" },
{ url = "https://files.pythonhosted.org/packages/ff/60/b1ace7a0fd636581b3bb27f1011cb7b2fe4d507b58401c4d328cfcb5c849/python_calamine-0.5.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4d711f91283d28f19feb111ed666764de69e6d2a0201df8f84e81a238f68d193", size = 850087, upload-time = "2025-10-21T07:11:17.002Z" },
{ url = "https://files.pythonhosted.org/packages/7f/32/32ca71ce50f9b7c7d6e7ec5fcc579a97ddd8b8ce314fe143ba2a19441dc7/python_calamine-0.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ed67afd3adedb5bcfb428cf1f2d7dfd936dea9fe979ab631194495ab092973ba", size = 825659, upload-time = "2025-10-21T07:11:18.248Z" },
{ url = "https://files.pythonhosted.org/packages/63/c5/27ba71a9da2a09be9ff2f0dac522769956c8c89d6516565b21c9c78bfae6/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13662895dac487315ccce25ea272a1ea7e7ac05d899cde4e33d59d6c43274c54", size = 897332, upload-time = "2025-10-21T07:11:19.89Z" },
@@ -5571,15 +4965,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/99/85/c5612a63292eb7d0648b17c5ff32ad5d6c6f3e1d78825f01af5c765f4d3f/python_calamine-0.5.4-cp312-cp312-win32.whl", hash = "sha256:cebb9c88983ae676c60c8c02aa29a9fe13563f240579e66de5c71b969ace5fd9", size = 676617, upload-time = "2025-10-21T07:11:32.833Z" },
{ url = "https://files.pythonhosted.org/packages/bb/18/5a037942de8a8df0c805224b2fba06df6d25c1be3c9484ba9db1ca4f3ee6/python_calamine-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:15abd7aff98fde36d7df91ac051e86e66e5d5326a7fa98d54697afe95a613501", size = 721464, upload-time = "2025-10-21T07:11:34.383Z" },
{ url = "https://files.pythonhosted.org/packages/d1/8b/89ca17b44bcd8be5d0e8378d87b880ae17a837573553bd2147cceca7e759/python_calamine-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:1cef0d0fc936974020a24acf1509ed2a285b30a4e1adf346c057112072e84251", size = 687268, upload-time = "2025-10-21T07:11:36.324Z" },
- { url = "https://files.pythonhosted.org/packages/ab/a8/0e05992489f8ca99eadfb52e858a7653b01b27a7c66d040abddeb4bdf799/python_calamine-0.5.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8d4be45952555f129584e0ca6ddb442bed5cb97b8d7cd0fd5ae463237b98eb15", size = 856420, upload-time = "2025-10-21T07:13:20.962Z" },
- { url = "https://files.pythonhosted.org/packages/f0/b0/5bbe52c97161acb94066e7020c2fed7eafbca4bf6852a4b02ed80bf0b24b/python_calamine-0.5.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b387d12cb8cae98c8e0c061c5400f80bad1f43f26fafcf95ff5934df995f50b", size = 833240, upload-time = "2025-10-21T07:13:22.801Z" },
- { url = "https://files.pythonhosted.org/packages/c7/b9/44fa30f6bf479072d9042856d3fab8bdd1532d2d901e479e199bc1de0e6c/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2103714954b7dbed72a0b0eff178b08e854bba130be283e3ae3d7c95521e8f69", size = 899470, upload-time = "2025-10-21T07:13:25.176Z" },
- { url = "https://files.pythonhosted.org/packages/0e/f2/acbb2c1d6acba1eaf6b1efb6485c98995050bddedfb6b93ce05be2753a85/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c09fdebe23a5045d09e12b3366ff8fd45165b6fb56f55e9a12342a5daddbd11a", size = 906108, upload-time = "2025-10-21T07:13:26.709Z" },
- { url = "https://files.pythonhosted.org/packages/77/28/ff007e689539d6924223565995db876ac044466b8859bade371696294659/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa992d72fbd38f09107430100b7688c03046d8c1994e4cff9bbbd2a825811796", size = 948580, upload-time = "2025-10-21T07:13:30.816Z" },
- { url = "https://files.pythonhosted.org/packages/a4/06/b423655446fb27e22bfc1ca5e5b11f3449e0350fe8fefa0ebd68675f7e85/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:88e608c7589412d3159be40d270a90994e38c9eafc125bf8ad5a9c92deffd6dd", size = 1079516, upload-time = "2025-10-21T07:13:32.288Z" },
- { url = "https://files.pythonhosted.org/packages/76/f5/c7132088978b712a5eddf1ca6bf64ae81335fbca9443ed486330519954c3/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:51a007801aef12f6bc93a545040a36df48e9af920a7da9ded915584ad9a002b1", size = 1152379, upload-time = "2025-10-21T07:13:33.739Z" },
- { url = "https://files.pythonhosted.org/packages/bd/c8/37a8d80b7e55e7cfbe649f7a92a7e838defc746aac12dca751aad5dd06a6/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b056db205e45ab9381990a5c15d869f1021c1262d065740c9cd296fc5d3fb248", size = 1080420, upload-time = "2025-10-21T07:13:35.33Z" },
- { url = "https://files.pythonhosted.org/packages/10/52/9a96d06e75862d356dc80a4a465ad88fba544a19823568b4ff484e7a12f2/python_calamine-0.5.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:dd8f4123b2403fc22c92ec4f5e51c495427cf3739c5cb614b9829745a80922db", size = 722350, upload-time = "2025-10-21T07:13:37.074Z" },
]
[[package]]
@@ -5686,9 +5071,6 @@ name = "pywin32"
version = "311"
source = { registry = "https://pypi.org/simple" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" },
- { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" },
- { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" },
{ url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" },
{ url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" },
{ url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" },
@@ -5709,15 +5091,6 @@ version = "6.0.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" },
- { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" },
- { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" },
- { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" },
- { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" },
- { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" },
- { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" },
- { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" },
- { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" },
{ url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
{ url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
{ url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
@@ -5754,17 +5127,6 @@ version = "3.14.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d3/28/9d808fe62375b9aab5ba92fa9b29371297b067c2790b2d7cda648b1e2f8d/rapidfuzz-3.14.3.tar.gz", hash = "sha256:2491937177868bc4b1e469087601d53f925e8d270ccc21e07404b4b5814b7b5f", size = 57863900, upload-time = "2025-11-01T11:54:52.321Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/76/25/5b0a33ad3332ee1213068c66f7c14e9e221be90bab434f0cb4defa9d6660/rapidfuzz-3.14.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dea2d113e260a5da0c4003e0a5e9fdf24a9dc2bb9eaa43abd030a1e46ce7837d", size = 1953885, upload-time = "2025-11-01T11:52:47.75Z" },
- { url = "https://files.pythonhosted.org/packages/2d/ab/f1181f500c32c8fcf7c966f5920c7e56b9b1d03193386d19c956505c312d/rapidfuzz-3.14.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e6c31a4aa68cfa75d7eede8b0ed24b9e458447db604c2db53f358be9843d81d3", size = 1390200, upload-time = "2025-11-01T11:52:49.491Z" },
- { url = "https://files.pythonhosted.org/packages/14/2a/0f2de974ececad873865c6bb3ea3ad07c976ac293d5025b2d73325aac1d4/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02821366d928e68ddcb567fed8723dad7ea3a979fada6283e6914d5858674850", size = 1389319, upload-time = "2025-11-01T11:52:51.224Z" },
- { url = "https://files.pythonhosted.org/packages/ed/69/309d8f3a0bb3031fd9b667174cc4af56000645298af7c2931be5c3d14bb4/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfe8df315ab4e6db4e1be72c5170f8e66021acde22cd2f9d04d2058a9fd8162e", size = 3178495, upload-time = "2025-11-01T11:52:53.005Z" },
- { url = "https://files.pythonhosted.org/packages/10/b7/f9c44a99269ea5bf6fd6a40b84e858414b6e241288b9f2b74af470d222b1/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:769f31c60cd79420188fcdb3c823227fc4a6deb35cafec9d14045c7f6743acae", size = 1228443, upload-time = "2025-11-01T11:52:54.991Z" },
- { url = "https://files.pythonhosted.org/packages/f2/0a/3b3137abac7f19c9220e14cd7ce993e35071a7655e7ef697785a3edfea1a/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54fa03062124e73086dae66a3451c553c1e20a39c077fd704dc7154092c34c63", size = 2411998, upload-time = "2025-11-01T11:52:56.629Z" },
- { url = "https://files.pythonhosted.org/packages/f3/b6/983805a844d44670eaae63831024cdc97ada4e9c62abc6b20703e81e7f9b/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:834d1e818005ed0d4ae38f6b87b86fad9b0a74085467ece0727d20e15077c094", size = 2530120, upload-time = "2025-11-01T11:52:58.298Z" },
- { url = "https://files.pythonhosted.org/packages/b4/cc/2c97beb2b1be2d7595d805682472f1b1b844111027d5ad89b65e16bdbaaa/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:948b00e8476a91f510dd1ec07272efc7d78c275d83b630455559671d4e33b678", size = 4283129, upload-time = "2025-11-01T11:53:00.188Z" },
- { url = "https://files.pythonhosted.org/packages/4d/03/2f0e5e94941045aefe7eafab72320e61285c07b752df9884ce88d6b8b835/rapidfuzz-3.14.3-cp311-cp311-win32.whl", hash = "sha256:43d0305c36f504232f18ea04e55f2059bb89f169d3119c4ea96a0e15b59e2a91", size = 1724224, upload-time = "2025-11-01T11:53:02.149Z" },
- { url = "https://files.pythonhosted.org/packages/cf/99/5fa23e204435803875daefda73fd61baeabc3c36b8fc0e34c1705aab8c7b/rapidfuzz-3.14.3-cp311-cp311-win_amd64.whl", hash = "sha256:ef6bf930b947bd0735c550683939a032090f1d688dfd8861d6b45307b96fd5c5", size = 1544259, upload-time = "2025-11-01T11:53:03.66Z" },
- { url = "https://files.pythonhosted.org/packages/48/35/d657b85fcc615a42661b98ac90ce8e95bd32af474603a105643963749886/rapidfuzz-3.14.3-cp311-cp311-win_arm64.whl", hash = "sha256:f3eb0ff3b75d6fdccd40b55e7414bb859a1cda77c52762c9c82b85569f5088e7", size = 814734, upload-time = "2025-11-01T11:53:05.008Z" },
{ url = "https://files.pythonhosted.org/packages/fa/8e/3c215e860b458cfbedb3ed73bc72e98eb7e0ed72f6b48099604a7a3260c2/rapidfuzz-3.14.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:685c93ea961d135893b5984a5a9851637d23767feabe414ec974f43babbd8226", size = 1945306, upload-time = "2025-11-01T11:53:06.452Z" },
{ url = "https://files.pythonhosted.org/packages/36/d9/31b33512015c899f4a6e6af64df8dfe8acddf4c8b40a4b3e0e6e1bcd00e5/rapidfuzz-3.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa7c8f26f009f8c673fbfb443792f0cf8cf50c4e18121ff1e285b5e08a94fbdb", size = 1390788, upload-time = "2025-11-01T11:53:08.721Z" },
{ url = "https://files.pythonhosted.org/packages/a9/67/2ee6f8de6e2081ccd560a571d9c9063184fe467f484a17fa90311a7f4a2e/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57f878330c8d361b2ce76cebb8e3e1dc827293b6abf404e67d53260d27b5d941", size = 1374580, upload-time = "2025-11-01T11:53:10.164Z" },
@@ -5776,11 +5138,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/40/eb/9e3af4103d91788f81111af1b54a28de347cdbed8eaa6c91d5e98a889aab/rapidfuzz-3.14.3-cp312-cp312-win32.whl", hash = "sha256:dea97ac3ca18cd3ba8f3d04b5c1fe4aa60e58e8d9b7793d3bd595fdb04128d7a", size = 1709527, upload-time = "2025-11-01T11:53:20.949Z" },
{ url = "https://files.pythonhosted.org/packages/b8/63/d06ecce90e2cf1747e29aeab9f823d21e5877a4c51b79720b2d3be7848f8/rapidfuzz-3.14.3-cp312-cp312-win_amd64.whl", hash = "sha256:b5100fd6bcee4d27f28f4e0a1c6b5127bc8ba7c2a9959cad9eab0bf4a7ab3329", size = 1538989, upload-time = "2025-11-01T11:53:22.428Z" },
{ url = "https://files.pythonhosted.org/packages/fc/6d/beee32dcda64af8128aab3ace2ccb33d797ed58c434c6419eea015fec779/rapidfuzz-3.14.3-cp312-cp312-win_arm64.whl", hash = "sha256:4e49c9e992bc5fc873bd0fff7ef16a4405130ec42f2ce3d2b735ba5d3d4eb70f", size = 811161, upload-time = "2025-11-01T11:53:23.811Z" },
- { url = "https://files.pythonhosted.org/packages/c9/33/b5bd6475c7c27164b5becc9b0e3eb978f1e3640fea590dd3dced6006ee83/rapidfuzz-3.14.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7cf174b52cb3ef5d49e45d0a1133b7e7d0ecf770ed01f97ae9962c5c91d97d23", size = 1888499, upload-time = "2025-11-01T11:54:42.094Z" },
- { url = "https://files.pythonhosted.org/packages/30/d2/89d65d4db4bb931beade9121bc71ad916b5fa9396e807d11b33731494e8e/rapidfuzz-3.14.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:442cba39957a008dfc5bdef21a9c3f4379e30ffb4e41b8555dbaf4887eca9300", size = 1336747, upload-time = "2025-11-01T11:54:43.957Z" },
- { url = "https://files.pythonhosted.org/packages/85/33/cd87d92b23f0b06e8914a61cea6850c6d495ca027f669fab7a379041827a/rapidfuzz-3.14.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1faa0f8f76ba75fd7b142c984947c280ef6558b5067af2ae9b8729b0a0f99ede", size = 1352187, upload-time = "2025-11-01T11:54:45.518Z" },
- { url = "https://files.pythonhosted.org/packages/22/20/9d30b4a1ab26aac22fff17d21dec7e9089ccddfe25151d0a8bb57001dc3d/rapidfuzz-3.14.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e6eefec45625c634926a9fd46c9e4f31118ac8f3156fff9494422cee45207e6", size = 3101472, upload-time = "2025-11-01T11:54:47.255Z" },
- { url = "https://files.pythonhosted.org/packages/b1/ad/fa2d3e5c29a04ead7eaa731c7cd1f30f9ec3c77b3a578fdf90280797cbcb/rapidfuzz-3.14.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56fefb4382bb12250f164250240b9dd7772e41c5c8ae976fd598a32292449cc5", size = 1511361, upload-time = "2025-11-01T11:54:49.057Z" },
]
[[package]]
@@ -5816,9 +5173,6 @@ wheels = [
name = "redis"
version = "7.4.0"
source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "async-timeout", marker = "python_full_version < '3.11.3'" },
-]
sdist = { url = "https://files.pythonhosted.org/packages/7b/7f/3759b1d0d72b7c92f0d70ffd9dc962b7b7b5ee74e135f9d7d8ab06b8a318/redis-7.4.0.tar.gz", hash = "sha256:64a6ea7bf567ad43c964d2c30d82853f8df927c5c9017766c55a1d1ed95d18ad", size = 4943913, upload-time = "2026-03-24T09:14:37.53Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/74/3a/95deec7db1eb53979973ebd156f3369a72732208d1391cd2e5d127062a32/redis-7.4.0-py3-none-any.whl", hash = "sha256:a9c74a5c893a5ef8455a5adb793a31bb70feb821c86eccb62eebef5a19c429ec", size = 409772, upload-time = "2026-03-24T09:14:35.968Z" },
@@ -5849,20 +5203,6 @@ version = "2025.11.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f7/90/4fb5056e5f03a7048abd2b11f598d464f0c167de4f2a51aa868c376b8c70/regex-2025.11.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eadade04221641516fa25139273505a1c19f9bf97589a05bc4cfcd8b4a618031", size = 488081, upload-time = "2025-11-03T21:31:11.946Z" },
- { url = "https://files.pythonhosted.org/packages/85/23/63e481293fac8b069d84fba0299b6666df720d875110efd0338406b5d360/regex-2025.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feff9e54ec0dd3833d659257f5c3f5322a12eee58ffa360984b716f8b92983f4", size = 290554, upload-time = "2025-11-03T21:31:13.387Z" },
- { url = "https://files.pythonhosted.org/packages/2b/9d/b101d0262ea293a0066b4522dfb722eb6a8785a8c3e084396a5f2c431a46/regex-2025.11.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b30bc921d50365775c09a7ed446359e5c0179e9e2512beec4a60cbcef6ddd50", size = 288407, upload-time = "2025-11-03T21:31:14.809Z" },
- { url = "https://files.pythonhosted.org/packages/0c/64/79241c8209d5b7e00577ec9dca35cd493cc6be35b7d147eda367d6179f6d/regex-2025.11.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f99be08cfead2020c7ca6e396c13543baea32343b7a9a5780c462e323bd8872f", size = 793418, upload-time = "2025-11-03T21:31:16.556Z" },
- { url = "https://files.pythonhosted.org/packages/3d/e2/23cd5d3573901ce8f9757c92ca4db4d09600b865919b6d3e7f69f03b1afd/regex-2025.11.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6dd329a1b61c0ee95ba95385fb0c07ea0d3fe1a21e1349fa2bec272636217118", size = 860448, upload-time = "2025-11-03T21:31:18.12Z" },
- { url = "https://files.pythonhosted.org/packages/2a/4c/aecf31beeaa416d0ae4ecb852148d38db35391aac19c687b5d56aedf3a8b/regex-2025.11.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c5238d32f3c5269d9e87be0cf096437b7622b6920f5eac4fd202468aaeb34d2", size = 907139, upload-time = "2025-11-03T21:31:20.753Z" },
- { url = "https://files.pythonhosted.org/packages/61/22/b8cb00df7d2b5e0875f60628594d44dba283e951b1ae17c12f99e332cc0a/regex-2025.11.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10483eefbfb0adb18ee9474498c9a32fcf4e594fbca0543bb94c48bac6183e2e", size = 800439, upload-time = "2025-11-03T21:31:22.069Z" },
- { url = "https://files.pythonhosted.org/packages/02/a8/c4b20330a5cdc7a8eb265f9ce593f389a6a88a0c5f280cf4d978f33966bc/regex-2025.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78c2d02bb6e1da0720eedc0bad578049cad3f71050ef8cd065ecc87691bed2b0", size = 782965, upload-time = "2025-11-03T21:31:23.598Z" },
- { url = "https://files.pythonhosted.org/packages/b4/4c/ae3e52988ae74af4b04d2af32fee4e8077f26e51b62ec2d12d246876bea2/regex-2025.11.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e6b49cd2aad93a1790ce9cffb18964f6d3a4b0b3dbdbd5de094b65296fce6e58", size = 854398, upload-time = "2025-11-03T21:31:25.008Z" },
- { url = "https://files.pythonhosted.org/packages/06/d1/a8b9cf45874eda14b2e275157ce3b304c87e10fb38d9fc26a6e14eb18227/regex-2025.11.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:885b26aa3ee56433b630502dc3d36ba78d186a00cc535d3806e6bfd9ed3c70ab", size = 845897, upload-time = "2025-11-03T21:31:26.427Z" },
- { url = "https://files.pythonhosted.org/packages/ea/fe/1830eb0236be93d9b145e0bd8ab499f31602fe0999b1f19e99955aa8fe20/regex-2025.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd76a9f58e6a00f8772e72cff8ebcff78e022be95edf018766707c730593e1e", size = 788906, upload-time = "2025-11-03T21:31:28.078Z" },
- { url = "https://files.pythonhosted.org/packages/66/47/dc2577c1f95f188c1e13e2e69d8825a5ac582ac709942f8a03af42ed6e93/regex-2025.11.3-cp311-cp311-win32.whl", hash = "sha256:3e816cc9aac1cd3cc9a4ec4d860f06d40f994b5c7b4d03b93345f44e08cc68bf", size = 265812, upload-time = "2025-11-03T21:31:29.72Z" },
- { url = "https://files.pythonhosted.org/packages/50/1e/15f08b2f82a9bbb510621ec9042547b54d11e83cb620643ebb54e4eb7d71/regex-2025.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:087511f5c8b7dfbe3a03f5d5ad0c2a33861b1fc387f21f6f60825a44865a385a", size = 277737, upload-time = "2025-11-03T21:31:31.422Z" },
- { url = "https://files.pythonhosted.org/packages/f4/fc/6500eb39f5f76c5e47a398df82e6b535a5e345f839581012a418b16f9cc3/regex-2025.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:1ff0d190c7f68ae7769cd0313fe45820ba07ffebfddfaa89cc1eb70827ba0ddc", size = 270290, upload-time = "2025-11-03T21:31:33.041Z" },
{ url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" },
{ url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" },
{ url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" },
@@ -5964,21 +5304,6 @@ version = "0.29.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/98/33/23b3b3419b6a3e0f559c7c0d2ca8fc1b9448382b25245033788785921332/rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359", size = 69359, upload-time = "2025-11-16T14:50:39.532Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/36/ab/7fb95163a53ab122c74a7c42d2d2f012819af2cf3deb43fb0d5acf45cc1a/rpds_py-0.29.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b9c764a11fd637e0322a488560533112837f5334ffeb48b1be20f6d98a7b437", size = 372344, upload-time = "2025-11-16T14:47:57.279Z" },
- { url = "https://files.pythonhosted.org/packages/b3/45/f3c30084c03b0d0f918cb4c5ae2c20b0a148b51ba2b3f6456765b629bedd/rpds_py-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fd2164d73812026ce970d44c3ebd51e019d2a26a4425a5dcbdfa93a34abc383", size = 363041, upload-time = "2025-11-16T14:47:58.908Z" },
- { url = "https://files.pythonhosted.org/packages/e3/e9/4d044a1662608c47a87cbb37b999d4d5af54c6d6ebdda93a4d8bbf8b2a10/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a097b7f7f7274164566ae90a221fd725363c0e9d243e2e9ed43d195ccc5495c", size = 391775, upload-time = "2025-11-16T14:48:00.197Z" },
- { url = "https://files.pythonhosted.org/packages/50/c9/7616d3ace4e6731aeb6e3cd85123e03aec58e439044e214b9c5c60fd8eb1/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cdc0490374e31cedefefaa1520d5fe38e82fde8748cbc926e7284574c714d6b", size = 405624, upload-time = "2025-11-16T14:48:01.496Z" },
- { url = "https://files.pythonhosted.org/packages/c2/e2/6d7d6941ca0843609fd2d72c966a438d6f22617baf22d46c3d2156c31350/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89ca2e673ddd5bde9b386da9a0aac0cab0e76f40c8f0aaf0d6311b6bbf2aa311", size = 527894, upload-time = "2025-11-16T14:48:03.167Z" },
- { url = "https://files.pythonhosted.org/packages/8d/f7/aee14dc2db61bb2ae1e3068f134ca9da5f28c586120889a70ff504bb026f/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5d9da3ff5af1ca1249b1adb8ef0573b94c76e6ae880ba1852f033bf429d4588", size = 412720, upload-time = "2025-11-16T14:48:04.413Z" },
- { url = "https://files.pythonhosted.org/packages/2f/e2/2293f236e887c0360c2723d90c00d48dee296406994d6271faf1712e94ec/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8238d1d310283e87376c12f658b61e1ee23a14c0e54c7c0ce953efdbdc72deed", size = 392945, upload-time = "2025-11-16T14:48:06.252Z" },
- { url = "https://files.pythonhosted.org/packages/14/cd/ceea6147acd3bd1fd028d1975228f08ff19d62098078d5ec3eed49703797/rpds_py-0.29.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:2d6fb2ad1c36f91c4646989811e84b1ea5e0c3cf9690b826b6e32b7965853a63", size = 406385, upload-time = "2025-11-16T14:48:07.575Z" },
- { url = "https://files.pythonhosted.org/packages/52/36/fe4dead19e45eb77a0524acfdbf51e6cda597b26fc5b6dddbff55fbbb1a5/rpds_py-0.29.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:534dc9df211387547267ccdb42253aa30527482acb38dd9b21c5c115d66a96d2", size = 423943, upload-time = "2025-11-16T14:48:10.175Z" },
- { url = "https://files.pythonhosted.org/packages/a1/7b/4551510803b582fa4abbc8645441a2d15aa0c962c3b21ebb380b7e74f6a1/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d456e64724a075441e4ed648d7f154dc62e9aabff29bcdf723d0c00e9e1d352f", size = 574204, upload-time = "2025-11-16T14:48:11.499Z" },
- { url = "https://files.pythonhosted.org/packages/64/ba/071ccdd7b171e727a6ae079f02c26f75790b41555f12ca8f1151336d2124/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a738f2da2f565989401bd6fd0b15990a4d1523c6d7fe83f300b7e7d17212feca", size = 600587, upload-time = "2025-11-16T14:48:12.822Z" },
- { url = "https://files.pythonhosted.org/packages/03/09/96983d48c8cf5a1e03c7d9cc1f4b48266adfb858ae48c7c2ce978dbba349/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a110e14508fd26fd2e472bb541f37c209409876ba601cf57e739e87d8a53cf95", size = 562287, upload-time = "2025-11-16T14:48:14.108Z" },
- { url = "https://files.pythonhosted.org/packages/40/f0/8c01aaedc0fa92156f0391f39ea93b5952bc0ec56b897763858f95da8168/rpds_py-0.29.0-cp311-cp311-win32.whl", hash = "sha256:923248a56dd8d158389a28934f6f69ebf89f218ef96a6b216a9be6861804d3f4", size = 221394, upload-time = "2025-11-16T14:48:15.374Z" },
- { url = "https://files.pythonhosted.org/packages/7e/a5/a8b21c54c7d234efdc83dc034a4d7cd9668e3613b6316876a29b49dece71/rpds_py-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:539eb77eb043afcc45314d1be09ea6d6cafb3addc73e0547c171c6d636957f60", size = 235713, upload-time = "2025-11-16T14:48:16.636Z" },
- { url = "https://files.pythonhosted.org/packages/a7/1f/df3c56219523947b1be402fa12e6323fe6d61d883cf35d6cb5d5bb6db9d9/rpds_py-0.29.0-cp311-cp311-win_arm64.whl", hash = "sha256:bdb67151ea81fcf02d8f494703fb728d4d34d24556cbff5f417d74f6f5792e7c", size = 229157, upload-time = "2025-11-16T14:48:17.891Z" },
{ url = "https://files.pythonhosted.org/packages/3c/50/bc0e6e736d94e420df79be4deb5c9476b63165c87bb8f19ef75d100d21b3/rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954", size = 376000, upload-time = "2025-11-16T14:48:19.141Z" },
{ url = "https://files.pythonhosted.org/packages/3e/3a/46676277160f014ae95f24de53bed0e3b7ea66c235e7de0b9df7bd5d68ba/rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c", size = 360575, upload-time = "2025-11-16T14:48:20.443Z" },
{ url = "https://files.pythonhosted.org/packages/75/ba/411d414ed99ea1afdd185bbabeeaac00624bd1e4b22840b5e9967ade6337/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d", size = 392159, upload-time = "2025-11-16T14:48:22.12Z" },
@@ -5994,18 +5319,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/13/0494cecce4848f68501e0a229432620b4b57022388b071eeff95f3e1e75b/rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7", size = 223853, upload-time = "2025-11-16T14:48:36.419Z" },
{ url = "https://files.pythonhosted.org/packages/1f/6a/51e9aeb444a00cdc520b032a28b07e5f8dc7bc328b57760c53e7f96997b4/rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977", size = 239895, upload-time = "2025-11-16T14:48:37.956Z" },
{ url = "https://files.pythonhosted.org/packages/d1/d4/8bce56cdad1ab873e3f27cb31c6a51d8f384d66b022b820525b879f8bed1/rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7", size = 230321, upload-time = "2025-11-16T14:48:39.71Z" },
- { url = "https://files.pythonhosted.org/packages/f2/ac/b97e80bf107159e5b9ba9c91df1ab95f69e5e41b435f27bdd737f0d583ac/rpds_py-0.29.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:acd82a9e39082dc5f4492d15a6b6c8599aa21db5c35aaf7d6889aea16502c07d", size = 373963, upload-time = "2025-11-16T14:50:16.205Z" },
- { url = "https://files.pythonhosted.org/packages/40/5a/55e72962d5d29bd912f40c594e68880d3c7a52774b0f75542775f9250712/rpds_py-0.29.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:715b67eac317bf1c7657508170a3e011a1ea6ccb1c9d5f296e20ba14196be6b3", size = 364644, upload-time = "2025-11-16T14:50:18.22Z" },
- { url = "https://files.pythonhosted.org/packages/99/2a/6b6524d0191b7fc1351c3c0840baac42250515afb48ae40c7ed15499a6a2/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b1b87a237cb2dba4db18bcfaaa44ba4cd5936b91121b62292ff21df577fc43", size = 393847, upload-time = "2025-11-16T14:50:20.012Z" },
- { url = "https://files.pythonhosted.org/packages/1c/b8/c5692a7df577b3c0c7faed7ac01ee3c608b81750fc5d89f84529229b6873/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c3c3e8101bb06e337c88eb0c0ede3187131f19d97d43ea0e1c5407ea74c0cbf", size = 407281, upload-time = "2025-11-16T14:50:21.64Z" },
- { url = "https://files.pythonhosted.org/packages/f0/57/0546c6f84031b7ea08b76646a8e33e45607cc6bd879ff1917dc077bb881e/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8e54d6e61f3ecd3abe032065ce83ea63417a24f437e4a3d73d2f85ce7b7cfe", size = 529213, upload-time = "2025-11-16T14:50:23.219Z" },
- { url = "https://files.pythonhosted.org/packages/fa/c1/01dd5f444233605555bc11fe5fed6a5c18f379f02013870c176c8e630a23/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fbd4e9aebf110473a420dea85a238b254cf8a15acb04b22a5a6b5ce8925b760", size = 413808, upload-time = "2025-11-16T14:50:25.262Z" },
- { url = "https://files.pythonhosted.org/packages/aa/0a/60f98b06156ea2a7af849fb148e00fbcfdb540909a5174a5ed10c93745c7/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fdf53d36e6c72819993e35d1ebeeb8e8fc688d0c6c2b391b55e335b3afba5a", size = 394600, upload-time = "2025-11-16T14:50:26.956Z" },
- { url = "https://files.pythonhosted.org/packages/37/f1/dc9312fc9bec040ece08396429f2bd9e0977924ba7a11c5ad7056428465e/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:ea7173df5d86f625f8dde6d5929629ad811ed8decda3b60ae603903839ac9ac0", size = 408634, upload-time = "2025-11-16T14:50:28.989Z" },
- { url = "https://files.pythonhosted.org/packages/ed/41/65024c9fd40c89bb7d604cf73beda4cbdbcebe92d8765345dd65855b6449/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:76054d540061eda273274f3d13a21a4abdde90e13eaefdc205db37c05230efce", size = 426064, upload-time = "2025-11-16T14:50:30.674Z" },
- { url = "https://files.pythonhosted.org/packages/a2/e0/cf95478881fc88ca2fdbf56381d7df36567cccc39a05394beac72182cd62/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9f84c549746a5be3bc7415830747a3a0312573afc9f95785eb35228bb17742ec", size = 575871, upload-time = "2025-11-16T14:50:33.428Z" },
- { url = "https://files.pythonhosted.org/packages/ea/c0/df88097e64339a0218b57bd5f9ca49898e4c394db756c67fccc64add850a/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:0ea962671af5cb9a260489e311fa22b2e97103e3f9f0caaea6f81390af96a9ed", size = 601702, upload-time = "2025-11-16T14:50:36.051Z" },
- { url = "https://files.pythonhosted.org/packages/87/f4/09ffb3ebd0cbb9e2c7c9b84d252557ecf434cd71584ee1e32f66013824df/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f7728653900035fb7b8d06e1e5900545d8088efc9d5d4545782da7df03ec803f", size = 564054, upload-time = "2025-11-16T14:50:37.733Z" },
]
[[package]]
@@ -6223,14 +5536,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/59/9f/424244b0e2656afc9ff82fb7a96931a47397bfce5ba382213827b198312a/spacy-3.8.11.tar.gz", hash = "sha256:54e1e87b74a2f9ea807ffd606166bf29ac45e2bd81ff7f608eadc7b05787d90d", size = 1326804, upload-time = "2025-11-17T20:40:03.079Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/74/d3/0c795e6f31ee3535b6e70d08e89fc22247b95b61f94fc8334a01d39bf871/spacy-3.8.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a12d83e8bfba07563300ae5e0086548e41aa4bfe3734c97dda87e0eec813df0d", size = 6487958, upload-time = "2025-11-17T20:38:40.378Z" },
- { url = "https://files.pythonhosted.org/packages/4e/2a/83ca9b4d0a2b31adcf0ced49fa667212d12958f75d4e238618a60eb50b10/spacy-3.8.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e07a50b69500ef376326545353a470f00d1ed7203c76341b97242af976e3681a", size = 6148078, upload-time = "2025-11-17T20:38:42.524Z" },
- { url = "https://files.pythonhosted.org/packages/2c/f0/ff520df18a6152ba2dbf808c964014308e71a48feb4c7563f2a6cd6e668d/spacy-3.8.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:718b7bb5e83c76cb841ed6e407f7b40255d0b46af7101a426c20e04af3afd64e", size = 32056451, upload-time = "2025-11-17T20:38:44.92Z" },
- { url = "https://files.pythonhosted.org/packages/9d/3a/6c44c0b9b6a70595888b8d021514ded065548a5b10718ac253bd39f9fd73/spacy-3.8.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f860f9d51c1aeb2d61852442b232576e4ca4d239cb3d1b40ac452118b8eb2c68", size = 32302908, upload-time = "2025-11-17T20:38:47.672Z" },
- { url = "https://files.pythonhosted.org/packages/db/77/00e99e00efd4c2456772befc48400c2e19255140660d663e16b6924a0f2e/spacy-3.8.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ff8d928ce70d751b7bb27f60ee5e3a308216efd4ab4517291e6ff05d9b194840", size = 32280936, upload-time = "2025-11-17T20:38:50.893Z" },
- { url = "https://files.pythonhosted.org/packages/d8/da/692b51e9e5be2766d2d1fb9a7c8122cfd99c337570e621f09c40ce94ad17/spacy-3.8.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3f3cb91d7d42fafd92b8d5bf9f696571170d2f0747f85724a2c5b997753e33c9", size = 33117270, upload-time = "2025-11-17T20:38:53.596Z" },
- { url = "https://files.pythonhosted.org/packages/9b/13/a542ac9b61d071f3328fda1fd8087b523fb7a4f2c340010bc70b1f762485/spacy-3.8.11-cp311-cp311-win_amd64.whl", hash = "sha256:745c190923584935272188c604e0cc170f4179aace1025814a25d92ee90cf3de", size = 15348350, upload-time = "2025-11-17T20:38:56.833Z" },
- { url = "https://files.pythonhosted.org/packages/23/53/975c16514322f6385d6caa5929771613d69f5458fb24f03e189ba533f279/spacy-3.8.11-cp311-cp311-win_arm64.whl", hash = "sha256:27535d81d9dee0483b66660cadd93d14c1668f55e4faf4386aca4a11a41a8b97", size = 14701913, upload-time = "2025-11-17T20:38:59.507Z" },
{ url = "https://files.pythonhosted.org/packages/51/fb/01eadf4ba70606b3054702dc41fc2ccf7d70fb14514b3cd57f0ff78ebea8/spacy-3.8.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aa1ee8362074c30098feaaf2dd888c829a1a79c4311eec1b117a0a61f16fa6dd", size = 6073726, upload-time = "2025-11-17T20:39:01.679Z" },
{ url = "https://files.pythonhosted.org/packages/3a/f8/07b03a2997fc2621aaeafae00af50f55522304a7da6926b07027bb6d0709/spacy-3.8.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:75a036d04c2cf11d6cb566c0a689860cc5a7a75b439e8fea1b3a6b673dabf25d", size = 5724702, upload-time = "2025-11-17T20:39:03.486Z" },
{ url = "https://files.pythonhosted.org/packages/13/0c/c4fa0f379dbe3258c305d2e2df3760604a9fcd71b34f8f65c23e43f4cf55/spacy-3.8.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cb599d2747d4a59a5f90e8a453c149b13db382a8297925cf126333141dbc4f7", size = 32727774, upload-time = "2025-11-17T20:39:05.894Z" },
@@ -6269,13 +5574,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/1f/73/b4a9737255583b5fa858e0bb8e116eb94b88c910164ed2ed719147bde3de/sqlalchemy-2.0.48.tar.gz", hash = "sha256:5ca74f37f3369b45e1f6b7b06afb182af1fd5dde009e4ffd831830d98cbe5fe7", size = 9886075, upload-time = "2026-03-02T15:28:51.474Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/d7/6d/b8b78b5b80f3c3ab3f7fa90faa195ec3401f6d884b60221260fd4d51864c/sqlalchemy-2.0.48-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b4c575df7368b3b13e0cebf01d4679f9a28ed2ae6c1cd0b1d5beffb6b2007dc", size = 2157184, upload-time = "2026-03-02T15:38:28.161Z" },
- { url = "https://files.pythonhosted.org/packages/21/4b/4f3d4a43743ab58b95b9ddf5580a265b593d017693df9e08bd55780af5bb/sqlalchemy-2.0.48-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e83e3f959aaa1c9df95c22c528096d94848a1bc819f5d0ebf7ee3df0ca63db6c", size = 3313555, upload-time = "2026-03-02T15:58:57.21Z" },
- { url = "https://files.pythonhosted.org/packages/21/dd/3b7c53f1dbbf736fd27041aee68f8ac52226b610f914085b1652c2323442/sqlalchemy-2.0.48-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f7b7243850edd0b8b97043f04748f31de50cf426e939def5c16bedb540698f7", size = 3313057, upload-time = "2026-03-02T15:52:29.366Z" },
- { url = "https://files.pythonhosted.org/packages/d9/cc/3e600a90ae64047f33313d7d32e5ad025417f09d2ded487e8284b5e21a15/sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:82745b03b4043e04600a6b665cb98697c4339b24e34d74b0a2ac0a2488b6f94d", size = 3265431, upload-time = "2026-03-02T15:58:59.096Z" },
- { url = "https://files.pythonhosted.org/packages/8b/19/780138dacfe3f5024f4cf96e4005e91edf6653d53d3673be4844578faf1d/sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5e088bf43f6ee6fec7dbf1ef7ff7774a616c236b5c0cb3e00662dd71a56b571", size = 3287646, upload-time = "2026-03-02T15:52:31.569Z" },
- { url = "https://files.pythonhosted.org/packages/40/fd/f32ced124f01a23151f4777e4c705f3a470adc7bd241d9f36a7c941a33bf/sqlalchemy-2.0.48-cp311-cp311-win32.whl", hash = "sha256:9c7d0a77e36b5f4b01ca398482230ab792061d243d715299b44a0b55c89fe617", size = 2116956, upload-time = "2026-03-02T15:46:54.535Z" },
- { url = "https://files.pythonhosted.org/packages/58/d5/dd767277f6feef12d05651538f280277e661698f617fa4d086cce6055416/sqlalchemy-2.0.48-cp311-cp311-win_amd64.whl", hash = "sha256:583849c743e0e3c9bb7446f5b5addeacedc168d657a69b418063dfdb2d90081c", size = 2141627, upload-time = "2026-03-02T15:46:55.849Z" },
{ url = "https://files.pythonhosted.org/packages/ef/91/a42ae716f8925e9659df2da21ba941f158686856107a61cc97a95e7647a3/sqlalchemy-2.0.48-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:348174f228b99f33ca1f773e85510e08927620caa59ffe7803b37170df30332b", size = 2155737, upload-time = "2026-03-02T15:49:13.207Z" },
{ url = "https://files.pythonhosted.org/packages/b9/52/f75f516a1f3888f027c1cfb5d22d4376f4b46236f2e8669dcb0cddc60275/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53667b5f668991e279d21f94ccfa6e45b4e3f4500e7591ae59a8012d0f010dcb", size = 3337020, upload-time = "2026-03-02T15:50:34.547Z" },
{ url = "https://files.pythonhosted.org/packages/37/9a/0c28b6371e0cdcb14f8f1930778cb3123acfcbd2c95bb9cf6b4a2ba0cce3/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34634e196f620c7a61d18d5cf7dc841ca6daa7961aed75d532b7e58b309ac894", size = 3349983, upload-time = "2026-03-02T15:53:25.542Z" },
@@ -6313,13 +5611,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/cf/77/5633c4ba65e3421b72b5b4bd93aa328360b351b3a1e5bf3c90eb224668e5/srsly-2.5.2.tar.gz", hash = "sha256:4092bc843c71b7595c6c90a0302a197858c5b9fe43067f62ae6a45bc3baa1c19", size = 492055, upload-time = "2025-11-17T14:11:02.543Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/59/6e/2e3d07b38c1c2e98487f0af92f93b392c6741062d85c65cdc18c7b77448a/srsly-2.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7e07babdcece2405b32c9eea25ef415749f214c889545e38965622bb66837ce", size = 655286, upload-time = "2025-11-17T14:09:52.468Z" },
- { url = "https://files.pythonhosted.org/packages/a1/e7/587bcade6b72f919133e587edf60e06039d88049aef9015cd0bdea8df189/srsly-2.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1718fe40b73e5cc73b14625233f57e15fb23643d146f53193e8fe653a49e9a0f", size = 653094, upload-time = "2025-11-17T14:09:53.837Z" },
- { url = "https://files.pythonhosted.org/packages/8d/24/5c3aabe292cb4eb906c828f2866624e3a65603ef0a73e964e486ff146b84/srsly-2.5.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d7b07e6103db7dd3199c0321935b0c8b9297fd6e018a66de97dc836068440111", size = 1141286, upload-time = "2025-11-17T14:09:55.535Z" },
- { url = "https://files.pythonhosted.org/packages/2a/fe/2cbdcef2495e0c40dafb96da205d9ab3b9e59f64938277800bf65f923281/srsly-2.5.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f2dedf03b2ae143dd70039f097d128fb901deba2482c3a749ac0a985ac735aad", size = 1144667, upload-time = "2025-11-17T14:09:57.24Z" },
- { url = "https://files.pythonhosted.org/packages/91/7c/9a2c9d8141daf7b7a6f092c2be403421a0ab280e7c03cc62c223f37fdf47/srsly-2.5.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d5be1d8b79a4c4180073461425cb49c8924a184ab49d976c9c81a7bf87731d9", size = 1103935, upload-time = "2025-11-17T14:09:58.576Z" },
- { url = "https://files.pythonhosted.org/packages/f1/ad/8ae727430368fedbb1a7fa41b62d7a86237558bc962c5c5a9aa8bfa82548/srsly-2.5.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c8e42d6bcddda2e6fc1a8438cc050c4a36d0e457a63bcc7117d23c5175dfedec", size = 1117985, upload-time = "2025-11-17T14:10:00.348Z" },
- { url = "https://files.pythonhosted.org/packages/60/69/d6afaef1a8d5192fd802752115c7c3cc104493a7d604b406112b8bc2b610/srsly-2.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:e7362981e687eead00248525c3ef3b8ddd95904c93362c481988d91b26b6aeef", size = 654148, upload-time = "2025-11-17T14:10:01.772Z" },
{ url = "https://files.pythonhosted.org/packages/8f/1c/21f658d98d602a559491b7886c7ca30245c2cd8987ff1b7709437c0f74b1/srsly-2.5.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f92b4f883e6be4ca77f15980b45d394d310f24903e25e1b2c46df783c7edcce", size = 656161, upload-time = "2025-11-17T14:10:03.181Z" },
{ url = "https://files.pythonhosted.org/packages/2f/a2/bc6fd484ed703857043ae9abd6c9aea9152f9480a6961186ee6c1e0c49e8/srsly-2.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4790a54b00203f1af5495b6b8ac214131139427f30fcf05cf971dde81930eb", size = 653237, upload-time = "2025-11-17T14:10:04.636Z" },
{ url = "https://files.pythonhosted.org/packages/ab/ea/e3895da29a15c8d325e050ad68a0d1238eece1d2648305796adf98dcba66/srsly-2.5.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ce5c6b016050857a7dd365c9dcdd00d96e7ac26317cfcb175db387e403de05bf", size = 1174418, upload-time = "2025-11-17T14:10:05.945Z" },
@@ -6535,14 +5826,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/2f/3a/2d0f0be132b9faaa6d56f04565ae122684273e4bf4eab8dee5f48dc00f68/thinc-8.3.10.tar.gz", hash = "sha256:5a75109f4ee1c968fc055ce651a17cb44b23b000d9e95f04a4d047ab3cb3e34e", size = 194196, upload-time = "2025-11-17T17:21:46.435Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/38/43/01b662540888140b5e9f76c957c7118c203cb91f17867ce78fc4f2d3800f/thinc-8.3.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72793e0bd3f0f391ca36ab0996b3c21db7045409bd3740840e7d6fcd9a044d81", size = 818632, upload-time = "2025-11-17T17:20:49.123Z" },
- { url = "https://files.pythonhosted.org/packages/f0/ba/e0edcc84014bdde1bc9a082408279616a061566a82b5e3b90b9e64f33c1b/thinc-8.3.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b13311acb061e04e3a0c4bd677b85ec2971e3a3674558252443b5446e378256", size = 770622, upload-time = "2025-11-17T17:20:50.467Z" },
- { url = "https://files.pythonhosted.org/packages/f3/51/0558f8cb69c13e1114428726a3fb36fe1adc5821a62ccd3fa7b7c1a5bd9a/thinc-8.3.10-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ffddcf311fb7c998eb8988d22c618dc0f33b26303853c0445edb8a69819ac60", size = 4094652, upload-time = "2025-11-17T17:20:52.104Z" },
- { url = "https://files.pythonhosted.org/packages/a0/c9/bb78601f74f9bcadb2d3d4d5b057c4dc3f2e52d9771bad3d93a4e38a9dc1/thinc-8.3.10-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9b1e0511e8421f20abe4f22d8c8073a0d7ce4a31597cc7a404fdbad72bf38058", size = 4124379, upload-time = "2025-11-17T17:20:53.781Z" },
- { url = "https://files.pythonhosted.org/packages/f6/3e/961e1b9794111c89f2ceadfef5692aba5097bec4aaaf89f1b8a04c5bc961/thinc-8.3.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e31e49441dfad8fd64b8ca5f5c9b8c33ee87a553bf79c830a15b4cd02efcc444", size = 5094221, upload-time = "2025-11-17T17:20:55.466Z" },
- { url = "https://files.pythonhosted.org/packages/e5/de/da163a1533faaef5b17dd11dfb9ffd9fd5627dbef56e1160da6edbe1b224/thinc-8.3.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9de5dd73ce7135dcf41d68625d35cd9f5cf8e5f55a3932001a188b45057c3379", size = 5262834, upload-time = "2025-11-17T17:20:57.459Z" },
- { url = "https://files.pythonhosted.org/packages/4c/4e/449d29e33f7ddda6ba1b9e06de3ea5155c2dc33c21f438f8faafebde4e13/thinc-8.3.10-cp311-cp311-win_amd64.whl", hash = "sha256:b6d64e390a1996d489872b9d99a584142542aba59ebdc60f941f473732582f6f", size = 1791864, upload-time = "2025-11-17T17:20:59.817Z" },
- { url = "https://files.pythonhosted.org/packages/4a/b3/68038d88d45d83a501c3f19bd654d275b7ac730c807f52bbb46f35f591bc/thinc-8.3.10-cp311-cp311-win_arm64.whl", hash = "sha256:3991b6ad72e611dfbfb58235de5b67bcc9f61426127cc023607f97e8c5f43e0e", size = 1717563, upload-time = "2025-11-17T17:21:01.634Z" },
{ url = "https://files.pythonhosted.org/packages/d3/34/ba3b386d92edf50784b60ee34318d47c7f49c198268746ef7851c5bbe8cf/thinc-8.3.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51bc6ef735bdbcab75ab2916731b8f61f94c66add6f9db213d900d3c6a244f95", size = 794509, upload-time = "2025-11-17T17:21:03.21Z" },
{ url = "https://files.pythonhosted.org/packages/07/f3/9f52d18115cd9d8d7b2590d226cb2752d2a5ffec61576b19462b48410184/thinc-8.3.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4f48b4d346915f98e9722c0c50ef911cc16c6790a2b7afebc6e1a2c96a6ce6c6", size = 741084, upload-time = "2025-11-17T17:21:04.568Z" },
{ url = "https://files.pythonhosted.org/packages/ad/9c/129c2b740c4e3d3624b6fb3dec1577ef27cb804bc1647f9bc3e1801ea20c/thinc-8.3.10-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5003f4db2db22cc8d686db8db83509acc3c50f4c55ebdcb2bbfcc1095096f7d2", size = 3846337, upload-time = "2025-11-17T17:21:06.079Z" },
@@ -6575,13 +5858,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" },
- { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" },
- { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" },
- { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" },
- { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" },
- { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" },
- { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" },
{ url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" },
{ url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" },
{ url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" },
@@ -6625,31 +5901,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" },
]
-[[package]]
-name = "tomli"
-version = "2.3.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" },
- { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" },
- { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" },
- { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" },
- { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" },
- { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" },
- { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" },
- { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" },
- { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" },
- { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" },
- { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" },
- { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" },
- { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" },
- { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" },
- { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" },
- { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" },
- { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" },
-]
-
[[package]]
name = "tos"
version = "2.9.0"
@@ -7204,17 +6455,6 @@ version = "5.12.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cb/3e/c35530c5ffc25b71c59ae0cd7b8f99df37313daa162ce1e2f7925f7c2877/ujson-5.12.0.tar.gz", hash = "sha256:14b2e1eb528d77bc0f4c5bd1a7ebc05e02b5b41beefb7e8567c9675b8b13bcf4", size = 7158451, upload-time = "2026-03-11T22:19:30.397Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/10/22/fd22e2f6766bae934d3050517ca47d463016bd8688508d1ecc1baa18a7ad/ujson-5.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58a11cb49482f1a095a2bd9a1d81dd7c8fb5d2357f959ece85db4e46a825fd00", size = 56139, upload-time = "2026-03-11T22:18:04.591Z" },
- { url = "https://files.pythonhosted.org/packages/c6/fd/6839adff4fc0164cbcecafa2857ba08a6eaeedd7e098d6713cb899a91383/ujson-5.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9b3cf13facf6f77c283af0e1713e5e8c47a0fe295af81326cb3cb4380212e797", size = 53836, upload-time = "2026-03-11T22:18:05.662Z" },
- { url = "https://files.pythonhosted.org/packages/f9/b0/0c19faac62d68ceeffa83a08dc3d71b8462cf5064d0e7e0b15ba19898dad/ujson-5.12.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb94245a715b4d6e24689de12772b85329a1f9946cbf6187923a64ecdea39e65", size = 57851, upload-time = "2026-03-11T22:18:06.744Z" },
- { url = "https://files.pythonhosted.org/packages/04/f6/e7fd283788de73b86e99e08256726bb385923249c21dcd306e59d532a1a1/ujson-5.12.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:0fe6b8b8968e11dd9b2348bd508f0f57cf49ab3512064b36bc4117328218718e", size = 59906, upload-time = "2026-03-11T22:18:07.791Z" },
- { url = "https://files.pythonhosted.org/packages/d7/3a/b100735a2b43ee6e8fe4c883768e362f53576f964d4ea841991060aeaf35/ujson-5.12.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:89e302abd3749f6d6699691747969a5d85f7c73081d5ed7e2624c7bd9721a2ab", size = 57409, upload-time = "2026-03-11T22:18:08.79Z" },
- { url = "https://files.pythonhosted.org/packages/5c/fa/f97cc20c99ca304662191b883ae13ae02912ca7244710016ba0cb8a5be34/ujson-5.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0727363b05ab05ee737a28f6200dc4078bce6b0508e10bd8aab507995a15df61", size = 1037339, upload-time = "2026-03-11T22:18:10.424Z" },
- { url = "https://files.pythonhosted.org/packages/10/7a/53ddeda0ffe1420db2f9999897b3cbb920fbcff1849d1f22b196d0f34785/ujson-5.12.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b62cb9a7501e1f5c9ffe190485501349c33e8862dde4377df774e40b8166871f", size = 1196625, upload-time = "2026-03-11T22:18:11.82Z" },
- { url = "https://files.pythonhosted.org/packages/0d/1a/4c64a6bef522e9baf195dd5be151bc815cd4896c50c6e2489599edcda85f/ujson-5.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a6ec5bf6bc361f2f0f9644907a36ce527715b488988a8df534120e5c34eeda94", size = 1089669, upload-time = "2026-03-11T22:18:13.343Z" },
- { url = "https://files.pythonhosted.org/packages/18/11/8ccb109f5777ec0d9fb826695a9e2ac36ae94c1949fc8b1e4d23a5bd067a/ujson-5.12.0-cp311-cp311-win32.whl", hash = "sha256:006428d3813b87477d72d306c40c09f898a41b968e57b15a7d88454ecc42a3fb", size = 39648, upload-time = "2026-03-11T22:18:14.785Z" },
- { url = "https://files.pythonhosted.org/packages/6f/e3/87fc4c27b20d5125cff7ce52d17ea7698b22b74426da0df238e3efcb0cf2/ujson-5.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:40aa43a7a3a8d2f05e79900858053d697a88a605e3887be178b43acbcd781161", size = 43876, upload-time = "2026-03-11T22:18:15.768Z" },
- { url = "https://files.pythonhosted.org/packages/9e/21/324f0548a8c8c48e3e222eaed15fb6d48c796593002b206b4a28a89e445f/ujson-5.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:561f89cc82deeae82e37d4a4764184926fb432f740a9691563a391b13f7339a4", size = 38553, upload-time = "2026-03-11T22:18:17.251Z" },
{ url = "https://files.pythonhosted.org/packages/84/f6/ac763d2108d28f3a40bb3ae7d2fafab52ca31b36c2908a4ad02cd3ceba2a/ujson-5.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:09b4beff9cc91d445d5818632907b85fb06943b61cb346919ce202668bf6794a", size = 56326, upload-time = "2026-03-11T22:18:18.467Z" },
{ url = "https://files.pythonhosted.org/packages/25/46/d0b3af64dcdc549f9996521c8be6d860ac843a18a190ffc8affeb7259687/ujson-5.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca0c7ce828bb76ab78b3991904b477c2fd0f711d7815c252d1ef28ff9450b052", size = 53910, upload-time = "2026-03-11T22:18:19.502Z" },
{ url = "https://files.pythonhosted.org/packages/9a/10/853c723bcabc3e9825a079019055fc99e71b85c6bae600607a2b9d31d18d/ujson-5.12.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2d79c6635ccffcbfc1d5c045874ba36b594589be81d50d43472570bb8de9c57", size = 57754, upload-time = "2026-03-11T22:18:20.874Z" },
@@ -7231,12 +6471,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/da/e9ae98133336e7c0d50b43626c3f2327937cecfa354d844e02ac17379ed1/ujson-5.12.0-graalpy312-graalpy250_312_native-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c0aed6a4439994c9666fb8a5b6c4eac94d4ef6ddc95f9b806a599ef83547e3b", size = 54518, upload-time = "2026-03-11T22:19:15.4Z" },
{ url = "https://files.pythonhosted.org/packages/58/10/978d89dded6bb1558cd46ba78f4351198bd2346db8a8ee1a94119022ce40/ujson-5.12.0-graalpy312-graalpy250_312_native-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:efae5df7a8cc8bdb1037b0f786b044ce281081441df5418c3a0f0e1f86fe7bb3", size = 55736, upload-time = "2026-03-11T22:19:16.496Z" },
{ url = "https://files.pythonhosted.org/packages/80/25/1df8e6217c92e57a1266bf5be750b1dddc126ee96e53fe959d5693503bc6/ujson-5.12.0-graalpy312-graalpy250_312_native-win_amd64.whl", hash = "sha256:8712b61eb1b74a4478cfd1c54f576056199e9f093659334aeb5c4a6b385338e5", size = 44615, upload-time = "2026-03-11T22:19:17.53Z" },
- { url = "https://files.pythonhosted.org/packages/19/fa/f4a957dddb99bd68c8be91928c0b6fefa7aa8aafc92c93f5d1e8b32f6702/ujson-5.12.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:871c0e5102e47995b0e37e8df7819a894a6c3da0d097545cd1f9f1f7d7079927", size = 52145, upload-time = "2026-03-11T22:19:18.566Z" },
- { url = "https://files.pythonhosted.org/packages/55/6e/50b5cf612de1ca06c7effdc5a5d7e815774dee85a5858f1882c425553b82/ujson-5.12.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:56ba3f7abbd6b0bb282a544dc38406d1a188d8bb9164f49fdb9c2fee62cb29da", size = 49577, upload-time = "2026-03-11T22:19:19.627Z" },
- { url = "https://files.pythonhosted.org/packages/6e/24/b6713fa9897774502cd4c2d6955bb4933349f7d84c3aa805531c382a4209/ujson-5.12.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c5a52987a990eb1bae55f9000994f1afdb0326c154fb089992f839ab3c30688", size = 50807, upload-time = "2026-03-11T22:19:20.778Z" },
- { url = "https://files.pythonhosted.org/packages/1f/b6/c0e0f7901180ef80d16f3a4bccb5dc8b01515a717336a62928963a07b80b/ujson-5.12.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:adf28d13a33f9d750fe7a78fb481cac298fa257d8863d8727b2ea4455ea41235", size = 56972, upload-time = "2026-03-11T22:19:21.84Z" },
- { url = "https://files.pythonhosted.org/packages/02/a9/05d91b4295ea7239151eb08cf240e5a2ba969012fda50bc27bcb1ea9cd71/ujson-5.12.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51acc750ec7a2df786cdc868fb16fa04abd6269a01d58cf59bafc57978773d8e", size = 52045, upload-time = "2026-03-11T22:19:22.879Z" },
- { url = "https://files.pythonhosted.org/packages/e3/7a/92047d32bf6f2d9db64605fc32e8eb0e0dd68b671eaafc12a464f69c4af4/ujson-5.12.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:ab9056d94e5db513d9313b34394f3a3b83e6301a581c28ad67773434f3faccab", size = 44053, upload-time = "2026-03-11T22:19:23.918Z" },
]
[[package]]
@@ -7358,13 +6592,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/65/36/2d24b2cbe78547c6532da33fb8613debd3126eccc33a6374ab788f5e46e9/uuid_utils-0.14.1-cp39-abi3-win32.whl", hash = "sha256:b54d6aa6252d96bac1fdbc80d26ba71bad9f220b2724d692ad2f2310c22ef523", size = 183476, upload-time = "2026-02-20T22:50:32.745Z" },
{ url = "https://files.pythonhosted.org/packages/83/92/2d7e90df8b1a69ec4cff33243ce02b7a62f926ef9e2f0eca5a026889cd73/uuid_utils-0.14.1-cp39-abi3-win_amd64.whl", hash = "sha256:fc27638c2ce267a0ce3e06828aff786f91367f093c80625ee21dad0208e0f5ba", size = 187147, upload-time = "2026-02-20T22:50:45.807Z" },
{ url = "https://files.pythonhosted.org/packages/d9/26/529f4beee17e5248e37e0bc17a2761d34c0fa3b1e5729c88adb2065bae6e/uuid_utils-0.14.1-cp39-abi3-win_arm64.whl", hash = "sha256:b04cb49b42afbc4ff8dbc60cf054930afc479d6f4dd7f1ec3bbe5dbfdde06b7a", size = 188132, upload-time = "2026-02-20T22:50:41.718Z" },
- { url = "https://files.pythonhosted.org/packages/91/f9/6c64bdbf71f58ccde7919e00491812556f446a5291573af92c49a5e9aaef/uuid_utils-0.14.1-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b197cd5424cf89fb019ca7f53641d05bfe34b1879614bed111c9c313b5574cd8", size = 591617, upload-time = "2026-02-20T22:50:24.532Z" },
- { url = "https://files.pythonhosted.org/packages/d0/f0/758c3b0fb0c4871c7704fef26a5bc861de4f8a68e4831669883bebe07b0f/uuid_utils-0.14.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:12c65020ba6cb6abe1d57fcbfc2d0ea0506c67049ee031714057f5caf0f9bc9c", size = 303702, upload-time = "2026-02-20T22:50:40.687Z" },
- { url = "https://files.pythonhosted.org/packages/85/89/d91862b544c695cd58855efe3201f83894ed82fffe34500774238ab8eba7/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b5d2ad28063d422ccc2c28d46471d47b61a58de885d35113a8f18cb547e25bf", size = 337678, upload-time = "2026-02-20T22:50:39.768Z" },
- { url = "https://files.pythonhosted.org/packages/ee/6b/cf342ba8a898f1de024be0243fac67c025cad530c79ea7f89c4ce718891a/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da2234387b45fde40b0fedfee64a0ba591caeea9c48c7698ab6e2d85c7991533", size = 343711, upload-time = "2026-02-20T22:50:43.965Z" },
- { url = "https://files.pythonhosted.org/packages/b3/20/049418d094d396dfa6606b30af925cc68a6670c3b9103b23e6990f84b589/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50fffc2827348c1e48972eed3d1c698959e63f9d030aa5dd82ba451113158a62", size = 476731, upload-time = "2026-02-20T22:50:30.589Z" },
- { url = "https://files.pythonhosted.org/packages/77/a1/0857f64d53a90321e6a46a3d4cc394f50e1366132dcd2ae147f9326ca98b/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1dbe718765f70f5b7f9b7f66b6a937802941b1cc56bcf642ce0274169741e01", size = 338902, upload-time = "2026-02-20T22:50:33.927Z" },
- { url = "https://files.pythonhosted.org/packages/ed/d0/5bf7cbf1ac138c92b9ac21066d18faf4d7e7f651047b700eb192ca4b9fdb/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:258186964039a8e36db10810c1ece879d229b01331e09e9030bc5dcabe231bd2", size = 364700, upload-time = "2026-02-20T22:50:21.732Z" },
]
[[package]]
@@ -7406,12 +6633,6 @@ version = "0.22.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" },
- { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" },
- { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" },
- { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" },
- { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" },
- { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" },
{ url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" },
{ url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" },
{ url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" },
@@ -7506,19 +6727,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" },
- { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" },
- { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" },
- { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" },
- { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" },
- { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" },
- { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" },
- { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" },
- { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" },
- { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" },
- { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" },
- { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" },
- { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" },
{ url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" },
{ url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" },
{ url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" },
@@ -7532,10 +6740,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" },
{ url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" },
{ url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" },
- { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" },
- { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" },
- { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" },
- { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" },
]
[[package]]
@@ -7632,17 +6836,6 @@ version = "15.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" },
- { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" },
- { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" },
- { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" },
- { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" },
- { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" },
- { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" },
- { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" },
- { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" },
- { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" },
- { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" },
{ url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" },
{ url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" },
{ url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" },
@@ -7684,16 +6877,6 @@ version = "1.16.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/95/4c/063a912e20bcef7124e0df97282a8af3ff3e4b603ce84c481d6d7346be0a/wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", size = 53972, upload-time = "2023-11-09T06:33:30.191Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/fd/03/c188ac517f402775b90d6f312955a5e53b866c964b32119f2ed76315697e/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", size = 37313, upload-time = "2023-11-09T06:31:52.168Z" },
- { url = "https://files.pythonhosted.org/packages/0f/16/ea627d7817394db04518f62934a5de59874b587b792300991b3c347ff5e0/wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", size = 38164, upload-time = "2023-11-09T06:31:53.522Z" },
- { url = "https://files.pythonhosted.org/packages/7f/a7/f1212ba098f3de0fd244e2de0f8791ad2539c03bef6c05a9fcb03e45b089/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", size = 80890, upload-time = "2023-11-09T06:31:55.247Z" },
- { url = "https://files.pythonhosted.org/packages/b7/96/bb5e08b3d6db003c9ab219c487714c13a237ee7dcc572a555eaf1ce7dc82/wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", size = 73118, upload-time = "2023-11-09T06:31:57.023Z" },
- { url = "https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", size = 80746, upload-time = "2023-11-09T06:31:58.686Z" },
- { url = "https://files.pythonhosted.org/packages/11/fb/18ec40265ab81c0e82a934de04596b6ce972c27ba2592c8b53d5585e6bcd/wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", size = 85668, upload-time = "2023-11-09T06:31:59.992Z" },
- { url = "https://files.pythonhosted.org/packages/0f/ef/0ecb1fa23145560431b970418dce575cfaec555ab08617d82eb92afc7ccf/wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", size = 78556, upload-time = "2023-11-09T06:32:01.942Z" },
- { url = "https://files.pythonhosted.org/packages/25/62/cd284b2b747f175b5a96cbd8092b32e7369edab0644c45784871528eb852/wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", size = 85712, upload-time = "2023-11-09T06:32:03.686Z" },
- { url = "https://files.pythonhosted.org/packages/e5/a7/47b7ff74fbadf81b696872d5ba504966591a3468f1bc86bca2f407baef68/wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", size = 35327, upload-time = "2023-11-09T06:32:05.284Z" },
- { url = "https://files.pythonhosted.org/packages/cf/c3/0084351951d9579ae83a3d9e38c140371e4c6b038136909235079f2e6e78/wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", size = 37523, upload-time = "2023-11-09T06:32:07.17Z" },
{ url = "https://files.pythonhosted.org/packages/92/17/224132494c1e23521868cdd57cd1e903f3b6a7ba6996b7b8f077ff8ac7fe/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", size = 37614, upload-time = "2023-11-09T06:32:08.859Z" },
{ url = "https://files.pythonhosted.org/packages/6a/d7/cfcd73e8f4858079ac59d9db1ec5a1349bc486ae8e9ba55698cc1f4a1dff/wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", size = 38316, upload-time = "2023-11-09T06:32:10.719Z" },
{ url = "https://files.pythonhosted.org/packages/7e/79/5ff0a5c54bda5aec75b36453d06be4f83d5cd4932cc84b7cb2b52cee23e2/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", size = 86322, upload-time = "2023-11-09T06:32:12.592Z" },
@@ -7755,21 +6938,6 @@ version = "3.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" },
- { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" },
- { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" },
- { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" },
- { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" },
- { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" },
- { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" },
- { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" },
- { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" },
- { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" },
- { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" },
- { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" },
- { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" },
- { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" },
- { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" },
{ url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" },
{ url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" },
{ url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" },
@@ -7785,11 +6953,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" },
{ url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" },
{ url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" },
- { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" },
- { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" },
- { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" },
- { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" },
- { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" },
]
[[package]]
@@ -7803,24 +6966,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a2/aa/60da938b8f0997ba3a911263c40d82b6f645a67902a490b46f3355e10fae/yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99", size = 123641, upload-time = "2026-03-01T22:04:42.841Z" },
- { url = "https://files.pythonhosted.org/packages/24/84/e237607faf4e099dbb8a4f511cfd5efcb5f75918baad200ff7380635631b/yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c", size = 86248, upload-time = "2026-03-01T22:04:44.757Z" },
- { url = "https://files.pythonhosted.org/packages/b2/0d/71ceabc14c146ba8ee3804ca7b3d42b1664c8440439de5214d366fec7d3a/yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432", size = 85988, upload-time = "2026-03-01T22:04:46.365Z" },
- { url = "https://files.pythonhosted.org/packages/8c/6c/4a90d59c572e46b270ca132aca66954f1175abd691f74c1ef4c6711828e2/yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a", size = 100566, upload-time = "2026-03-01T22:04:47.639Z" },
- { url = "https://files.pythonhosted.org/packages/49/fb/c438fb5108047e629f6282a371e6e91cf3f97ee087c4fb748a1f32ceef55/yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05", size = 92079, upload-time = "2026-03-01T22:04:48.925Z" },
- { url = "https://files.pythonhosted.org/packages/d9/13/d269aa1aed3e4f50a5a103f96327210cc5fa5dd2d50882778f13c7a14606/yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83", size = 108741, upload-time = "2026-03-01T22:04:50.838Z" },
- { url = "https://files.pythonhosted.org/packages/85/fb/115b16f22c37ea4437d323e472945bea97301c8ec6089868fa560abab590/yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c", size = 108099, upload-time = "2026-03-01T22:04:52.499Z" },
- { url = "https://files.pythonhosted.org/packages/9a/64/c53487d9f4968045b8afa51aed7ca44f58b2589e772f32745f3744476c82/yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598", size = 102678, upload-time = "2026-03-01T22:04:55.176Z" },
- { url = "https://files.pythonhosted.org/packages/85/59/cd98e556fbb2bf8fab29c1a722f67ad45c5f3447cac798ab85620d1e70af/yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b", size = 100803, upload-time = "2026-03-01T22:04:56.588Z" },
- { url = "https://files.pythonhosted.org/packages/9e/c0/b39770b56d4a9f0bb5f77e2f1763cd2d75cc2f6c0131e3b4c360348fcd65/yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c", size = 100163, upload-time = "2026-03-01T22:04:58.492Z" },
- { url = "https://files.pythonhosted.org/packages/e7/64/6980f99ab00e1f0ff67cb84766c93d595b067eed07439cfccfc8fb28c1a6/yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788", size = 93859, upload-time = "2026-03-01T22:05:00.268Z" },
- { url = "https://files.pythonhosted.org/packages/38/69/912e6c5e146793e5d4b5fe39ff5b00f4d22463dfd5a162bec565ac757673/yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222", size = 108202, upload-time = "2026-03-01T22:05:02.273Z" },
- { url = "https://files.pythonhosted.org/packages/59/97/35ca6767524687ad64e5f5c31ad54bc76d585585a9fcb40f649e7e82ffed/yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb", size = 99866, upload-time = "2026-03-01T22:05:03.597Z" },
- { url = "https://files.pythonhosted.org/packages/d3/1c/1a3387ee6d73589f6f2a220ae06f2984f6c20b40c734989b0a44f5987308/yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc", size = 107852, upload-time = "2026-03-01T22:05:04.986Z" },
- { url = "https://files.pythonhosted.org/packages/a4/b8/35c0750fcd5a3f781058bfd954515dd4b1eab45e218cbb85cf11132215f1/yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2", size = 102919, upload-time = "2026-03-01T22:05:06.397Z" },
- { url = "https://files.pythonhosted.org/packages/e5/1c/9a1979aec4a81896d597bcb2177827f2dbee3f5b7cc48b2d0dadb644b41d/yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5", size = 82602, upload-time = "2026-03-01T22:05:08.444Z" },
- { url = "https://files.pythonhosted.org/packages/93/22/b85eca6fa2ad9491af48c973e4c8cf6b103a73dbb271fe3346949449fca0/yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46", size = 87461, upload-time = "2026-03-01T22:05:10.145Z" },
- { url = "https://files.pythonhosted.org/packages/93/95/07e3553fe6f113e6864a20bdc53a78113cda3b9ced8784ee52a52c9f80d8/yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928", size = 82336, upload-time = "2026-03-01T22:05:11.554Z" },
{ url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" },
{ url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" },
{ url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" },
@@ -7866,12 +7011,6 @@ version = "8.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/71/c9/5ec8679a04d37c797d343f650c51ad67d178f0001c363e44b6ac5f97a9da/zope_interface-8.1.1.tar.gz", hash = "sha256:51b10e6e8e238d719636a401f44f1e366146912407b58453936b781a19be19ec", size = 254748, upload-time = "2025-11-15T08:32:52.404Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/77/fc/d84bac27332bdefe8c03f7289d932aeb13a5fd6aeedba72b0aa5b18276ff/zope_interface-8.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e8a0fdd5048c1bb733e4693eae9bc4145a19419ea6a1c95299318a93fe9f3d72", size = 207955, upload-time = "2025-11-15T08:36:45.902Z" },
- { url = "https://files.pythonhosted.org/packages/52/02/e1234eb08b10b5cf39e68372586acc7f7bbcd18176f6046433a8f6b8b263/zope_interface-8.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4cb0ea75a26b606f5bc8524fbce7b7d8628161b6da002c80e6417ce5ec757c0", size = 208398, upload-time = "2025-11-15T08:36:47.016Z" },
- { url = "https://files.pythonhosted.org/packages/3c/be/aabda44d4bc490f9966c2b77fa7822b0407d852cb909b723f2d9e05d2427/zope_interface-8.1.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:c267b00b5a49a12743f5e1d3b4beef45479d696dab090f11fe3faded078a5133", size = 255079, upload-time = "2025-11-15T08:36:48.157Z" },
- { url = "https://files.pythonhosted.org/packages/d8/7f/4fbc7c2d7cb310e5a91b55db3d98e98d12b262014c1fcad9714fe33c2adc/zope_interface-8.1.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e25d3e2b9299e7ec54b626573673bdf0d740cf628c22aef0a3afef85b438aa54", size = 259850, upload-time = "2025-11-15T08:36:49.544Z" },
- { url = "https://files.pythonhosted.org/packages/fe/2c/dc573fffe59cdbe8bbbdd2814709bdc71c4870893e7226700bc6a08c5e0c/zope_interface-8.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:63db1241804417aff95ac229c13376c8c12752b83cc06964d62581b493e6551b", size = 261033, upload-time = "2025-11-15T08:36:51.061Z" },
- { url = "https://files.pythonhosted.org/packages/0e/51/1ac50e5ee933d9e3902f3400bda399c128a5c46f9f209d16affe3d4facc5/zope_interface-8.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:9639bf4ed07b5277fb231e54109117c30d608254685e48a7104a34618bcbfc83", size = 212215, upload-time = "2025-11-15T08:36:52.553Z" },
{ url = "https://files.pythonhosted.org/packages/08/3d/f5b8dd2512f33bfab4faba71f66f6873603d625212206dd36f12403ae4ca/zope_interface-8.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a16715808408db7252b8c1597ed9008bdad7bf378ed48eb9b0595fad4170e49d", size = 208660, upload-time = "2025-11-15T08:36:53.579Z" },
{ url = "https://files.pythonhosted.org/packages/e5/41/c331adea9b11e05ff9ac4eb7d3032b24c36a3654ae9f2bf4ef2997048211/zope_interface-8.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce6b58752acc3352c4aa0b55bbeae2a941d61537e6afdad2467a624219025aae", size = 208851, upload-time = "2025-11-15T08:36:54.854Z" },
{ url = "https://files.pythonhosted.org/packages/25/00/7a8019c3bb8b119c5f50f0a4869183a4b699ca004a7f87ce98382e6b364c/zope_interface-8.1.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:807778883d07177713136479de7fd566f9056a13aef63b686f0ab4807c6be259", size = 259292, upload-time = "2025-11-15T08:36:56.409Z" },
@@ -7886,23 +7025,6 @@ version = "0.25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" },
- { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" },
- { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" },
- { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" },
- { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" },
- { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" },
- { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" },
- { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" },
- { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" },
- { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" },
- { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" },
- { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" },
- { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" },
- { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" },
- { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" },
- { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" },
- { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" },
{ url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" },
{ url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" },
{ url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" },
diff --git a/dev/pytest/pytest_config_tests.py b/dev/pytest/pytest_config_tests.py
index 1ae115f85c..d56cceff5e 100644
--- a/dev/pytest/pytest_config_tests.py
+++ b/dev/pytest/pytest_config_tests.py
@@ -3,89 +3,93 @@ from pathlib import Path
import yaml # type: ignore
from dotenv import dotenv_values
-BASE_API_AND_DOCKER_CONFIG_SET_DIFF = {
- "APP_MAX_EXECUTION_TIME",
- "BATCH_UPLOAD_LIMIT",
- "CELERY_BEAT_SCHEDULER_TIME",
- "CODE_EXECUTION_API_KEY",
- "HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
- "HTTP_REQUEST_MAX_READ_TIMEOUT",
- "HTTP_REQUEST_MAX_WRITE_TIMEOUT",
- "INNER_API_KEY",
- "INNER_API_KEY_FOR_PLUGIN",
- "KEYWORD_DATA_SOURCE_TYPE",
- "LOGIN_LOCKOUT_DURATION",
- "LOG_FORMAT",
- "OCI_ACCESS_KEY",
- "OCI_BUCKET_NAME",
- "OCI_ENDPOINT",
- "OCI_REGION",
- "OCI_SECRET_KEY",
- "PLUGIN_DAEMON_KEY",
- "PLUGIN_DAEMON_URL",
- "PLUGIN_REMOTE_INSTALL_HOST",
- "PLUGIN_REMOTE_INSTALL_PORT",
- "REDIS_DB",
- "RESEND_API_URL",
- "RESPECT_XFORWARD_HEADERS_ENABLED",
- "SENTRY_DSN",
- "SSRF_DEFAULT_CONNECT_TIME_OUT",
- "SSRF_DEFAULT_MAX_RETRIES",
- "SSRF_DEFAULT_READ_TIME_OUT",
- "SSRF_DEFAULT_TIME_OUT",
- "SSRF_DEFAULT_WRITE_TIME_OUT",
- "UPSTASH_VECTOR_TOKEN",
- "UPSTASH_VECTOR_URL",
- "USING_UGC_INDEX",
- "WEAVIATE_BATCH_SIZE",
-}
+BASE_API_AND_DOCKER_CONFIG_SET_DIFF: frozenset[str] = frozenset(
+ (
+ "APP_MAX_EXECUTION_TIME",
+ "BATCH_UPLOAD_LIMIT",
+ "CELERY_BEAT_SCHEDULER_TIME",
+ "CODE_EXECUTION_API_KEY",
+ "HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
+ "HTTP_REQUEST_MAX_READ_TIMEOUT",
+ "HTTP_REQUEST_MAX_WRITE_TIMEOUT",
+ "INNER_API_KEY",
+ "INNER_API_KEY_FOR_PLUGIN",
+ "KEYWORD_DATA_SOURCE_TYPE",
+ "LOGIN_LOCKOUT_DURATION",
+ "LOG_FORMAT",
+ "OCI_ACCESS_KEY",
+ "OCI_BUCKET_NAME",
+ "OCI_ENDPOINT",
+ "OCI_REGION",
+ "OCI_SECRET_KEY",
+ "PLUGIN_DAEMON_KEY",
+ "PLUGIN_DAEMON_URL",
+ "PLUGIN_REMOTE_INSTALL_HOST",
+ "PLUGIN_REMOTE_INSTALL_PORT",
+ "REDIS_DB",
+ "RESEND_API_URL",
+ "RESPECT_XFORWARD_HEADERS_ENABLED",
+ "SENTRY_DSN",
+ "SSRF_DEFAULT_CONNECT_TIME_OUT",
+ "SSRF_DEFAULT_MAX_RETRIES",
+ "SSRF_DEFAULT_READ_TIME_OUT",
+ "SSRF_DEFAULT_TIME_OUT",
+ "SSRF_DEFAULT_WRITE_TIME_OUT",
+ "UPSTASH_VECTOR_TOKEN",
+ "UPSTASH_VECTOR_URL",
+ "USING_UGC_INDEX",
+ "WEAVIATE_BATCH_SIZE",
+ )
+)
-BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF = {
- "BATCH_UPLOAD_LIMIT",
- "CELERY_BEAT_SCHEDULER_TIME",
- "HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
- "HTTP_REQUEST_MAX_READ_TIMEOUT",
- "HTTP_REQUEST_MAX_WRITE_TIMEOUT",
- "INNER_API_KEY",
- "INNER_API_KEY_FOR_PLUGIN",
- "KEYWORD_DATA_SOURCE_TYPE",
- "LOGIN_LOCKOUT_DURATION",
- "LOG_FORMAT",
- "OPENDAL_FS_ROOT",
- "OPENDAL_S3_ACCESS_KEY_ID",
- "OPENDAL_S3_BUCKET",
- "OPENDAL_S3_ENDPOINT",
- "OPENDAL_S3_REGION",
- "OPENDAL_S3_ROOT",
- "OPENDAL_S3_SECRET_ACCESS_KEY",
- "OPENDAL_S3_SERVER_SIDE_ENCRYPTION",
- "PGVECTOR_MAX_CONNECTION",
- "PGVECTOR_MIN_CONNECTION",
- "PGVECTO_RS_DATABASE",
- "PGVECTO_RS_HOST",
- "PGVECTO_RS_PASSWORD",
- "PGVECTO_RS_PORT",
- "PGVECTO_RS_USER",
- "PLUGIN_DAEMON_KEY",
- "PLUGIN_DAEMON_URL",
- "PLUGIN_REMOTE_INSTALL_HOST",
- "PLUGIN_REMOTE_INSTALL_PORT",
- "RESPECT_XFORWARD_HEADERS_ENABLED",
- "SCARF_NO_ANALYTICS",
- "SSRF_DEFAULT_CONNECT_TIME_OUT",
- "SSRF_DEFAULT_MAX_RETRIES",
- "SSRF_DEFAULT_READ_TIME_OUT",
- "SSRF_DEFAULT_TIME_OUT",
- "SSRF_DEFAULT_WRITE_TIME_OUT",
- "STORAGE_OPENDAL_SCHEME",
- "SUPABASE_API_KEY",
- "SUPABASE_BUCKET_NAME",
- "SUPABASE_URL",
- "USING_UGC_INDEX",
- "VIKINGDB_CONNECTION_TIMEOUT",
- "VIKINGDB_SOCKET_TIMEOUT",
- "WEAVIATE_BATCH_SIZE",
-}
+BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF: frozenset[str] = frozenset(
+ (
+ "BATCH_UPLOAD_LIMIT",
+ "CELERY_BEAT_SCHEDULER_TIME",
+ "HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
+ "HTTP_REQUEST_MAX_READ_TIMEOUT",
+ "HTTP_REQUEST_MAX_WRITE_TIMEOUT",
+ "INNER_API_KEY",
+ "INNER_API_KEY_FOR_PLUGIN",
+ "KEYWORD_DATA_SOURCE_TYPE",
+ "LOGIN_LOCKOUT_DURATION",
+ "LOG_FORMAT",
+ "OPENDAL_FS_ROOT",
+ "OPENDAL_S3_ACCESS_KEY_ID",
+ "OPENDAL_S3_BUCKET",
+ "OPENDAL_S3_ENDPOINT",
+ "OPENDAL_S3_REGION",
+ "OPENDAL_S3_ROOT",
+ "OPENDAL_S3_SECRET_ACCESS_KEY",
+ "OPENDAL_S3_SERVER_SIDE_ENCRYPTION",
+ "PGVECTOR_MAX_CONNECTION",
+ "PGVECTOR_MIN_CONNECTION",
+ "PGVECTO_RS_DATABASE",
+ "PGVECTO_RS_HOST",
+ "PGVECTO_RS_PASSWORD",
+ "PGVECTO_RS_PORT",
+ "PGVECTO_RS_USER",
+ "PLUGIN_DAEMON_KEY",
+ "PLUGIN_DAEMON_URL",
+ "PLUGIN_REMOTE_INSTALL_HOST",
+ "PLUGIN_REMOTE_INSTALL_PORT",
+ "RESPECT_XFORWARD_HEADERS_ENABLED",
+ "SCARF_NO_ANALYTICS",
+ "SSRF_DEFAULT_CONNECT_TIME_OUT",
+ "SSRF_DEFAULT_MAX_RETRIES",
+ "SSRF_DEFAULT_READ_TIME_OUT",
+ "SSRF_DEFAULT_TIME_OUT",
+ "SSRF_DEFAULT_WRITE_TIME_OUT",
+ "STORAGE_OPENDAL_SCHEME",
+ "SUPABASE_API_KEY",
+ "SUPABASE_BUCKET_NAME",
+ "SUPABASE_URL",
+ "USING_UGC_INDEX",
+ "VIKINGDB_CONNECTION_TIMEOUT",
+ "VIKINGDB_SOCKET_TIMEOUT",
+ "WEAVIATE_BATCH_SIZE",
+ )
+)
API_CONFIG_SET = set(dotenv_values(Path("api") / Path(".env.example")).keys())
DOCKER_CONFIG_SET = set(dotenv_values(Path("docker") / Path(".env.example")).keys())
diff --git a/dev/start-docker-compose b/dev/start-docker-compose
index 9652be169d..aa4f66a6cf 100755
--- a/dev/start-docker-compose
+++ b/dev/start-docker-compose
@@ -1,8 +1,8 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-SCRIPT_DIR="$(dirname "$(realpath "$0")")"
-ROOT="$(dirname "$SCRIPT_DIR")"
-
-cd "$ROOT/docker"
-docker compose -f docker-compose.middleware.yaml --profile postgresql --profile weaviate -p dify up -d
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+ROOT="$(dirname "$SCRIPT_DIR")"
+
+cd "$ROOT/docker"
+docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d
diff --git a/docker/.env.example b/docker/.env.example
index 9fbf9a9e72..b2d6244b46 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -186,8 +186,10 @@ CELERY_WORKER_CLASS=
# it is recommended to set it to 360 to support a longer sse connection time.
GUNICORN_TIMEOUT=360
-# The number of Celery workers. The default is 1, and can be set as needed.
-CELERY_WORKER_AMOUNT=
+# The number of Celery workers. The default is 4 for development environments
+# to allow parallel processing of workflows, document indexing, and other async tasks.
+# Adjust based on your system resources and workload requirements.
+CELERY_WORKER_AMOUNT=4
# Flag indicating whether to enable autoscaling of Celery workers.
#
diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml
index e55cf942c3..57584cb829 100644
--- a/docker/docker-compose-template.yaml
+++ b/docker/docker-compose-template.yaml
@@ -56,6 +56,12 @@ services:
volumes:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:5001/health"]
+ interval: 30s
+ timeout: 5s
+ retries: 3
+ start_period: 30s
networks:
- ssrf_proxy_network
- default
@@ -95,6 +101,12 @@ services:
volumes:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
+ healthcheck:
+ test: ["CMD-SHELL", "celery -A celery_entrypoint.celery inspect ping"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 60s
networks:
- ssrf_proxy_network
- default
@@ -126,6 +138,12 @@ services:
required: false
redis:
condition: service_started
+ healthcheck:
+ test: ["CMD-SHELL", "celery -A app.celery inspect ping"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 60s
networks:
- ssrf_proxy_network
- default
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 737a62020c..097fadc959 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -46,7 +46,7 @@ x-shared-env: &shared-api-worker-env
SERVER_WORKER_CONNECTIONS: ${SERVER_WORKER_CONNECTIONS:-10}
CELERY_WORKER_CLASS: ${CELERY_WORKER_CLASS:-}
GUNICORN_TIMEOUT: ${GUNICORN_TIMEOUT:-360}
- CELERY_WORKER_AMOUNT: ${CELERY_WORKER_AMOUNT:-}
+ CELERY_WORKER_AMOUNT: ${CELERY_WORKER_AMOUNT:-4}
CELERY_AUTO_SCALE: ${CELERY_AUTO_SCALE:-false}
CELERY_MAX_WORKERS: ${CELERY_MAX_WORKERS:-}
CELERY_MIN_WORKERS: ${CELERY_MIN_WORKERS:-}
@@ -765,6 +765,12 @@ services:
volumes:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:5001/health"]
+ interval: 30s
+ timeout: 5s
+ retries: 3
+ start_period: 30s
networks:
- ssrf_proxy_network
- default
@@ -804,6 +810,12 @@ services:
volumes:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
+ healthcheck:
+ test: ["CMD-SHELL", "celery -A celery_entrypoint.celery inspect ping"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 60s
networks:
- ssrf_proxy_network
- default
@@ -835,6 +847,12 @@ services:
required: false
redis:
condition: service_started
+ healthcheck:
+ test: ["CMD-SHELL", "celery -A app.celery inspect ping"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 60s
networks:
- ssrf_proxy_network
- default
diff --git a/package.json b/package.json
index 07f1e16153..48c3acef02 100644
--- a/package.json
+++ b/package.json
@@ -1,11 +1,15 @@
{
"name": "dify",
"private": true,
+ "scripts": {
+ "prepare": "vp config"
+ },
+ "devDependencies": {
+ "taze": "catalog:",
+ "vite-plus": "catalog:"
+ },
"engines": {
"node": "^22.22.1"
},
- "packageManager": "pnpm@10.33.0",
- "devDependencies": {
- "taze": "catalog:"
- }
+ "packageManager": "pnpm@10.33.0"
}
diff --git a/web/app/components/base/icons/assets/public/avatar/robot.svg b/packages/iconify-collections/assets/public/avatar/robot.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/avatar/robot.svg
rename to packages/iconify-collections/assets/public/avatar/robot.svg
diff --git a/web/app/components/base/icons/assets/public/avatar/user.svg b/packages/iconify-collections/assets/public/avatar/user.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/avatar/user.svg
rename to packages/iconify-collections/assets/public/avatar/user.svg
diff --git a/web/app/components/base/icons/assets/public/billing/ar-cube-1.svg b/packages/iconify-collections/assets/public/billing/ar-cube-1.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/ar-cube-1.svg
rename to packages/iconify-collections/assets/public/billing/ar-cube-1.svg
diff --git a/web/app/components/base/icons/assets/public/billing/asterisk.svg b/packages/iconify-collections/assets/public/billing/asterisk.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/asterisk.svg
rename to packages/iconify-collections/assets/public/billing/asterisk.svg
diff --git a/web/app/components/base/icons/assets/public/billing/aws-marketplace-dark.svg b/packages/iconify-collections/assets/public/billing/aws-marketplace-dark.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/aws-marketplace-dark.svg
rename to packages/iconify-collections/assets/public/billing/aws-marketplace-dark.svg
diff --git a/web/app/components/base/icons/assets/public/billing/aws-marketplace-light.svg b/packages/iconify-collections/assets/public/billing/aws-marketplace-light.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/aws-marketplace-light.svg
rename to packages/iconify-collections/assets/public/billing/aws-marketplace-light.svg
diff --git a/web/app/components/base/icons/assets/public/billing/azure.svg b/packages/iconify-collections/assets/public/billing/azure.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/azure.svg
rename to packages/iconify-collections/assets/public/billing/azure.svg
diff --git a/web/app/components/base/icons/assets/public/billing/buildings.svg b/packages/iconify-collections/assets/public/billing/buildings.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/buildings.svg
rename to packages/iconify-collections/assets/public/billing/buildings.svg
diff --git a/web/app/components/base/icons/assets/public/billing/diamond.svg b/packages/iconify-collections/assets/public/billing/diamond.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/diamond.svg
rename to packages/iconify-collections/assets/public/billing/diamond.svg
diff --git a/web/app/components/base/icons/assets/public/billing/google-cloud.svg b/packages/iconify-collections/assets/public/billing/google-cloud.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/google-cloud.svg
rename to packages/iconify-collections/assets/public/billing/google-cloud.svg
diff --git a/web/app/components/base/icons/assets/public/billing/group-2.svg b/packages/iconify-collections/assets/public/billing/group-2.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/group-2.svg
rename to packages/iconify-collections/assets/public/billing/group-2.svg
diff --git a/web/app/components/base/icons/assets/public/billing/keyframe.svg b/packages/iconify-collections/assets/public/billing/keyframe.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/keyframe.svg
rename to packages/iconify-collections/assets/public/billing/keyframe.svg
diff --git a/web/app/components/base/icons/assets/public/billing/sparkles-soft.svg b/packages/iconify-collections/assets/public/billing/sparkles-soft.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/sparkles-soft.svg
rename to packages/iconify-collections/assets/public/billing/sparkles-soft.svg
diff --git a/web/app/components/base/icons/assets/public/billing/sparkles.svg b/packages/iconify-collections/assets/public/billing/sparkles.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/billing/sparkles.svg
rename to packages/iconify-collections/assets/public/billing/sparkles.svg
diff --git a/web/app/components/base/icons/assets/public/common/d.svg b/packages/iconify-collections/assets/public/common/d.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/d.svg
rename to packages/iconify-collections/assets/public/common/d.svg
diff --git a/web/app/components/base/icons/assets/public/common/diagonal-dividing-line.svg b/packages/iconify-collections/assets/public/common/diagonal-dividing-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/diagonal-dividing-line.svg
rename to packages/iconify-collections/assets/public/common/diagonal-dividing-line.svg
diff --git a/web/app/components/base/icons/assets/public/common/dify.svg b/packages/iconify-collections/assets/public/common/dify.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/dify.svg
rename to packages/iconify-collections/assets/public/common/dify.svg
diff --git a/web/app/components/base/icons/assets/public/common/gdpr.svg b/packages/iconify-collections/assets/public/common/gdpr.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/gdpr.svg
rename to packages/iconify-collections/assets/public/common/gdpr.svg
diff --git a/web/app/components/base/icons/assets/public/common/github.svg b/packages/iconify-collections/assets/public/common/github.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/github.svg
rename to packages/iconify-collections/assets/public/common/github.svg
diff --git a/web/app/components/base/icons/assets/public/common/highlight.svg b/packages/iconify-collections/assets/public/common/highlight.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/highlight.svg
rename to packages/iconify-collections/assets/public/common/highlight.svg
diff --git a/web/app/components/base/icons/assets/public/common/iso.svg b/packages/iconify-collections/assets/public/common/iso.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/iso.svg
rename to packages/iconify-collections/assets/public/common/iso.svg
diff --git a/web/app/components/base/icons/assets/public/common/line-3.svg b/packages/iconify-collections/assets/public/common/line-3.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/line-3.svg
rename to packages/iconify-collections/assets/public/common/line-3.svg
diff --git a/web/app/components/base/icons/assets/public/common/lock.svg b/packages/iconify-collections/assets/public/common/lock.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/lock.svg
rename to packages/iconify-collections/assets/public/common/lock.svg
diff --git a/web/app/components/base/icons/assets/public/common/message-chat-square.svg b/packages/iconify-collections/assets/public/common/message-chat-square.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/message-chat-square.svg
rename to packages/iconify-collections/assets/public/common/message-chat-square.svg
diff --git a/web/app/components/base/icons/assets/public/common/multi-path-retrieval.svg b/packages/iconify-collections/assets/public/common/multi-path-retrieval.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/multi-path-retrieval.svg
rename to packages/iconify-collections/assets/public/common/multi-path-retrieval.svg
diff --git a/web/app/components/base/icons/assets/public/common/n-to-1-retrieval.svg b/packages/iconify-collections/assets/public/common/n-to-1-retrieval.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/n-to-1-retrieval.svg
rename to packages/iconify-collections/assets/public/common/n-to-1-retrieval.svg
diff --git a/web/app/components/base/icons/assets/public/common/notion.svg b/packages/iconify-collections/assets/public/common/notion.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/notion.svg
rename to packages/iconify-collections/assets/public/common/notion.svg
diff --git a/web/app/components/base/icons/assets/public/common/soc2.svg b/packages/iconify-collections/assets/public/common/soc2.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/soc2.svg
rename to packages/iconify-collections/assets/public/common/soc2.svg
diff --git a/web/app/components/base/icons/assets/public/common/sparkles-soft-accent.svg b/packages/iconify-collections/assets/public/common/sparkles-soft-accent.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/sparkles-soft-accent.svg
rename to packages/iconify-collections/assets/public/common/sparkles-soft-accent.svg
diff --git a/web/app/components/base/icons/assets/public/common/sparkles-soft.svg b/packages/iconify-collections/assets/public/common/sparkles-soft.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/common/sparkles-soft.svg
rename to packages/iconify-collections/assets/public/common/sparkles-soft.svg
diff --git a/web/app/components/base/icons/assets/public/education/triangle.svg b/packages/iconify-collections/assets/public/education/triangle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/education/triangle.svg
rename to packages/iconify-collections/assets/public/education/triangle.svg
diff --git a/web/app/components/base/icons/assets/public/files/csv.svg b/packages/iconify-collections/assets/public/files/csv.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/csv.svg
rename to packages/iconify-collections/assets/public/files/csv.svg
diff --git a/web/app/components/base/icons/assets/public/files/doc.svg b/packages/iconify-collections/assets/public/files/doc.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/doc.svg
rename to packages/iconify-collections/assets/public/files/doc.svg
diff --git a/web/app/components/base/icons/assets/public/files/docx.svg b/packages/iconify-collections/assets/public/files/docx.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/docx.svg
rename to packages/iconify-collections/assets/public/files/docx.svg
diff --git a/web/app/components/base/icons/assets/public/files/html.svg b/packages/iconify-collections/assets/public/files/html.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/html.svg
rename to packages/iconify-collections/assets/public/files/html.svg
diff --git a/web/app/components/base/icons/assets/public/files/json.svg b/packages/iconify-collections/assets/public/files/json.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/json.svg
rename to packages/iconify-collections/assets/public/files/json.svg
diff --git a/web/app/components/base/icons/assets/public/files/md.svg b/packages/iconify-collections/assets/public/files/md.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/md.svg
rename to packages/iconify-collections/assets/public/files/md.svg
diff --git a/web/app/components/base/icons/assets/public/files/pdf.svg b/packages/iconify-collections/assets/public/files/pdf.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/pdf.svg
rename to packages/iconify-collections/assets/public/files/pdf.svg
diff --git a/web/app/components/base/icons/assets/public/files/txt.svg b/packages/iconify-collections/assets/public/files/txt.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/txt.svg
rename to packages/iconify-collections/assets/public/files/txt.svg
diff --git a/web/app/components/base/icons/assets/public/files/unknown.svg b/packages/iconify-collections/assets/public/files/unknown.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/unknown.svg
rename to packages/iconify-collections/assets/public/files/unknown.svg
diff --git a/web/app/components/base/icons/assets/public/files/xlsx.svg b/packages/iconify-collections/assets/public/files/xlsx.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/xlsx.svg
rename to packages/iconify-collections/assets/public/files/xlsx.svg
diff --git a/web/app/components/base/icons/assets/public/files/yaml.svg b/packages/iconify-collections/assets/public/files/yaml.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/files/yaml.svg
rename to packages/iconify-collections/assets/public/files/yaml.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/dataset-card/external-knowledge-base.svg b/packages/iconify-collections/assets/public/knowledge/dataset-card/external-knowledge-base.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/dataset-card/external-knowledge-base.svg
rename to packages/iconify-collections/assets/public/knowledge/dataset-card/external-knowledge-base.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/dataset-card/general.svg b/packages/iconify-collections/assets/public/knowledge/dataset-card/general.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/dataset-card/general.svg
rename to packages/iconify-collections/assets/public/knowledge/dataset-card/general.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/dataset-card/graph.svg b/packages/iconify-collections/assets/public/knowledge/dataset-card/graph.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/dataset-card/graph.svg
rename to packages/iconify-collections/assets/public/knowledge/dataset-card/graph.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/dataset-card/parent-child.svg b/packages/iconify-collections/assets/public/knowledge/dataset-card/parent-child.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/dataset-card/parent-child.svg
rename to packages/iconify-collections/assets/public/knowledge/dataset-card/parent-child.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/dataset-card/qa.svg b/packages/iconify-collections/assets/public/knowledge/dataset-card/qa.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/dataset-card/qa.svg
rename to packages/iconify-collections/assets/public/knowledge/dataset-card/qa.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/file.svg b/packages/iconify-collections/assets/public/knowledge/file.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/file.svg
rename to packages/iconify-collections/assets/public/knowledge/file.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/online-drive/buckets-blue.svg b/packages/iconify-collections/assets/public/knowledge/online-drive/buckets-blue.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/online-drive/buckets-blue.svg
rename to packages/iconify-collections/assets/public/knowledge/online-drive/buckets-blue.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/online-drive/buckets-gray.svg b/packages/iconify-collections/assets/public/knowledge/online-drive/buckets-gray.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/online-drive/buckets-gray.svg
rename to packages/iconify-collections/assets/public/knowledge/online-drive/buckets-gray.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/online-drive/folder.svg b/packages/iconify-collections/assets/public/knowledge/online-drive/folder.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/online-drive/folder.svg
rename to packages/iconify-collections/assets/public/knowledge/online-drive/folder.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/option-card-effect-blue-light.svg b/packages/iconify-collections/assets/public/knowledge/option-card-effect-blue-light.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/option-card-effect-blue-light.svg
rename to packages/iconify-collections/assets/public/knowledge/option-card-effect-blue-light.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/option-card-effect-blue.svg b/packages/iconify-collections/assets/public/knowledge/option-card-effect-blue.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/option-card-effect-blue.svg
rename to packages/iconify-collections/assets/public/knowledge/option-card-effect-blue.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/option-card-effect-orange.svg b/packages/iconify-collections/assets/public/knowledge/option-card-effect-orange.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/option-card-effect-orange.svg
rename to packages/iconify-collections/assets/public/knowledge/option-card-effect-orange.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/option-card-effect-purple.svg b/packages/iconify-collections/assets/public/knowledge/option-card-effect-purple.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/option-card-effect-purple.svg
rename to packages/iconify-collections/assets/public/knowledge/option-card-effect-purple.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/option-card-effect-teal.svg b/packages/iconify-collections/assets/public/knowledge/option-card-effect-teal.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/option-card-effect-teal.svg
rename to packages/iconify-collections/assets/public/knowledge/option-card-effect-teal.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/selection-mod.svg b/packages/iconify-collections/assets/public/knowledge/selection-mod.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/selection-mod.svg
rename to packages/iconify-collections/assets/public/knowledge/selection-mod.svg
diff --git a/web/app/components/base/icons/assets/public/knowledge/watercrawl.svg b/packages/iconify-collections/assets/public/knowledge/watercrawl.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/knowledge/watercrawl.svg
rename to packages/iconify-collections/assets/public/knowledge/watercrawl.svg
diff --git a/web/app/components/base/icons/assets/public/llm/Anthropic-dark.svg b/packages/iconify-collections/assets/public/llm/Anthropic-dark.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/Anthropic-dark.svg
rename to packages/iconify-collections/assets/public/llm/Anthropic-dark.svg
diff --git a/web/app/components/base/icons/assets/public/llm/Anthropic-light.svg b/packages/iconify-collections/assets/public/llm/Anthropic-light.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/Anthropic-light.svg
rename to packages/iconify-collections/assets/public/llm/Anthropic-light.svg
diff --git a/web/app/components/base/icons/assets/public/llm/Tongyi.svg b/packages/iconify-collections/assets/public/llm/Tongyi.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/Tongyi.svg
rename to packages/iconify-collections/assets/public/llm/Tongyi.svg
diff --git a/web/app/components/base/icons/assets/public/llm/anthropic-short-light.svg b/packages/iconify-collections/assets/public/llm/anthropic-short-light.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/anthropic-short-light.svg
rename to packages/iconify-collections/assets/public/llm/anthropic-short-light.svg
diff --git a/web/app/components/base/icons/assets/public/llm/anthropic-text.svg b/packages/iconify-collections/assets/public/llm/anthropic-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/anthropic-text.svg
rename to packages/iconify-collections/assets/public/llm/anthropic-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/anthropic.svg b/packages/iconify-collections/assets/public/llm/anthropic.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/anthropic.svg
rename to packages/iconify-collections/assets/public/llm/anthropic.svg
diff --git a/web/app/components/base/icons/assets/public/llm/azure-openai-service-text.svg b/packages/iconify-collections/assets/public/llm/azure-openai-service-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/azure-openai-service-text.svg
rename to packages/iconify-collections/assets/public/llm/azure-openai-service-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/azure-openai-service.svg b/packages/iconify-collections/assets/public/llm/azure-openai-service.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/azure-openai-service.svg
rename to packages/iconify-collections/assets/public/llm/azure-openai-service.svg
diff --git a/web/app/components/base/icons/assets/public/llm/azureai-text.svg b/packages/iconify-collections/assets/public/llm/azureai-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/azureai-text.svg
rename to packages/iconify-collections/assets/public/llm/azureai-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/azureai.svg b/packages/iconify-collections/assets/public/llm/azureai.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/azureai.svg
rename to packages/iconify-collections/assets/public/llm/azureai.svg
diff --git a/web/app/components/base/icons/assets/public/llm/baichuan-text.svg b/packages/iconify-collections/assets/public/llm/baichuan-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/baichuan-text.svg
rename to packages/iconify-collections/assets/public/llm/baichuan-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/baichuan.svg b/packages/iconify-collections/assets/public/llm/baichuan.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/baichuan.svg
rename to packages/iconify-collections/assets/public/llm/baichuan.svg
diff --git a/web/app/components/base/icons/assets/public/llm/chatglm-text.svg b/packages/iconify-collections/assets/public/llm/chatglm-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/chatglm-text.svg
rename to packages/iconify-collections/assets/public/llm/chatglm-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/chatglm.svg b/packages/iconify-collections/assets/public/llm/chatglm.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/chatglm.svg
rename to packages/iconify-collections/assets/public/llm/chatglm.svg
diff --git a/web/app/components/base/icons/assets/public/llm/cohere-text.svg b/packages/iconify-collections/assets/public/llm/cohere-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/cohere-text.svg
rename to packages/iconify-collections/assets/public/llm/cohere-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/cohere.svg b/packages/iconify-collections/assets/public/llm/cohere.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/cohere.svg
rename to packages/iconify-collections/assets/public/llm/cohere.svg
diff --git a/web/app/components/base/icons/assets/public/llm/deepseek.svg b/packages/iconify-collections/assets/public/llm/deepseek.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/deepseek.svg
rename to packages/iconify-collections/assets/public/llm/deepseek.svg
diff --git a/web/app/components/base/icons/assets/public/llm/gemini.svg b/packages/iconify-collections/assets/public/llm/gemini.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/gemini.svg
rename to packages/iconify-collections/assets/public/llm/gemini.svg
diff --git a/web/app/components/base/icons/assets/public/llm/gpt-3.svg b/packages/iconify-collections/assets/public/llm/gpt-3.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/gpt-3.svg
rename to packages/iconify-collections/assets/public/llm/gpt-3.svg
diff --git a/web/app/components/base/icons/assets/public/llm/gpt-4.svg b/packages/iconify-collections/assets/public/llm/gpt-4.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/gpt-4.svg
rename to packages/iconify-collections/assets/public/llm/gpt-4.svg
diff --git a/web/app/components/base/icons/assets/public/llm/grok.svg b/packages/iconify-collections/assets/public/llm/grok.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/grok.svg
rename to packages/iconify-collections/assets/public/llm/grok.svg
diff --git a/web/app/components/base/icons/assets/public/llm/huggingface-text-hub.svg b/packages/iconify-collections/assets/public/llm/huggingface-text-hub.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/huggingface-text-hub.svg
rename to packages/iconify-collections/assets/public/llm/huggingface-text-hub.svg
diff --git a/web/app/components/base/icons/assets/public/llm/huggingface-text.svg b/packages/iconify-collections/assets/public/llm/huggingface-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/huggingface-text.svg
rename to packages/iconify-collections/assets/public/llm/huggingface-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/huggingface.svg b/packages/iconify-collections/assets/public/llm/huggingface.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/huggingface.svg
rename to packages/iconify-collections/assets/public/llm/huggingface.svg
diff --git a/web/app/components/base/icons/assets/public/llm/iflytek-spark-text-cn.svg b/packages/iconify-collections/assets/public/llm/iflytek-spark-text-cn.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/iflytek-spark-text-cn.svg
rename to packages/iconify-collections/assets/public/llm/iflytek-spark-text-cn.svg
diff --git a/web/app/components/base/icons/assets/public/llm/iflytek-spark-text.svg b/packages/iconify-collections/assets/public/llm/iflytek-spark-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/iflytek-spark-text.svg
rename to packages/iconify-collections/assets/public/llm/iflytek-spark-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/iflytek-spark.svg b/packages/iconify-collections/assets/public/llm/iflytek-spark.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/iflytek-spark.svg
rename to packages/iconify-collections/assets/public/llm/iflytek-spark.svg
diff --git a/web/app/components/base/icons/assets/public/llm/jina-text.svg b/packages/iconify-collections/assets/public/llm/jina-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/jina-text.svg
rename to packages/iconify-collections/assets/public/llm/jina-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/jina.svg b/packages/iconify-collections/assets/public/llm/jina.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/jina.svg
rename to packages/iconify-collections/assets/public/llm/jina.svg
diff --git a/web/app/components/base/icons/assets/public/llm/localai-text.svg b/packages/iconify-collections/assets/public/llm/localai-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/localai-text.svg
rename to packages/iconify-collections/assets/public/llm/localai-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/localai.svg b/packages/iconify-collections/assets/public/llm/localai.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/localai.svg
rename to packages/iconify-collections/assets/public/llm/localai.svg
diff --git a/web/app/components/base/icons/assets/public/llm/microsoft.svg b/packages/iconify-collections/assets/public/llm/microsoft.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/microsoft.svg
rename to packages/iconify-collections/assets/public/llm/microsoft.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-black.svg b/packages/iconify-collections/assets/public/llm/openai-black.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-black.svg
rename to packages/iconify-collections/assets/public/llm/openai-black.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-blue.svg b/packages/iconify-collections/assets/public/llm/openai-blue.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-blue.svg
rename to packages/iconify-collections/assets/public/llm/openai-blue.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-green.svg b/packages/iconify-collections/assets/public/llm/openai-green.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-green.svg
rename to packages/iconify-collections/assets/public/llm/openai-green.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-small.svg b/packages/iconify-collections/assets/public/llm/openai-small.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-small.svg
rename to packages/iconify-collections/assets/public/llm/openai-small.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-teal.svg b/packages/iconify-collections/assets/public/llm/openai-teal.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-teal.svg
rename to packages/iconify-collections/assets/public/llm/openai-teal.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-text.svg b/packages/iconify-collections/assets/public/llm/openai-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-text.svg
rename to packages/iconify-collections/assets/public/llm/openai-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-transparent.svg b/packages/iconify-collections/assets/public/llm/openai-transparent.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-transparent.svg
rename to packages/iconify-collections/assets/public/llm/openai-transparent.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-violet.svg b/packages/iconify-collections/assets/public/llm/openai-violet.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-violet.svg
rename to packages/iconify-collections/assets/public/llm/openai-violet.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openai-yellow.svg b/packages/iconify-collections/assets/public/llm/openai-yellow.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openai-yellow.svg
rename to packages/iconify-collections/assets/public/llm/openai-yellow.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openllm-text.svg b/packages/iconify-collections/assets/public/llm/openllm-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openllm-text.svg
rename to packages/iconify-collections/assets/public/llm/openllm-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/openllm.svg b/packages/iconify-collections/assets/public/llm/openllm.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/openllm.svg
rename to packages/iconify-collections/assets/public/llm/openllm.svg
diff --git a/web/app/components/base/icons/assets/public/llm/replicate-text.svg b/packages/iconify-collections/assets/public/llm/replicate-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/replicate-text.svg
rename to packages/iconify-collections/assets/public/llm/replicate-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/replicate.svg b/packages/iconify-collections/assets/public/llm/replicate.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/replicate.svg
rename to packages/iconify-collections/assets/public/llm/replicate.svg
diff --git a/web/app/components/base/icons/assets/public/llm/xorbits-inference-text.svg b/packages/iconify-collections/assets/public/llm/xorbits-inference-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/xorbits-inference-text.svg
rename to packages/iconify-collections/assets/public/llm/xorbits-inference-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/xorbits-inference.svg b/packages/iconify-collections/assets/public/llm/xorbits-inference.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/xorbits-inference.svg
rename to packages/iconify-collections/assets/public/llm/xorbits-inference.svg
diff --git a/web/app/components/base/icons/assets/public/llm/zhipuai-text-cn.svg b/packages/iconify-collections/assets/public/llm/zhipuai-text-cn.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/zhipuai-text-cn.svg
rename to packages/iconify-collections/assets/public/llm/zhipuai-text-cn.svg
diff --git a/web/app/components/base/icons/assets/public/llm/zhipuai-text.svg b/packages/iconify-collections/assets/public/llm/zhipuai-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/zhipuai-text.svg
rename to packages/iconify-collections/assets/public/llm/zhipuai-text.svg
diff --git a/web/app/components/base/icons/assets/public/llm/zhipuai.svg b/packages/iconify-collections/assets/public/llm/zhipuai.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/llm/zhipuai.svg
rename to packages/iconify-collections/assets/public/llm/zhipuai.svg
diff --git a/web/app/components/base/icons/assets/public/model/checked.svg b/packages/iconify-collections/assets/public/model/checked.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/model/checked.svg
rename to packages/iconify-collections/assets/public/model/checked.svg
diff --git a/web/app/components/base/icons/assets/public/other/Icon-3-dots.svg b/packages/iconify-collections/assets/public/other/Icon-3-dots.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/other/Icon-3-dots.svg
rename to packages/iconify-collections/assets/public/other/Icon-3-dots.svg
diff --git a/web/app/components/base/icons/assets/public/other/default-tool-icon.svg b/packages/iconify-collections/assets/public/other/default-tool-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/other/default-tool-icon.svg
rename to packages/iconify-collections/assets/public/other/default-tool-icon.svg
diff --git a/web/app/components/base/icons/assets/public/other/message-3-fill.svg b/packages/iconify-collections/assets/public/other/message-3-fill.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/other/message-3-fill.svg
rename to packages/iconify-collections/assets/public/other/message-3-fill.svg
diff --git a/web/app/components/base/icons/assets/public/other/row-struct.svg b/packages/iconify-collections/assets/public/other/row-struct.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/other/row-struct.svg
rename to packages/iconify-collections/assets/public/other/row-struct.svg
diff --git a/web/app/components/base/icons/assets/public/other/slack.svg b/packages/iconify-collections/assets/public/other/slack.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/other/slack.svg
rename to packages/iconify-collections/assets/public/other/slack.svg
diff --git a/web/app/components/base/icons/assets/public/other/teams.svg b/packages/iconify-collections/assets/public/other/teams.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/other/teams.svg
rename to packages/iconify-collections/assets/public/other/teams.svg
diff --git a/web/app/components/base/icons/assets/public/plugins/google.svg b/packages/iconify-collections/assets/public/plugins/google.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/plugins/google.svg
rename to packages/iconify-collections/assets/public/plugins/google.svg
diff --git a/web/app/components/base/icons/assets/public/plugins/partner-dark.svg b/packages/iconify-collections/assets/public/plugins/partner-dark.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/plugins/partner-dark.svg
rename to packages/iconify-collections/assets/public/plugins/partner-dark.svg
diff --git a/web/app/components/base/icons/assets/public/plugins/partner-light.svg b/packages/iconify-collections/assets/public/plugins/partner-light.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/plugins/partner-light.svg
rename to packages/iconify-collections/assets/public/plugins/partner-light.svg
diff --git a/web/app/components/base/icons/assets/public/plugins/verified-dark.svg b/packages/iconify-collections/assets/public/plugins/verified-dark.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/plugins/verified-dark.svg
rename to packages/iconify-collections/assets/public/plugins/verified-dark.svg
diff --git a/web/app/components/base/icons/assets/public/plugins/verified-light.svg b/packages/iconify-collections/assets/public/plugins/verified-light.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/plugins/verified-light.svg
rename to packages/iconify-collections/assets/public/plugins/verified-light.svg
diff --git a/web/app/components/base/icons/assets/public/plugins/web-reader.svg b/packages/iconify-collections/assets/public/plugins/web-reader.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/plugins/web-reader.svg
rename to packages/iconify-collections/assets/public/plugins/web-reader.svg
diff --git a/web/app/components/base/icons/assets/public/plugins/wikipedia.svg b/packages/iconify-collections/assets/public/plugins/wikipedia.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/plugins/wikipedia.svg
rename to packages/iconify-collections/assets/public/plugins/wikipedia.svg
diff --git a/web/app/components/base/icons/assets/public/thought/data-set.svg b/packages/iconify-collections/assets/public/thought/data-set.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/thought/data-set.svg
rename to packages/iconify-collections/assets/public/thought/data-set.svg
diff --git a/web/app/components/base/icons/assets/public/thought/loading.svg b/packages/iconify-collections/assets/public/thought/loading.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/thought/loading.svg
rename to packages/iconify-collections/assets/public/thought/loading.svg
diff --git a/web/app/components/base/icons/assets/public/thought/search.svg b/packages/iconify-collections/assets/public/thought/search.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/thought/search.svg
rename to packages/iconify-collections/assets/public/thought/search.svg
diff --git a/web/app/components/base/icons/assets/public/thought/thought-list.svg b/packages/iconify-collections/assets/public/thought/thought-list.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/thought/thought-list.svg
rename to packages/iconify-collections/assets/public/thought/thought-list.svg
diff --git a/web/app/components/base/icons/assets/public/thought/web-reader.svg b/packages/iconify-collections/assets/public/thought/web-reader.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/thought/web-reader.svg
rename to packages/iconify-collections/assets/public/thought/web-reader.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/aliyun-icon-big.svg b/packages/iconify-collections/assets/public/tracing/aliyun-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/aliyun-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/aliyun-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/aliyun-icon.svg b/packages/iconify-collections/assets/public/tracing/aliyun-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/aliyun-icon.svg
rename to packages/iconify-collections/assets/public/tracing/aliyun-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/arize-icon-big.svg b/packages/iconify-collections/assets/public/tracing/arize-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/arize-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/arize-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/arize-icon.svg b/packages/iconify-collections/assets/public/tracing/arize-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/arize-icon.svg
rename to packages/iconify-collections/assets/public/tracing/arize-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/databricks-icon-big.svg b/packages/iconify-collections/assets/public/tracing/databricks-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/databricks-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/databricks-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/databricks-icon.svg b/packages/iconify-collections/assets/public/tracing/databricks-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/databricks-icon.svg
rename to packages/iconify-collections/assets/public/tracing/databricks-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/langfuse-icon-big.svg b/packages/iconify-collections/assets/public/tracing/langfuse-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/langfuse-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/langfuse-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/langfuse-icon.svg b/packages/iconify-collections/assets/public/tracing/langfuse-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/langfuse-icon.svg
rename to packages/iconify-collections/assets/public/tracing/langfuse-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/langsmith-icon-big.svg b/packages/iconify-collections/assets/public/tracing/langsmith-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/langsmith-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/langsmith-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/langsmith-icon.svg b/packages/iconify-collections/assets/public/tracing/langsmith-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/langsmith-icon.svg
rename to packages/iconify-collections/assets/public/tracing/langsmith-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/mlflow-icon-big.svg b/packages/iconify-collections/assets/public/tracing/mlflow-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/mlflow-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/mlflow-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/mlflow-icon.svg b/packages/iconify-collections/assets/public/tracing/mlflow-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/mlflow-icon.svg
rename to packages/iconify-collections/assets/public/tracing/mlflow-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/opik-icon-big.svg b/packages/iconify-collections/assets/public/tracing/opik-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/opik-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/opik-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/opik-icon.svg b/packages/iconify-collections/assets/public/tracing/opik-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/opik-icon.svg
rename to packages/iconify-collections/assets/public/tracing/opik-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/phoenix-icon-big.svg b/packages/iconify-collections/assets/public/tracing/phoenix-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/phoenix-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/phoenix-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/phoenix-icon.svg b/packages/iconify-collections/assets/public/tracing/phoenix-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/phoenix-icon.svg
rename to packages/iconify-collections/assets/public/tracing/phoenix-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/tencent-icon-big.svg b/packages/iconify-collections/assets/public/tracing/tencent-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/tencent-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/tencent-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/tencent-icon.svg b/packages/iconify-collections/assets/public/tracing/tencent-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/tencent-icon.svg
rename to packages/iconify-collections/assets/public/tracing/tencent-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/tracing-icon.svg b/packages/iconify-collections/assets/public/tracing/tracing-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/tracing-icon.svg
rename to packages/iconify-collections/assets/public/tracing/tracing-icon.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/weave-icon-big.svg b/packages/iconify-collections/assets/public/tracing/weave-icon-big.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/weave-icon-big.svg
rename to packages/iconify-collections/assets/public/tracing/weave-icon-big.svg
diff --git a/web/app/components/base/icons/assets/public/tracing/weave-icon.svg b/packages/iconify-collections/assets/public/tracing/weave-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/public/tracing/weave-icon.svg
rename to packages/iconify-collections/assets/public/tracing/weave-icon.svg
diff --git a/web/app/components/base/icons/assets/vender/features/citations.svg b/packages/iconify-collections/assets/vender/features/citations.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/citations.svg
rename to packages/iconify-collections/assets/vender/features/citations.svg
diff --git a/web/app/components/base/icons/assets/vender/features/content-moderation.svg b/packages/iconify-collections/assets/vender/features/content-moderation.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/content-moderation.svg
rename to packages/iconify-collections/assets/vender/features/content-moderation.svg
diff --git a/web/app/components/base/icons/assets/vender/features/document.svg b/packages/iconify-collections/assets/vender/features/document.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/document.svg
rename to packages/iconify-collections/assets/vender/features/document.svg
diff --git a/web/app/components/base/icons/assets/vender/features/folder-upload.svg b/packages/iconify-collections/assets/vender/features/folder-upload.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/folder-upload.svg
rename to packages/iconify-collections/assets/vender/features/folder-upload.svg
diff --git a/web/app/components/base/icons/assets/vender/features/love-message.svg b/packages/iconify-collections/assets/vender/features/love-message.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/love-message.svg
rename to packages/iconify-collections/assets/vender/features/love-message.svg
diff --git a/web/app/components/base/icons/assets/vender/features/message-fast.svg b/packages/iconify-collections/assets/vender/features/message-fast.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/message-fast.svg
rename to packages/iconify-collections/assets/vender/features/message-fast.svg
diff --git a/web/app/components/base/icons/assets/vender/features/microphone-01.svg b/packages/iconify-collections/assets/vender/features/microphone-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/microphone-01.svg
rename to packages/iconify-collections/assets/vender/features/microphone-01.svg
diff --git a/web/app/components/base/icons/assets/vender/features/text-to-audio.svg b/packages/iconify-collections/assets/vender/features/text-to-audio.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/text-to-audio.svg
rename to packages/iconify-collections/assets/vender/features/text-to-audio.svg
diff --git a/web/app/components/base/icons/assets/vender/features/virtual-assistant.svg b/packages/iconify-collections/assets/vender/features/virtual-assistant.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/virtual-assistant.svg
rename to packages/iconify-collections/assets/vender/features/virtual-assistant.svg
diff --git a/web/app/components/base/icons/assets/vender/features/vision.svg b/packages/iconify-collections/assets/vender/features/vision.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/features/vision.svg
rename to packages/iconify-collections/assets/vender/features/vision.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/add-chunks.svg b/packages/iconify-collections/assets/vender/knowledge/add-chunks.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/add-chunks.svg
rename to packages/iconify-collections/assets/vender/knowledge/add-chunks.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/api-aggregate.svg b/packages/iconify-collections/assets/vender/knowledge/api-aggregate.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/api-aggregate.svg
rename to packages/iconify-collections/assets/vender/knowledge/api-aggregate.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/arrow-shape.svg b/packages/iconify-collections/assets/vender/knowledge/arrow-shape.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/arrow-shape.svg
rename to packages/iconify-collections/assets/vender/knowledge/arrow-shape.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/chunk.svg b/packages/iconify-collections/assets/vender/knowledge/chunk.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/chunk.svg
rename to packages/iconify-collections/assets/vender/knowledge/chunk.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/collapse.svg b/packages/iconify-collections/assets/vender/knowledge/collapse.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/collapse.svg
rename to packages/iconify-collections/assets/vender/knowledge/collapse.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/divider.svg b/packages/iconify-collections/assets/vender/knowledge/divider.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/divider.svg
rename to packages/iconify-collections/assets/vender/knowledge/divider.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/economic.svg b/packages/iconify-collections/assets/vender/knowledge/economic.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/economic.svg
rename to packages/iconify-collections/assets/vender/knowledge/economic.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/full-text-search.svg b/packages/iconify-collections/assets/vender/knowledge/full-text-search.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/full-text-search.svg
rename to packages/iconify-collections/assets/vender/knowledge/full-text-search.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/general-chunk.svg b/packages/iconify-collections/assets/vender/knowledge/general-chunk.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/general-chunk.svg
rename to packages/iconify-collections/assets/vender/knowledge/general-chunk.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/high-quality.svg b/packages/iconify-collections/assets/vender/knowledge/high-quality.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/high-quality.svg
rename to packages/iconify-collections/assets/vender/knowledge/high-quality.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/hybrid-search.svg b/packages/iconify-collections/assets/vender/knowledge/hybrid-search.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/hybrid-search.svg
rename to packages/iconify-collections/assets/vender/knowledge/hybrid-search.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/parent-child-chunk.svg b/packages/iconify-collections/assets/vender/knowledge/parent-child-chunk.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/parent-child-chunk.svg
rename to packages/iconify-collections/assets/vender/knowledge/parent-child-chunk.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/question-and-answer.svg b/packages/iconify-collections/assets/vender/knowledge/question-and-answer.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/question-and-answer.svg
rename to packages/iconify-collections/assets/vender/knowledge/question-and-answer.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/search-lines-sparkle.svg b/packages/iconify-collections/assets/vender/knowledge/search-lines-sparkle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/search-lines-sparkle.svg
rename to packages/iconify-collections/assets/vender/knowledge/search-lines-sparkle.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/search-menu.svg b/packages/iconify-collections/assets/vender/knowledge/search-menu.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/search-menu.svg
rename to packages/iconify-collections/assets/vender/knowledge/search-menu.svg
diff --git a/web/app/components/base/icons/assets/vender/knowledge/vector-search.svg b/packages/iconify-collections/assets/vender/knowledge/vector-search.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/knowledge/vector-search.svg
rename to packages/iconify-collections/assets/vender/knowledge/vector-search.svg
diff --git a/web/app/components/base/icons/assets/vender/line/alertsAndFeedback/alert-triangle.svg b/packages/iconify-collections/assets/vender/line/alertsAndFeedback/alert-triangle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/alertsAndFeedback/alert-triangle.svg
rename to packages/iconify-collections/assets/vender/line/alertsAndFeedback/alert-triangle.svg
diff --git a/web/app/components/base/icons/assets/vender/line/alertsAndFeedback/thumbs-down.svg b/packages/iconify-collections/assets/vender/line/alertsAndFeedback/thumbs-down.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/alertsAndFeedback/thumbs-down.svg
rename to packages/iconify-collections/assets/vender/line/alertsAndFeedback/thumbs-down.svg
diff --git a/web/app/components/base/icons/assets/vender/line/alertsAndFeedback/thumbs-up.svg b/packages/iconify-collections/assets/vender/line/alertsAndFeedback/thumbs-up.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/alertsAndFeedback/thumbs-up.svg
rename to packages/iconify-collections/assets/vender/line/alertsAndFeedback/thumbs-up.svg
diff --git a/web/app/components/base/icons/assets/vender/line/alertsAndFeedback/warning.svg b/packages/iconify-collections/assets/vender/line/alertsAndFeedback/warning.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/alertsAndFeedback/warning.svg
rename to packages/iconify-collections/assets/vender/line/alertsAndFeedback/warning.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/IconR.svg b/packages/iconify-collections/assets/vender/line/arrows/IconR.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/IconR.svg
rename to packages/iconify-collections/assets/vender/line/arrows/IconR.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/arrow-narrow-left.svg b/packages/iconify-collections/assets/vender/line/arrows/arrow-narrow-left.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/arrow-narrow-left.svg
rename to packages/iconify-collections/assets/vender/line/arrows/arrow-narrow-left.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/arrow-up-right.svg b/packages/iconify-collections/assets/vender/line/arrows/arrow-up-right.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/arrow-up-right.svg
rename to packages/iconify-collections/assets/vender/line/arrows/arrow-up-right.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/chevron-down-double.svg b/packages/iconify-collections/assets/vender/line/arrows/chevron-down-double.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/chevron-down-double.svg
rename to packages/iconify-collections/assets/vender/line/arrows/chevron-down-double.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/chevron-right.svg b/packages/iconify-collections/assets/vender/line/arrows/chevron-right.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/chevron-right.svg
rename to packages/iconify-collections/assets/vender/line/arrows/chevron-right.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/chevron-selector-vertical.svg b/packages/iconify-collections/assets/vender/line/arrows/chevron-selector-vertical.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/chevron-selector-vertical.svg
rename to packages/iconify-collections/assets/vender/line/arrows/chevron-selector-vertical.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/refresh-ccw-01.svg b/packages/iconify-collections/assets/vender/line/arrows/refresh-ccw-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/refresh-ccw-01.svg
rename to packages/iconify-collections/assets/vender/line/arrows/refresh-ccw-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/refresh-cw-05.svg b/packages/iconify-collections/assets/vender/line/arrows/refresh-cw-05.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/refresh-cw-05.svg
rename to packages/iconify-collections/assets/vender/line/arrows/refresh-cw-05.svg
diff --git a/web/app/components/base/icons/assets/vender/line/arrows/reverse-left.svg b/packages/iconify-collections/assets/vender/line/arrows/reverse-left.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/arrows/reverse-left.svg
rename to packages/iconify-collections/assets/vender/line/arrows/reverse-left.svg
diff --git a/web/app/components/base/icons/assets/vender/line/communication/ai-text.svg b/packages/iconify-collections/assets/vender/line/communication/ai-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/communication/ai-text.svg
rename to packages/iconify-collections/assets/vender/line/communication/ai-text.svg
diff --git a/web/app/components/base/icons/assets/vender/line/communication/chat-bot-slim.svg b/packages/iconify-collections/assets/vender/line/communication/chat-bot-slim.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/communication/chat-bot-slim.svg
rename to packages/iconify-collections/assets/vender/line/communication/chat-bot-slim.svg
diff --git a/web/app/components/base/icons/assets/vender/line/communication/chat-bot.svg b/packages/iconify-collections/assets/vender/line/communication/chat-bot.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/communication/chat-bot.svg
rename to packages/iconify-collections/assets/vender/line/communication/chat-bot.svg
diff --git a/web/app/components/base/icons/assets/vender/line/communication/cute-robot.svg b/packages/iconify-collections/assets/vender/line/communication/cute-robot.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/communication/cute-robot.svg
rename to packages/iconify-collections/assets/vender/line/communication/cute-robot.svg
diff --git a/web/app/components/base/icons/assets/vender/line/communication/message-check-remove.svg b/packages/iconify-collections/assets/vender/line/communication/message-check-remove.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/communication/message-check-remove.svg
rename to packages/iconify-collections/assets/vender/line/communication/message-check-remove.svg
diff --git a/web/app/components/base/icons/assets/vender/line/communication/message-fast-plus.svg b/packages/iconify-collections/assets/vender/line/communication/message-fast-plus.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/communication/message-fast-plus.svg
rename to packages/iconify-collections/assets/vender/line/communication/message-fast-plus.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/artificial-brain.svg b/packages/iconify-collections/assets/vender/line/development/artificial-brain.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/artificial-brain.svg
rename to packages/iconify-collections/assets/vender/line/development/artificial-brain.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/bar-chart-square-02.svg b/packages/iconify-collections/assets/vender/line/development/bar-chart-square-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/bar-chart-square-02.svg
rename to packages/iconify-collections/assets/vender/line/development/bar-chart-square-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/brackets-x.svg b/packages/iconify-collections/assets/vender/line/development/brackets-x.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/brackets-x.svg
rename to packages/iconify-collections/assets/vender/line/development/brackets-x.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/code-browser.svg b/packages/iconify-collections/assets/vender/line/development/code-browser.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/code-browser.svg
rename to packages/iconify-collections/assets/vender/line/development/code-browser.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/container.svg b/packages/iconify-collections/assets/vender/line/development/container.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/container.svg
rename to packages/iconify-collections/assets/vender/line/development/container.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/database-01.svg b/packages/iconify-collections/assets/vender/line/development/database-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/database-01.svg
rename to packages/iconify-collections/assets/vender/line/development/database-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/database-03.svg b/packages/iconify-collections/assets/vender/line/development/database-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/database-03.svg
rename to packages/iconify-collections/assets/vender/line/development/database-03.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/file-heart-02.svg b/packages/iconify-collections/assets/vender/line/development/file-heart-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/file-heart-02.svg
rename to packages/iconify-collections/assets/vender/line/development/file-heart-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/git-branch-01.svg b/packages/iconify-collections/assets/vender/line/development/git-branch-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/git-branch-01.svg
rename to packages/iconify-collections/assets/vender/line/development/git-branch-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/prompt-engineering.svg b/packages/iconify-collections/assets/vender/line/development/prompt-engineering.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/prompt-engineering.svg
rename to packages/iconify-collections/assets/vender/line/development/prompt-engineering.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/puzzle-piece-01.svg b/packages/iconify-collections/assets/vender/line/development/puzzle-piece-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/puzzle-piece-01.svg
rename to packages/iconify-collections/assets/vender/line/development/puzzle-piece-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/terminal-square.svg b/packages/iconify-collections/assets/vender/line/development/terminal-square.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/terminal-square.svg
rename to packages/iconify-collections/assets/vender/line/development/terminal-square.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/variable.svg b/packages/iconify-collections/assets/vender/line/development/variable.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/variable.svg
rename to packages/iconify-collections/assets/vender/line/development/variable.svg
diff --git a/web/app/components/base/icons/assets/vender/line/development/webhooks.svg b/packages/iconify-collections/assets/vender/line/development/webhooks.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/development/webhooks.svg
rename to packages/iconify-collections/assets/vender/line/development/webhooks.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/align-left.svg b/packages/iconify-collections/assets/vender/line/editor/align-left.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/align-left.svg
rename to packages/iconify-collections/assets/vender/line/editor/align-left.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/bezier-curve-03.svg b/packages/iconify-collections/assets/vender/line/editor/bezier-curve-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/bezier-curve-03.svg
rename to packages/iconify-collections/assets/vender/line/editor/bezier-curve-03.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/collapse.svg b/packages/iconify-collections/assets/vender/line/editor/collapse.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/collapse.svg
rename to packages/iconify-collections/assets/vender/line/editor/collapse.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/colors.svg b/packages/iconify-collections/assets/vender/line/editor/colors.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/colors.svg
rename to packages/iconify-collections/assets/vender/line/editor/colors.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/image-indent-left.svg b/packages/iconify-collections/assets/vender/line/editor/image-indent-left.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/image-indent-left.svg
rename to packages/iconify-collections/assets/vender/line/editor/image-indent-left.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/left-indent-02.svg b/packages/iconify-collections/assets/vender/line/editor/left-indent-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/left-indent-02.svg
rename to packages/iconify-collections/assets/vender/line/editor/left-indent-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/letter-spacing-01.svg b/packages/iconify-collections/assets/vender/line/editor/letter-spacing-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/letter-spacing-01.svg
rename to packages/iconify-collections/assets/vender/line/editor/letter-spacing-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/editor/type-square.svg b/packages/iconify-collections/assets/vender/line/editor/type-square.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/editor/type-square.svg
rename to packages/iconify-collections/assets/vender/line/editor/type-square.svg
diff --git a/web/app/components/base/icons/assets/vender/line/education/book-open-01.svg b/packages/iconify-collections/assets/vender/line/education/book-open-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/education/book-open-01.svg
rename to packages/iconify-collections/assets/vender/line/education/book-open-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/copy-check.svg b/packages/iconify-collections/assets/vender/line/files/copy-check.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/copy-check.svg
rename to packages/iconify-collections/assets/vender/line/files/copy-check.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/copy.svg b/packages/iconify-collections/assets/vender/line/files/copy.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/copy.svg
rename to packages/iconify-collections/assets/vender/line/files/copy.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-02.svg b/packages/iconify-collections/assets/vender/line/files/file-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-02.svg
rename to packages/iconify-collections/assets/vender/line/files/file-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-arrow-01.svg b/packages/iconify-collections/assets/vender/line/files/file-arrow-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-arrow-01.svg
rename to packages/iconify-collections/assets/vender/line/files/file-arrow-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-check-02.svg b/packages/iconify-collections/assets/vender/line/files/file-check-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-check-02.svg
rename to packages/iconify-collections/assets/vender/line/files/file-check-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-download-02.svg b/packages/iconify-collections/assets/vender/line/files/file-download-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-download-02.svg
rename to packages/iconify-collections/assets/vender/line/files/file-download-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-plus-01.svg b/packages/iconify-collections/assets/vender/line/files/file-plus-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-plus-01.svg
rename to packages/iconify-collections/assets/vender/line/files/file-plus-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-plus-02.svg b/packages/iconify-collections/assets/vender/line/files/file-plus-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-plus-02.svg
rename to packages/iconify-collections/assets/vender/line/files/file-plus-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-text.svg b/packages/iconify-collections/assets/vender/line/files/file-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-text.svg
rename to packages/iconify-collections/assets/vender/line/files/file-text.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/file-upload.svg b/packages/iconify-collections/assets/vender/line/files/file-upload.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/file-upload.svg
rename to packages/iconify-collections/assets/vender/line/files/file-upload.svg
diff --git a/web/app/components/base/icons/assets/vender/line/files/folder.svg b/packages/iconify-collections/assets/vender/line/files/folder.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/files/folder.svg
rename to packages/iconify-collections/assets/vender/line/files/folder.svg
diff --git a/web/app/components/base/icons/assets/vender/line/financeAndECommerce/balance.svg b/packages/iconify-collections/assets/vender/line/financeAndECommerce/balance.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/financeAndECommerce/balance.svg
rename to packages/iconify-collections/assets/vender/line/financeAndECommerce/balance.svg
diff --git a/web/app/components/base/icons/assets/vender/line/financeAndECommerce/coins-stacked-01.svg b/packages/iconify-collections/assets/vender/line/financeAndECommerce/coins-stacked-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/financeAndECommerce/coins-stacked-01.svg
rename to packages/iconify-collections/assets/vender/line/financeAndECommerce/coins-stacked-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/financeAndECommerce/credits-coin.svg b/packages/iconify-collections/assets/vender/line/financeAndECommerce/credits-coin.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/financeAndECommerce/credits-coin.svg
rename to packages/iconify-collections/assets/vender/line/financeAndECommerce/credits-coin.svg
diff --git a/web/app/components/base/icons/assets/vender/line/financeAndECommerce/gold-coin.svg b/packages/iconify-collections/assets/vender/line/financeAndECommerce/gold-coin.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/financeAndECommerce/gold-coin.svg
rename to packages/iconify-collections/assets/vender/line/financeAndECommerce/gold-coin.svg
diff --git a/web/app/components/base/icons/assets/vender/line/financeAndECommerce/receipt-list.svg b/packages/iconify-collections/assets/vender/line/financeAndECommerce/receipt-list.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/financeAndECommerce/receipt-list.svg
rename to packages/iconify-collections/assets/vender/line/financeAndECommerce/receipt-list.svg
diff --git a/web/app/components/base/icons/assets/vender/line/financeAndECommerce/tag-01.svg b/packages/iconify-collections/assets/vender/line/financeAndECommerce/tag-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/financeAndECommerce/tag-01.svg
rename to packages/iconify-collections/assets/vender/line/financeAndECommerce/tag-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/financeAndECommerce/tag-03.svg b/packages/iconify-collections/assets/vender/line/financeAndECommerce/tag-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/financeAndECommerce/tag-03.svg
rename to packages/iconify-collections/assets/vender/line/financeAndECommerce/tag-03.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/at-sign.svg b/packages/iconify-collections/assets/vender/line/general/at-sign.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/at-sign.svg
rename to packages/iconify-collections/assets/vender/line/general/at-sign.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/bookmark.svg b/packages/iconify-collections/assets/vender/line/general/bookmark.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/bookmark.svg
rename to packages/iconify-collections/assets/vender/line/general/bookmark.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/check-done-01.svg b/packages/iconify-collections/assets/vender/line/general/check-done-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/check-done-01.svg
rename to packages/iconify-collections/assets/vender/line/general/check-done-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/check.svg b/packages/iconify-collections/assets/vender/line/general/check.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/check.svg
rename to packages/iconify-collections/assets/vender/line/general/check.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/checklist-square.svg b/packages/iconify-collections/assets/vender/line/general/checklist-square.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/checklist-square.svg
rename to packages/iconify-collections/assets/vender/line/general/checklist-square.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/code-assistant.svg b/packages/iconify-collections/assets/vender/line/general/code-assistant.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/code-assistant.svg
rename to packages/iconify-collections/assets/vender/line/general/code-assistant.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/dots-grid.svg b/packages/iconify-collections/assets/vender/line/general/dots-grid.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/dots-grid.svg
rename to packages/iconify-collections/assets/vender/line/general/dots-grid.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/edit-02.svg b/packages/iconify-collections/assets/vender/line/general/edit-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/edit-02.svg
rename to packages/iconify-collections/assets/vender/line/general/edit-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/edit-04.svg b/packages/iconify-collections/assets/vender/line/general/edit-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/edit-04.svg
rename to packages/iconify-collections/assets/vender/line/general/edit-04.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/edit-05.svg b/packages/iconify-collections/assets/vender/line/general/edit-05.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/edit-05.svg
rename to packages/iconify-collections/assets/vender/line/general/edit-05.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/hash-02.svg b/packages/iconify-collections/assets/vender/line/general/hash-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/hash-02.svg
rename to packages/iconify-collections/assets/vender/line/general/hash-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/info-circle.svg b/packages/iconify-collections/assets/vender/line/general/info-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/info-circle.svg
rename to packages/iconify-collections/assets/vender/line/general/info-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/link-03.svg b/packages/iconify-collections/assets/vender/line/general/link-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/link-03.svg
rename to packages/iconify-collections/assets/vender/line/general/link-03.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/link-external-02.svg b/packages/iconify-collections/assets/vender/line/general/link-external-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/link-external-02.svg
rename to packages/iconify-collections/assets/vender/line/general/link-external-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/log-in-04.svg b/packages/iconify-collections/assets/vender/line/general/log-in-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/log-in-04.svg
rename to packages/iconify-collections/assets/vender/line/general/log-in-04.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/log-out-01.svg b/packages/iconify-collections/assets/vender/line/general/log-out-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/log-out-01.svg
rename to packages/iconify-collections/assets/vender/line/general/log-out-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/log-out-04.svg b/packages/iconify-collections/assets/vender/line/general/log-out-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/log-out-04.svg
rename to packages/iconify-collections/assets/vender/line/general/log-out-04.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/magic-edit.svg b/packages/iconify-collections/assets/vender/line/general/magic-edit.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/magic-edit.svg
rename to packages/iconify-collections/assets/vender/line/general/magic-edit.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/menu-01.svg b/packages/iconify-collections/assets/vender/line/general/menu-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/menu-01.svg
rename to packages/iconify-collections/assets/vender/line/general/menu-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/pin-01.svg b/packages/iconify-collections/assets/vender/line/general/pin-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/pin-01.svg
rename to packages/iconify-collections/assets/vender/line/general/pin-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/pin-02.svg b/packages/iconify-collections/assets/vender/line/general/pin-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/pin-02.svg
rename to packages/iconify-collections/assets/vender/line/general/pin-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/plus-02.svg b/packages/iconify-collections/assets/vender/line/general/plus-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/plus-02.svg
rename to packages/iconify-collections/assets/vender/line/general/plus-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/refresh.svg b/packages/iconify-collections/assets/vender/line/general/refresh.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/refresh.svg
rename to packages/iconify-collections/assets/vender/line/general/refresh.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/search-menu.svg b/packages/iconify-collections/assets/vender/line/general/search-menu.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/search-menu.svg
rename to packages/iconify-collections/assets/vender/line/general/search-menu.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/settings-01.svg b/packages/iconify-collections/assets/vender/line/general/settings-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/settings-01.svg
rename to packages/iconify-collections/assets/vender/line/general/settings-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/settings-04.svg b/packages/iconify-collections/assets/vender/line/general/settings-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/settings-04.svg
rename to packages/iconify-collections/assets/vender/line/general/settings-04.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/target-04.svg b/packages/iconify-collections/assets/vender/line/general/target-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/target-04.svg
rename to packages/iconify-collections/assets/vender/line/general/target-04.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/upload-03.svg b/packages/iconify-collections/assets/vender/line/general/upload-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/upload-03.svg
rename to packages/iconify-collections/assets/vender/line/general/upload-03.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/upload-cloud-01.svg b/packages/iconify-collections/assets/vender/line/general/upload-cloud-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/upload-cloud-01.svg
rename to packages/iconify-collections/assets/vender/line/general/upload-cloud-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/general/x.svg b/packages/iconify-collections/assets/vender/line/general/x.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/general/x.svg
rename to packages/iconify-collections/assets/vender/line/general/x.svg
diff --git a/web/app/components/base/icons/assets/vender/line/images/image-plus.svg b/packages/iconify-collections/assets/vender/line/images/image-plus.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/images/image-plus.svg
rename to packages/iconify-collections/assets/vender/line/images/image-plus.svg
diff --git a/web/app/components/base/icons/assets/vender/line/layout/align-left-01.svg b/packages/iconify-collections/assets/vender/line/layout/align-left-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/layout/align-left-01.svg
rename to packages/iconify-collections/assets/vender/line/layout/align-left-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/layout/align-right-01.svg b/packages/iconify-collections/assets/vender/line/layout/align-right-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/layout/align-right-01.svg
rename to packages/iconify-collections/assets/vender/line/layout/align-right-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/layout/grid-01.svg b/packages/iconify-collections/assets/vender/line/layout/grid-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/layout/grid-01.svg
rename to packages/iconify-collections/assets/vender/line/layout/grid-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/layout/layout-grid-02.svg b/packages/iconify-collections/assets/vender/line/layout/layout-grid-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/layout/layout-grid-02.svg
rename to packages/iconify-collections/assets/vender/line/layout/layout-grid-02.svg
diff --git a/web/app/components/base/icons/assets/vender/line/mediaAndDevices/microphone-01.svg b/packages/iconify-collections/assets/vender/line/mediaAndDevices/microphone-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/mediaAndDevices/microphone-01.svg
rename to packages/iconify-collections/assets/vender/line/mediaAndDevices/microphone-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/mediaAndDevices/play-circle.svg b/packages/iconify-collections/assets/vender/line/mediaAndDevices/play-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/mediaAndDevices/play-circle.svg
rename to packages/iconify-collections/assets/vender/line/mediaAndDevices/play-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/line/mediaAndDevices/sliders-h.svg b/packages/iconify-collections/assets/vender/line/mediaAndDevices/sliders-h.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/mediaAndDevices/sliders-h.svg
rename to packages/iconify-collections/assets/vender/line/mediaAndDevices/sliders-h.svg
diff --git a/web/app/components/base/icons/assets/vender/line/mediaAndDevices/speaker.svg b/packages/iconify-collections/assets/vender/line/mediaAndDevices/speaker.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/mediaAndDevices/speaker.svg
rename to packages/iconify-collections/assets/vender/line/mediaAndDevices/speaker.svg
diff --git a/web/app/components/base/icons/assets/vender/line/mediaAndDevices/stop-circle.svg b/packages/iconify-collections/assets/vender/line/mediaAndDevices/stop-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/mediaAndDevices/stop-circle.svg
rename to packages/iconify-collections/assets/vender/line/mediaAndDevices/stop-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/line/mediaAndDevices/stop.svg b/packages/iconify-collections/assets/vender/line/mediaAndDevices/stop.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/mediaAndDevices/stop.svg
rename to packages/iconify-collections/assets/vender/line/mediaAndDevices/stop.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/bubble-x.svg b/packages/iconify-collections/assets/vender/line/others/bubble-x.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/bubble-x.svg
rename to packages/iconify-collections/assets/vender/line/others/bubble-x.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/colors.svg b/packages/iconify-collections/assets/vender/line/others/colors.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/colors.svg
rename to packages/iconify-collections/assets/vender/line/others/colors.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/drag-handle.svg b/packages/iconify-collections/assets/vender/line/others/drag-handle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/drag-handle.svg
rename to packages/iconify-collections/assets/vender/line/others/drag-handle.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/env.svg b/packages/iconify-collections/assets/vender/line/others/env.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/env.svg
rename to packages/iconify-collections/assets/vender/line/others/env.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/global-variable.svg b/packages/iconify-collections/assets/vender/line/others/global-variable.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/global-variable.svg
rename to packages/iconify-collections/assets/vender/line/others/global-variable.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/icon-3-dots.svg b/packages/iconify-collections/assets/vender/line/others/icon-3-dots.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/icon-3-dots.svg
rename to packages/iconify-collections/assets/vender/line/others/icon-3-dots.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/long-arrow-left.svg b/packages/iconify-collections/assets/vender/line/others/long-arrow-left.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/long-arrow-left.svg
rename to packages/iconify-collections/assets/vender/line/others/long-arrow-left.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/long-arrow-right.svg b/packages/iconify-collections/assets/vender/line/others/long-arrow-right.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/long-arrow-right.svg
rename to packages/iconify-collections/assets/vender/line/others/long-arrow-right.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/search-menu.svg b/packages/iconify-collections/assets/vender/line/others/search-menu.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/search-menu.svg
rename to packages/iconify-collections/assets/vender/line/others/search-menu.svg
diff --git a/web/app/components/base/icons/assets/vender/line/others/tools.svg b/packages/iconify-collections/assets/vender/line/others/tools.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/others/tools.svg
rename to packages/iconify-collections/assets/vender/line/others/tools.svg
diff --git a/web/app/components/base/icons/assets/vender/line/shapes/cube-outline.svg b/packages/iconify-collections/assets/vender/line/shapes/cube-outline.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/shapes/cube-outline.svg
rename to packages/iconify-collections/assets/vender/line/shapes/cube-outline.svg
diff --git a/web/app/components/base/icons/assets/vender/line/time/clock-fast-forward.svg b/packages/iconify-collections/assets/vender/line/time/clock-fast-forward.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/time/clock-fast-forward.svg
rename to packages/iconify-collections/assets/vender/line/time/clock-fast-forward.svg
diff --git a/web/app/components/base/icons/assets/vender/line/time/clock-play-slim.svg b/packages/iconify-collections/assets/vender/line/time/clock-play-slim.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/time/clock-play-slim.svg
rename to packages/iconify-collections/assets/vender/line/time/clock-play-slim.svg
diff --git a/web/app/components/base/icons/assets/vender/line/time/clock-play.svg b/packages/iconify-collections/assets/vender/line/time/clock-play.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/time/clock-play.svg
rename to packages/iconify-collections/assets/vender/line/time/clock-play.svg
diff --git a/web/app/components/base/icons/assets/vender/line/time/clock-refresh.svg b/packages/iconify-collections/assets/vender/line/time/clock-refresh.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/time/clock-refresh.svg
rename to packages/iconify-collections/assets/vender/line/time/clock-refresh.svg
diff --git a/web/app/components/base/icons/assets/vender/line/users/user-01.svg b/packages/iconify-collections/assets/vender/line/users/user-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/users/user-01.svg
rename to packages/iconify-collections/assets/vender/line/users/user-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/users/users-01.svg b/packages/iconify-collections/assets/vender/line/users/users-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/users/users-01.svg
rename to packages/iconify-collections/assets/vender/line/users/users-01.svg
diff --git a/web/app/components/base/icons/assets/vender/line/weather/stars-02.svg b/packages/iconify-collections/assets/vender/line/weather/stars-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/line/weather/stars-02.svg
rename to packages/iconify-collections/assets/vender/line/weather/stars-02.svg
diff --git a/web/app/components/base/icons/assets/vender/other/anthropic-text.svg b/packages/iconify-collections/assets/vender/other/anthropic-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/anthropic-text.svg
rename to packages/iconify-collections/assets/vender/other/anthropic-text.svg
diff --git a/web/app/components/base/icons/assets/vender/other/generator.svg b/packages/iconify-collections/assets/vender/other/generator.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/generator.svg
rename to packages/iconify-collections/assets/vender/other/generator.svg
diff --git a/web/app/components/base/icons/assets/vender/other/group.svg b/packages/iconify-collections/assets/vender/other/group.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/group.svg
rename to packages/iconify-collections/assets/vender/other/group.svg
diff --git a/web/app/components/base/icons/assets/vender/other/hourglass-shape.svg b/packages/iconify-collections/assets/vender/other/hourglass-shape.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/hourglass-shape.svg
rename to packages/iconify-collections/assets/vender/other/hourglass-shape.svg
diff --git a/web/app/components/base/icons/assets/vender/other/mcp.svg b/packages/iconify-collections/assets/vender/other/mcp.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/mcp.svg
rename to packages/iconify-collections/assets/vender/other/mcp.svg
diff --git a/web/app/components/base/icons/assets/vender/other/no-tool-placeholder.svg b/packages/iconify-collections/assets/vender/other/no-tool-placeholder.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/no-tool-placeholder.svg
rename to packages/iconify-collections/assets/vender/other/no-tool-placeholder.svg
diff --git a/web/app/components/base/icons/assets/vender/other/openai.svg b/packages/iconify-collections/assets/vender/other/openai.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/openai.svg
rename to packages/iconify-collections/assets/vender/other/openai.svg
diff --git a/web/app/components/base/icons/assets/vender/other/replay-line.svg b/packages/iconify-collections/assets/vender/other/replay-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/replay-line.svg
rename to packages/iconify-collections/assets/vender/other/replay-line.svg
diff --git a/web/app/components/base/icons/assets/vender/other/square-checklist.svg b/packages/iconify-collections/assets/vender/other/square-checklist.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/other/square-checklist.svg
rename to packages/iconify-collections/assets/vender/other/square-checklist.svg
diff --git a/web/app/components/base/icons/assets/vender/pipeline/input-field.svg b/packages/iconify-collections/assets/vender/pipeline/input-field.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/pipeline/input-field.svg
rename to packages/iconify-collections/assets/vender/pipeline/input-field.svg
diff --git a/web/app/components/base/icons/assets/vender/pipeline/pipeline-fill.svg b/packages/iconify-collections/assets/vender/pipeline/pipeline-fill.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/pipeline/pipeline-fill.svg
rename to packages/iconify-collections/assets/vender/pipeline/pipeline-fill.svg
diff --git a/web/app/components/base/icons/assets/vender/pipeline/pipeline-line.svg b/packages/iconify-collections/assets/vender/pipeline/pipeline-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/pipeline/pipeline-line.svg
rename to packages/iconify-collections/assets/vender/pipeline/pipeline-line.svg
diff --git a/web/app/components/base/icons/assets/vender/plugin/box-sparkle-fill.svg b/packages/iconify-collections/assets/vender/plugin/box-sparkle-fill.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/plugin/box-sparkle-fill.svg
rename to packages/iconify-collections/assets/vender/plugin/box-sparkle-fill.svg
diff --git a/web/app/components/base/icons/assets/vender/plugin/left-corner.svg b/packages/iconify-collections/assets/vender/plugin/left-corner.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/plugin/left-corner.svg
rename to packages/iconify-collections/assets/vender/plugin/left-corner.svg
diff --git a/web/app/components/base/icons/assets/vender/plugin/trigger.svg b/packages/iconify-collections/assets/vender/plugin/trigger.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/plugin/trigger.svg
rename to packages/iconify-collections/assets/vender/plugin/trigger.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/FinanceAndECommerce/gold-coin.svg b/packages/iconify-collections/assets/vender/solid/FinanceAndECommerce/gold-coin.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/FinanceAndECommerce/gold-coin.svg
rename to packages/iconify-collections/assets/vender/solid/FinanceAndECommerce/gold-coin.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/FinanceAndECommerce/scales-02.svg b/packages/iconify-collections/assets/vender/solid/FinanceAndECommerce/scales-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/FinanceAndECommerce/scales-02.svg
rename to packages/iconify-collections/assets/vender/solid/FinanceAndECommerce/scales-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/alertsAndFeedback/alert-triangle.svg b/packages/iconify-collections/assets/vender/solid/alertsAndFeedback/alert-triangle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/alertsAndFeedback/alert-triangle.svg
rename to packages/iconify-collections/assets/vender/solid/alertsAndFeedback/alert-triangle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/arrows/arrow-down-double-line.svg b/packages/iconify-collections/assets/vender/solid/arrows/arrow-down-double-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/arrows/arrow-down-double-line.svg
rename to packages/iconify-collections/assets/vender/solid/arrows/arrow-down-double-line.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/arrows/arrow-down-round-fill.svg b/packages/iconify-collections/assets/vender/solid/arrows/arrow-down-round-fill.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/arrows/arrow-down-round-fill.svg
rename to packages/iconify-collections/assets/vender/solid/arrows/arrow-down-round-fill.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/arrows/arrow-up-double-line.svg b/packages/iconify-collections/assets/vender/solid/arrows/arrow-up-double-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/arrows/arrow-up-double-line.svg
rename to packages/iconify-collections/assets/vender/solid/arrows/arrow-up-double-line.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/arrows/chevron-down.svg b/packages/iconify-collections/assets/vender/solid/arrows/chevron-down.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/arrows/chevron-down.svg
rename to packages/iconify-collections/assets/vender/solid/arrows/chevron-down.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/arrows/high-priority.svg b/packages/iconify-collections/assets/vender/solid/arrows/high-priority.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/arrows/high-priority.svg
rename to packages/iconify-collections/assets/vender/solid/arrows/high-priority.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/ai-text.svg b/packages/iconify-collections/assets/vender/solid/communication/ai-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/ai-text.svg
rename to packages/iconify-collections/assets/vender/solid/communication/ai-text.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/bubble-text-mod.svg b/packages/iconify-collections/assets/vender/solid/communication/bubble-text-mod.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/bubble-text-mod.svg
rename to packages/iconify-collections/assets/vender/solid/communication/bubble-text-mod.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/chat-bot.svg b/packages/iconify-collections/assets/vender/solid/communication/chat-bot.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/chat-bot.svg
rename to packages/iconify-collections/assets/vender/solid/communication/chat-bot.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/cute-robot.svg b/packages/iconify-collections/assets/vender/solid/communication/cute-robot.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/cute-robot.svg
rename to packages/iconify-collections/assets/vender/solid/communication/cute-robot.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/edit-list.svg b/packages/iconify-collections/assets/vender/solid/communication/edit-list.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/edit-list.svg
rename to packages/iconify-collections/assets/vender/solid/communication/edit-list.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/list-sparkle.svg b/packages/iconify-collections/assets/vender/solid/communication/list-sparkle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/list-sparkle.svg
rename to packages/iconify-collections/assets/vender/solid/communication/list-sparkle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/logic.svg b/packages/iconify-collections/assets/vender/solid/communication/logic.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/logic.svg
rename to packages/iconify-collections/assets/vender/solid/communication/logic.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/message-dots-circle.svg b/packages/iconify-collections/assets/vender/solid/communication/message-dots-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/message-dots-circle.svg
rename to packages/iconify-collections/assets/vender/solid/communication/message-dots-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/message-fast.svg b/packages/iconify-collections/assets/vender/solid/communication/message-fast.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/message-fast.svg
rename to packages/iconify-collections/assets/vender/solid/communication/message-fast.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/message-heart-circle.svg b/packages/iconify-collections/assets/vender/solid/communication/message-heart-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/message-heart-circle.svg
rename to packages/iconify-collections/assets/vender/solid/communication/message-heart-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/message-smile-square.svg b/packages/iconify-collections/assets/vender/solid/communication/message-smile-square.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/message-smile-square.svg
rename to packages/iconify-collections/assets/vender/solid/communication/message-smile-square.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/communication/send-03.svg b/packages/iconify-collections/assets/vender/solid/communication/send-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/communication/send-03.svg
rename to packages/iconify-collections/assets/vender/solid/communication/send-03.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/api-connection-mod.svg b/packages/iconify-collections/assets/vender/solid/development/api-connection-mod.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/api-connection-mod.svg
rename to packages/iconify-collections/assets/vender/solid/development/api-connection-mod.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/api-connection.svg b/packages/iconify-collections/assets/vender/solid/development/api-connection.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/api-connection.svg
rename to packages/iconify-collections/assets/vender/solid/development/api-connection.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/bar-chart-square-02.svg b/packages/iconify-collections/assets/vender/solid/development/bar-chart-square-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/bar-chart-square-02.svg
rename to packages/iconify-collections/assets/vender/solid/development/bar-chart-square-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/container.svg b/packages/iconify-collections/assets/vender/solid/development/container.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/container.svg
rename to packages/iconify-collections/assets/vender/solid/development/container.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/database-02.svg b/packages/iconify-collections/assets/vender/solid/development/database-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/database-02.svg
rename to packages/iconify-collections/assets/vender/solid/development/database-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/database-03.svg b/packages/iconify-collections/assets/vender/solid/development/database-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/database-03.svg
rename to packages/iconify-collections/assets/vender/solid/development/database-03.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/file-heart-02.svg b/packages/iconify-collections/assets/vender/solid/development/file-heart-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/file-heart-02.svg
rename to packages/iconify-collections/assets/vender/solid/development/file-heart-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/pattern-recognition.svg b/packages/iconify-collections/assets/vender/solid/development/pattern-recognition.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/pattern-recognition.svg
rename to packages/iconify-collections/assets/vender/solid/development/pattern-recognition.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/prompt-engineering.svg b/packages/iconify-collections/assets/vender/solid/development/prompt-engineering.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/prompt-engineering.svg
rename to packages/iconify-collections/assets/vender/solid/development/prompt-engineering.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/puzzle-piece-01.svg b/packages/iconify-collections/assets/vender/solid/development/puzzle-piece-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/puzzle-piece-01.svg
rename to packages/iconify-collections/assets/vender/solid/development/puzzle-piece-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/semantic.svg b/packages/iconify-collections/assets/vender/solid/development/semantic.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/semantic.svg
rename to packages/iconify-collections/assets/vender/solid/development/semantic.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/terminal-square.svg b/packages/iconify-collections/assets/vender/solid/development/terminal-square.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/terminal-square.svg
rename to packages/iconify-collections/assets/vender/solid/development/terminal-square.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/development/variable-02.svg b/packages/iconify-collections/assets/vender/solid/development/variable-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/development/variable-02.svg
rename to packages/iconify-collections/assets/vender/solid/development/variable-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/editor/brush-01.svg b/packages/iconify-collections/assets/vender/solid/editor/brush-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/editor/brush-01.svg
rename to packages/iconify-collections/assets/vender/solid/editor/brush-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/editor/citations.svg b/packages/iconify-collections/assets/vender/solid/editor/citations.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/editor/citations.svg
rename to packages/iconify-collections/assets/vender/solid/editor/citations.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/editor/colors.svg b/packages/iconify-collections/assets/vender/solid/editor/colors.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/editor/colors.svg
rename to packages/iconify-collections/assets/vender/solid/editor/colors.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/editor/paragraph.svg b/packages/iconify-collections/assets/vender/solid/editor/paragraph.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/editor/paragraph.svg
rename to packages/iconify-collections/assets/vender/solid/editor/paragraph.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/editor/type-square.svg b/packages/iconify-collections/assets/vender/solid/editor/type-square.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/editor/type-square.svg
rename to packages/iconify-collections/assets/vender/solid/editor/type-square.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/education/beaker-02.svg b/packages/iconify-collections/assets/vender/solid/education/beaker-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/education/beaker-02.svg
rename to packages/iconify-collections/assets/vender/solid/education/beaker-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/education/bubble-text.svg b/packages/iconify-collections/assets/vender/solid/education/bubble-text.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/education/bubble-text.svg
rename to packages/iconify-collections/assets/vender/solid/education/bubble-text.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/education/heart-02.svg b/packages/iconify-collections/assets/vender/solid/education/heart-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/education/heart-02.svg
rename to packages/iconify-collections/assets/vender/solid/education/heart-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/education/unblur.svg b/packages/iconify-collections/assets/vender/solid/education/unblur.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/education/unblur.svg
rename to packages/iconify-collections/assets/vender/solid/education/unblur.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/files/file-05.svg b/packages/iconify-collections/assets/vender/solid/files/file-05.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/files/file-05.svg
rename to packages/iconify-collections/assets/vender/solid/files/file-05.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/files/file-search-02.svg b/packages/iconify-collections/assets/vender/solid/files/file-search-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/files/file-search-02.svg
rename to packages/iconify-collections/assets/vender/solid/files/file-search-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/files/file-zip.svg b/packages/iconify-collections/assets/vender/solid/files/file-zip.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/files/file-zip.svg
rename to packages/iconify-collections/assets/vender/solid/files/file-zip.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/files/folder.svg b/packages/iconify-collections/assets/vender/solid/files/folder.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/files/folder.svg
rename to packages/iconify-collections/assets/vender/solid/files/folder.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/answer-triangle.svg b/packages/iconify-collections/assets/vender/solid/general/answer-triangle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/answer-triangle.svg
rename to packages/iconify-collections/assets/vender/solid/general/answer-triangle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/arrow-down-round-fill.svg b/packages/iconify-collections/assets/vender/solid/general/arrow-down-round-fill.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/arrow-down-round-fill.svg
rename to packages/iconify-collections/assets/vender/solid/general/arrow-down-round-fill.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/check-circle.svg b/packages/iconify-collections/assets/vender/solid/general/check-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/check-circle.svg
rename to packages/iconify-collections/assets/vender/solid/general/check-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/check-done-01.svg b/packages/iconify-collections/assets/vender/solid/general/check-done-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/check-done-01.svg
rename to packages/iconify-collections/assets/vender/solid/general/check-done-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/download-02.svg b/packages/iconify-collections/assets/vender/solid/general/download-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/download-02.svg
rename to packages/iconify-collections/assets/vender/solid/general/download-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/edit-03.svg b/packages/iconify-collections/assets/vender/solid/general/edit-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/edit-03.svg
rename to packages/iconify-collections/assets/vender/solid/general/edit-03.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/edit-04.svg b/packages/iconify-collections/assets/vender/solid/general/edit-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/edit-04.svg
rename to packages/iconify-collections/assets/vender/solid/general/edit-04.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/eye.svg b/packages/iconify-collections/assets/vender/solid/general/eye.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/eye.svg
rename to packages/iconify-collections/assets/vender/solid/general/eye.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/github.svg b/packages/iconify-collections/assets/vender/solid/general/github.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/github.svg
rename to packages/iconify-collections/assets/vender/solid/general/github.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/message-clock-circle.svg b/packages/iconify-collections/assets/vender/solid/general/message-clock-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/message-clock-circle.svg
rename to packages/iconify-collections/assets/vender/solid/general/message-clock-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/plus-circle.svg b/packages/iconify-collections/assets/vender/solid/general/plus-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/plus-circle.svg
rename to packages/iconify-collections/assets/vender/solid/general/plus-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/question-triangle.svg b/packages/iconify-collections/assets/vender/solid/general/question-triangle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/question-triangle.svg
rename to packages/iconify-collections/assets/vender/solid/general/question-triangle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/search-md.svg b/packages/iconify-collections/assets/vender/solid/general/search-md.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/search-md.svg
rename to packages/iconify-collections/assets/vender/solid/general/search-md.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/target-04.svg b/packages/iconify-collections/assets/vender/solid/general/target-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/target-04.svg
rename to packages/iconify-collections/assets/vender/solid/general/target-04.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/tool-03.svg b/packages/iconify-collections/assets/vender/solid/general/tool-03.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/tool-03.svg
rename to packages/iconify-collections/assets/vender/solid/general/tool-03.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/x-circle.svg b/packages/iconify-collections/assets/vender/solid/general/x-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/x-circle.svg
rename to packages/iconify-collections/assets/vender/solid/general/x-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/zap-fast.svg b/packages/iconify-collections/assets/vender/solid/general/zap-fast.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/zap-fast.svg
rename to packages/iconify-collections/assets/vender/solid/general/zap-fast.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/general/zap-narrow.svg b/packages/iconify-collections/assets/vender/solid/general/zap-narrow.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/general/zap-narrow.svg
rename to packages/iconify-collections/assets/vender/solid/general/zap-narrow.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/layout/grid-01.svg b/packages/iconify-collections/assets/vender/solid/layout/grid-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/layout/grid-01.svg
rename to packages/iconify-collections/assets/vender/solid/layout/grid-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/audio-support-icon.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/audio-support-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/audio-support-icon.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/audio-support-icon.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/document-support-icon.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/document-support-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/document-support-icon.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/document-support-icon.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/magic-box.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/magic-box.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/magic-box.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/magic-box.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/magic-eyes.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/magic-eyes.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/magic-eyes.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/magic-eyes.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/magic-wand.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/magic-wand.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/magic-wand.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/magic-wand.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/microphone-01.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/microphone-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/microphone-01.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/microphone-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/play.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/play.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/play.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/play.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/robot.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/robot.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/robot.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/robot.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/sliders-02.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/sliders-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/sliders-02.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/sliders-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/speaker.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/speaker.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/speaker.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/speaker.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/stop-circle.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/stop-circle.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/stop-circle.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/stop-circle.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/mediaAndDevices/video-support-icon.svg b/packages/iconify-collections/assets/vender/solid/mediaAndDevices/video-support-icon.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/mediaAndDevices/video-support-icon.svg
rename to packages/iconify-collections/assets/vender/solid/mediaAndDevices/video-support-icon.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/security/lock-01.svg b/packages/iconify-collections/assets/vender/solid/security/lock-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/security/lock-01.svg
rename to packages/iconify-collections/assets/vender/solid/security/lock-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/shapes/corner.svg b/packages/iconify-collections/assets/vender/solid/shapes/corner.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/shapes/corner.svg
rename to packages/iconify-collections/assets/vender/solid/shapes/corner.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/shapes/star-04.svg b/packages/iconify-collections/assets/vender/solid/shapes/star-04.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/shapes/star-04.svg
rename to packages/iconify-collections/assets/vender/solid/shapes/star-04.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/shapes/star-06.svg b/packages/iconify-collections/assets/vender/solid/shapes/star-06.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/shapes/star-06.svg
rename to packages/iconify-collections/assets/vender/solid/shapes/star-06.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/users/user-01.svg b/packages/iconify-collections/assets/vender/solid/users/user-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/users/user-01.svg
rename to packages/iconify-collections/assets/vender/solid/users/user-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/users/user-edit-02.svg b/packages/iconify-collections/assets/vender/solid/users/user-edit-02.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/users/user-edit-02.svg
rename to packages/iconify-collections/assets/vender/solid/users/user-edit-02.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/users/users-01.svg b/packages/iconify-collections/assets/vender/solid/users/users-01.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/users/users-01.svg
rename to packages/iconify-collections/assets/vender/solid/users/users-01.svg
diff --git a/web/app/components/base/icons/assets/vender/solid/users/users-plus.svg b/packages/iconify-collections/assets/vender/solid/users/users-plus.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/solid/users/users-plus.svg
rename to packages/iconify-collections/assets/vender/solid/users/users-plus.svg
diff --git a/web/app/components/base/icons/assets/vender/system/auto-update-line.svg b/packages/iconify-collections/assets/vender/system/auto-update-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/system/auto-update-line.svg
rename to packages/iconify-collections/assets/vender/system/auto-update-line.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/agent.svg b/packages/iconify-collections/assets/vender/workflow/agent.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/agent.svg
rename to packages/iconify-collections/assets/vender/workflow/agent.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/answer.svg b/packages/iconify-collections/assets/vender/workflow/answer.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/answer.svg
rename to packages/iconify-collections/assets/vender/workflow/answer.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/api-aggregate.svg b/packages/iconify-collections/assets/vender/workflow/api-aggregate.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/api-aggregate.svg
rename to packages/iconify-collections/assets/vender/workflow/api-aggregate.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/assigner.svg b/packages/iconify-collections/assets/vender/workflow/assigner.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/assigner.svg
rename to packages/iconify-collections/assets/vender/workflow/assigner.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/asterisk.svg b/packages/iconify-collections/assets/vender/workflow/asterisk.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/asterisk.svg
rename to packages/iconify-collections/assets/vender/workflow/asterisk.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/calendar-check-line.svg b/packages/iconify-collections/assets/vender/workflow/calendar-check-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/calendar-check-line.svg
rename to packages/iconify-collections/assets/vender/workflow/calendar-check-line.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/code.svg b/packages/iconify-collections/assets/vender/workflow/code.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/code.svg
rename to packages/iconify-collections/assets/vender/workflow/code.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/datasource.svg b/packages/iconify-collections/assets/vender/workflow/datasource.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/datasource.svg
rename to packages/iconify-collections/assets/vender/workflow/datasource.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/docs-extractor.svg b/packages/iconify-collections/assets/vender/workflow/docs-extractor.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/docs-extractor.svg
rename to packages/iconify-collections/assets/vender/workflow/docs-extractor.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/end.svg b/packages/iconify-collections/assets/vender/workflow/end.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/end.svg
rename to packages/iconify-collections/assets/vender/workflow/end.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/home.svg b/packages/iconify-collections/assets/vender/workflow/home.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/home.svg
rename to packages/iconify-collections/assets/vender/workflow/home.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/http.svg b/packages/iconify-collections/assets/vender/workflow/http.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/http.svg
rename to packages/iconify-collections/assets/vender/workflow/http.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/human-in-loop.svg b/packages/iconify-collections/assets/vender/workflow/human-in-loop.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/human-in-loop.svg
rename to packages/iconify-collections/assets/vender/workflow/human-in-loop.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/if-else.svg b/packages/iconify-collections/assets/vender/workflow/if-else.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/if-else.svg
rename to packages/iconify-collections/assets/vender/workflow/if-else.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/iteration-start.svg b/packages/iconify-collections/assets/vender/workflow/iteration-start.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/iteration-start.svg
rename to packages/iconify-collections/assets/vender/workflow/iteration-start.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/iteration.svg b/packages/iconify-collections/assets/vender/workflow/iteration.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/iteration.svg
rename to packages/iconify-collections/assets/vender/workflow/iteration.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/jinja.svg b/packages/iconify-collections/assets/vender/workflow/jinja.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/jinja.svg
rename to packages/iconify-collections/assets/vender/workflow/jinja.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/knowledge-base.svg b/packages/iconify-collections/assets/vender/workflow/knowledge-base.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/knowledge-base.svg
rename to packages/iconify-collections/assets/vender/workflow/knowledge-base.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/knowledge-retrieval.svg b/packages/iconify-collections/assets/vender/workflow/knowledge-retrieval.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/knowledge-retrieval.svg
rename to packages/iconify-collections/assets/vender/workflow/knowledge-retrieval.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/list-filter.svg b/packages/iconify-collections/assets/vender/workflow/list-filter.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/list-filter.svg
rename to packages/iconify-collections/assets/vender/workflow/list-filter.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/llm.svg b/packages/iconify-collections/assets/vender/workflow/llm.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/llm.svg
rename to packages/iconify-collections/assets/vender/workflow/llm.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/loop-end.svg b/packages/iconify-collections/assets/vender/workflow/loop-end.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/loop-end.svg
rename to packages/iconify-collections/assets/vender/workflow/loop-end.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/loop.svg b/packages/iconify-collections/assets/vender/workflow/loop.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/loop.svg
rename to packages/iconify-collections/assets/vender/workflow/loop.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/parameter-extractor.svg b/packages/iconify-collections/assets/vender/workflow/parameter-extractor.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/parameter-extractor.svg
rename to packages/iconify-collections/assets/vender/workflow/parameter-extractor.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/question-classifier.svg b/packages/iconify-collections/assets/vender/workflow/question-classifier.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/question-classifier.svg
rename to packages/iconify-collections/assets/vender/workflow/question-classifier.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/schedule.svg b/packages/iconify-collections/assets/vender/workflow/schedule.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/schedule.svg
rename to packages/iconify-collections/assets/vender/workflow/schedule.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/templating-transform.svg b/packages/iconify-collections/assets/vender/workflow/templating-transform.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/templating-transform.svg
rename to packages/iconify-collections/assets/vender/workflow/templating-transform.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/trigger-all.svg b/packages/iconify-collections/assets/vender/workflow/trigger-all.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/trigger-all.svg
rename to packages/iconify-collections/assets/vender/workflow/trigger-all.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/variable-x.svg b/packages/iconify-collections/assets/vender/workflow/variable-x.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/variable-x.svg
rename to packages/iconify-collections/assets/vender/workflow/variable-x.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/webhook-line.svg b/packages/iconify-collections/assets/vender/workflow/webhook-line.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/webhook-line.svg
rename to packages/iconify-collections/assets/vender/workflow/webhook-line.svg
diff --git a/web/app/components/base/icons/assets/vender/workflow/window-cursor.svg b/packages/iconify-collections/assets/vender/workflow/window-cursor.svg
similarity index 100%
rename from web/app/components/base/icons/assets/vender/workflow/window-cursor.svg
rename to packages/iconify-collections/assets/vender/workflow/window-cursor.svg
diff --git a/packages/iconify-collections/custom-public/chars.json b/packages/iconify-collections/custom-public/chars.json
new file mode 100644
index 0000000000..0967ef424b
--- /dev/null
+++ b/packages/iconify-collections/custom-public/chars.json
@@ -0,0 +1 @@
+{}
diff --git a/packages/iconify-collections/custom-public/icons.json b/packages/iconify-collections/custom-public/icons.json
new file mode 100644
index 0000000000..347b6145e2
--- /dev/null
+++ b/packages/iconify-collections/custom-public/icons.json
@@ -0,0 +1,572 @@
+{
+ "prefix": "custom-public",
+ "lastModified": 1775115796,
+ "icons": {
+ "avatar-user": {
+ "body": "",
+ "width": 512,
+ "height": 512
+ },
+ "billing-ar-cube-1": {
+ "body": "",
+ "width": 28
+ },
+ "billing-asterisk": {
+ "body": "",
+ "width": 28
+ },
+ "billing-aws-marketplace-dark": {
+ "body": "",
+ "width": 126,
+ "height": 25
+ },
+ "billing-aws-marketplace-light": {
+ "body": "",
+ "width": 126,
+ "height": 24
+ },
+ "billing-azure": {
+ "body": "",
+ "width": 21,
+ "height": 20
+ },
+ "billing-buildings": {
+ "body": ""
+ },
+ "billing-diamond": {
+ "body": ""
+ },
+ "billing-google-cloud": {
+ "body": "",
+ "width": 22,
+ "height": 18
+ },
+ "billing-group-2": {
+ "body": ""
+ },
+ "billing-keyframe": {
+ "body": ""
+ },
+ "billing-sparkles-soft": {
+ "body": "",
+ "width": 13,
+ "height": 13
+ },
+ "common-d": {
+ "body": ""
+ },
+ "common-diagonal-dividing-line": {
+ "body": "",
+ "width": 7,
+ "height": 20
+ },
+ "common-dify": {
+ "body": "",
+ "width": 50,
+ "height": 26
+ },
+ "common-gdpr": {
+ "body": "",
+ "width": 23,
+ "height": 28
+ },
+ "common-github": {
+ "body": "",
+ "width": 18,
+ "height": 18
+ },
+ "common-highlight": {
+ "body": "",
+ "width": 46,
+ "height": 24
+ },
+ "common-iso": {
+ "body": "",
+ "width": 64,
+ "height": 64
+ },
+ "common-line-3": {
+ "body": "",
+ "width": 5,
+ "height": 12
+ },
+ "common-lock": {
+ "body": ""
+ },
+ "common-message-chat-square": {
+ "body": ""
+ },
+ "common-multi-path-retrieval": {
+ "body": "",
+ "width": 36,
+ "height": 36
+ },
+ "common-n-to-1-retrieval": {
+ "body": "",
+ "width": 36,
+ "height": 36
+ },
+ "common-notion": {
+ "body": "",
+ "width": 20,
+ "height": 20
+ },
+ "common-soc2": {
+ "body": "",
+ "width": 28,
+ "height": 28
+ },
+ "common-sparkles-soft": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "common-sparkles-soft-accent": {
+ "body": ""
+ },
+ "education-triangle": {
+ "body": "",
+ "height": 22
+ },
+ "files-csv": {
+ "body": ""
+ },
+ "files-doc": {
+ "body": ""
+ },
+ "files-docx": {
+ "body": ""
+ },
+ "files-html": {
+ "body": ""
+ },
+ "files-json": {
+ "body": ""
+ },
+ "files-md": {
+ "body": ""
+ },
+ "files-pdf": {
+ "body": ""
+ },
+ "files-txt": {
+ "body": ""
+ },
+ "files-unknown": {
+ "body": ""
+ },
+ "files-xlsx": {
+ "body": "",
+ "width": 24,
+ "height": 26
+ },
+ "files-yaml": {
+ "body": "",
+ "width": 24,
+ "height": 26
+ },
+ "knowledge-file": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "knowledge-option-card-effect-blue": {
+ "body": "",
+ "width": 214,
+ "height": 124
+ },
+ "knowledge-option-card-effect-blue-light": {
+ "body": "",
+ "width": 212,
+ "height": 74
+ },
+ "knowledge-option-card-effect-orange": {
+ "body": ""
+ },
+ "knowledge-option-card-effect-purple": {
+ "body": ""
+ },
+ "knowledge-option-card-effect-teal": {
+ "body": "",
+ "width": 212,
+ "height": 92
+ },
+ "knowledge-selection-mod": {
+ "body": "",
+ "width": 10,
+ "height": 10
+ },
+ "knowledge-watercrawl": {
+ "body": "",
+ "width": 500,
+ "height": 500
+ },
+ "knowledge-dataset-card-external-knowledge-base": {
+ "body": ""
+ },
+ "knowledge-dataset-card-general": {
+ "body": ""
+ },
+ "knowledge-dataset-card-graph": {
+ "body": ""
+ },
+ "knowledge-dataset-card-parent-child": {
+ "body": ""
+ },
+ "knowledge-dataset-card-qa": {
+ "body": ""
+ },
+ "knowledge-online-drive-buckets-blue": {
+ "body": "",
+ "height": 21
+ },
+ "knowledge-online-drive-buckets-gray": {
+ "body": "",
+ "width": 18
+ },
+ "knowledge-online-drive-folder": {
+ "body": ""
+ },
+ "llm-anthropic": {
+ "body": ""
+ },
+ "llm-anthropic-dark": {
+ "body": "",
+ "width": 90,
+ "height": 10
+ },
+ "llm-anthropic-light": {
+ "body": "",
+ "width": 90,
+ "height": 10
+ },
+ "llm-anthropic-short-light": {
+ "body": "",
+ "width": 40,
+ "height": 40
+ },
+ "llm-anthropic-text": {
+ "body": "",
+ "width": 90,
+ "height": 20
+ },
+ "llm-azure-openai-service": {
+ "body": "",
+ "width": 56
+ },
+ "llm-azure-openai-service-text": {
+ "body": "",
+ "width": 212
+ },
+ "llm-azureai": {
+ "body": ""
+ },
+ "llm-azureai-text": {
+ "body": "",
+ "width": 92
+ },
+ "llm-baichuan": {
+ "body": ""
+ },
+ "llm-baichuan-text": {
+ "body": "",
+ "width": 130
+ },
+ "llm-chatglm": {
+ "body": ""
+ },
+ "llm-chatglm-text": {
+ "body": "",
+ "width": 100
+ },
+ "llm-cohere": {
+ "body": "",
+ "width": 22,
+ "height": 22
+ },
+ "llm-cohere-text": {
+ "body": "",
+ "width": 120
+ },
+ "llm-deepseek": {
+ "body": "",
+ "width": 40,
+ "height": 40
+ },
+ "llm-gemini": {
+ "body": "",
+ "width": 40,
+ "height": 40
+ },
+ "llm-gpt-3": {
+ "body": ""
+ },
+ "llm-gpt-4": {
+ "body": ""
+ },
+ "llm-grok": {
+ "body": "",
+ "width": 40,
+ "height": 40
+ },
+ "llm-huggingface": {
+ "body": ""
+ },
+ "llm-huggingface-text": {
+ "body": "",
+ "width": 120
+ },
+ "llm-huggingface-text-hub": {
+ "body": "",
+ "width": 151
+ },
+ "llm-iflytek-spark": {
+ "body": ""
+ },
+ "llm-iflytek-spark-text": {
+ "body": "",
+ "width": 150
+ },
+ "llm-iflytek-spark-text-cn": {
+ "body": "",
+ "width": 84
+ },
+ "llm-jina": {
+ "body": ""
+ },
+ "llm-jina-text": {
+ "body": "",
+ "width": 58
+ },
+ "llm-microsoft": {
+ "body": "",
+ "width": 21,
+ "height": 22
+ },
+ "llm-openai-black": {
+ "body": ""
+ },
+ "llm-openai-blue": {
+ "body": ""
+ },
+ "llm-openai-green": {
+ "body": ""
+ },
+ "llm-openai-teal": {
+ "body": ""
+ },
+ "llm-openai-text": {
+ "body": "",
+ "width": 52,
+ "height": 20
+ },
+ "llm-openai-transparent": {
+ "body": ""
+ },
+ "llm-openai-violet": {
+ "body": ""
+ },
+ "llm-openai-yellow": {
+ "body": ""
+ },
+ "llm-openllm": {
+ "body": ""
+ },
+ "llm-openllm-text": {
+ "body": "",
+ "width": 92,
+ "height": 25
+ },
+ "llm-replicate": {
+ "body": ""
+ },
+ "llm-replicate-text": {
+ "body": "",
+ "width": 92
+ },
+ "llm-xorbits-inference": {
+ "body": ""
+ },
+ "llm-xorbits-inference-text": {
+ "body": "",
+ "width": 152
+ },
+ "llm-zhipuai": {
+ "body": ""
+ },
+ "llm-zhipuai-text": {
+ "body": "",
+ "width": 89,
+ "height": 32
+ },
+ "llm-zhipuai-text-cn": {
+ "body": "",
+ "width": 86,
+ "height": 32
+ },
+ "model-checked": {
+ "body": ""
+ },
+ "other-default-tool-icon": {
+ "body": ""
+ },
+ "other-icon-3-dots": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "other-message-3-fill": {
+ "body": ""
+ },
+ "other-row-struct": {
+ "body": "",
+ "width": 624,
+ "height": 48
+ },
+ "other-slack": {
+ "body": "",
+ "width": 27,
+ "height": 27
+ },
+ "other-teams": {
+ "body": "",
+ "width": 28,
+ "height": 28
+ },
+ "plugins-google": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "plugins-partner-dark": {
+ "body": ""
+ },
+ "plugins-partner-light": {
+ "body": ""
+ },
+ "plugins-verified-dark": {
+ "body": ""
+ },
+ "plugins-verified-light": {
+ "body": ""
+ },
+ "plugins-web-reader": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "plugins-wikipedia": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "thought-data-set": {
+ "body": ""
+ },
+ "thought-loading": {
+ "body": ""
+ },
+ "thought-search": {
+ "body": ""
+ },
+ "thought-thought-list": {
+ "body": ""
+ },
+ "thought-web-reader": {
+ "body": ""
+ },
+ "tracing-aliyun-icon": {
+ "body": "",
+ "width": 65
+ },
+ "tracing-aliyun-icon-big": {
+ "body": "",
+ "width": 96,
+ "height": 24
+ },
+ "tracing-arize-icon": {
+ "body": ""
+ },
+ "tracing-arize-icon-big": {
+ "body": "",
+ "width": 111,
+ "height": 24
+ },
+ "tracing-databricks-icon": {
+ "body": "",
+ "width": 100
+ },
+ "tracing-databricks-icon-big": {
+ "body": "",
+ "width": 151,
+ "height": 24
+ },
+ "tracing-langfuse-icon": {
+ "body": ""
+ },
+ "tracing-langfuse-icon-big": {
+ "body": "",
+ "width": 111,
+ "height": 24
+ },
+ "tracing-langsmith-icon": {
+ "body": "",
+ "width": 84,
+ "height": 14
+ },
+ "tracing-langsmith-icon-big": {
+ "body": "",
+ "width": 124,
+ "height": 20
+ },
+ "tracing-mlflow-icon": {
+ "body": "",
+ "width": 43
+ },
+ "tracing-mlflow-icon-big": {
+ "body": "",
+ "width": 65,
+ "height": 24
+ },
+ "tracing-opik-icon": {
+ "body": "",
+ "width": 47.134
+ },
+ "tracing-opik-icon-big": {
+ "body": "",
+ "width": 70.701,
+ "height": 24
+ },
+ "tracing-phoenix-icon": {
+ "body": ""
+ },
+ "tracing-phoenix-icon-big": {
+ "body": "",
+ "width": 111,
+ "height": 24
+ },
+ "tracing-tencent-icon": {
+ "body": "",
+ "width": 80,
+ "height": 18
+ },
+ "tracing-tencent-icon-big": {
+ "body": "",
+ "width": 80,
+ "height": 18
+ },
+ "tracing-tracing-icon": {
+ "body": "",
+ "width": 20,
+ "height": 20
+ },
+ "tracing-weave-icon": {
+ "body": "",
+ "width": 120
+ },
+ "tracing-weave-icon-big": {
+ "body": "",
+ "width": 120
+ }
+ }
+}
diff --git a/packages/iconify-collections/custom-public/index.d.ts b/packages/iconify-collections/custom-public/index.d.ts
new file mode 100644
index 0000000000..ecca5633d4
--- /dev/null
+++ b/packages/iconify-collections/custom-public/index.d.ts
@@ -0,0 +1,55 @@
+export interface IconifyJSON {
+ prefix: string
+  icons: Record<string, IconifyIcon>
+  aliases?: Record<string, IconifyAlias>
+ width?: number
+ height?: number
+ lastModified?: number
+}
+
+export interface IconifyIcon {
+ body: string
+ left?: number
+ top?: number
+ width?: number
+ height?: number
+ rotate?: 0 | 1 | 2 | 3
+ hFlip?: boolean
+ vFlip?: boolean
+}
+
+export interface IconifyAlias extends Omit<IconifyIcon, 'body'> {
+ parent: string
+}
+
+export interface IconifyInfo {
+ prefix: string
+ name: string
+ total: number
+ version: string
+ author?: {
+ name: string
+ url?: string
+ }
+ license?: {
+ title: string
+ spdx?: string
+ url?: string
+ }
+ samples?: string[]
+ palette?: boolean
+}
+
+export interface IconifyMetaData {
+ [key: string]: unknown
+}
+
+export interface IconifyChars {
+ [key: string]: string
+}
+
+export declare const icons: IconifyJSON
+export declare const info: IconifyInfo
+export declare const metadata: IconifyMetaData
+export declare const chars: IconifyChars
+
diff --git a/packages/iconify-collections/custom-public/index.js b/packages/iconify-collections/custom-public/index.js
new file mode 100644
index 0000000000..81c1d0f5c4
--- /dev/null
+++ b/packages/iconify-collections/custom-public/index.js
@@ -0,0 +1,9 @@
+'use strict'
+
+const icons = require('./icons.json')
+const info = require('./info.json')
+const metadata = require('./metadata.json')
+const chars = require('./chars.json')
+
+module.exports = { icons, info, metadata, chars }
+
diff --git a/packages/iconify-collections/custom-public/index.mjs b/packages/iconify-collections/custom-public/index.mjs
new file mode 100644
index 0000000000..6c1108a92d
--- /dev/null
+++ b/packages/iconify-collections/custom-public/index.mjs
@@ -0,0 +1,7 @@
+import icons from './icons.json' with { type: 'json' }
+import info from './info.json' with { type: 'json' }
+import metadata from './metadata.json' with { type: 'json' }
+import chars from './chars.json' with { type: 'json' }
+
+export { icons, info, metadata, chars }
+
diff --git a/packages/iconify-collections/custom-public/info.json b/packages/iconify-collections/custom-public/info.json
new file mode 100644
index 0000000000..8b5572de6f
--- /dev/null
+++ b/packages/iconify-collections/custom-public/info.json
@@ -0,0 +1,24 @@
+{
+ "prefix": "custom-public",
+ "name": "Dify Custom Public",
+ "total": 142,
+ "version": "0.0.0-private",
+ "author": {
+ "name": "LangGenius, Inc.",
+ "url": "https://github.com/langgenius/dify"
+ },
+ "license": {
+ "title": "Modified Apache 2.0",
+ "spdx": "Apache-2.0",
+ "url": "https://github.com/langgenius/dify/blob/main/LICENSE"
+ },
+ "samples": [
+ "avatar-user",
+ "billing-ar-cube-1",
+ "billing-asterisk",
+ "billing-aws-marketplace-dark",
+ "billing-aws-marketplace-light",
+ "billing-azure"
+ ],
+ "palette": false
+}
diff --git a/packages/iconify-collections/custom-public/metadata.json b/packages/iconify-collections/custom-public/metadata.json
new file mode 100644
index 0000000000..0967ef424b
--- /dev/null
+++ b/packages/iconify-collections/custom-public/metadata.json
@@ -0,0 +1 @@
+{}
diff --git a/packages/iconify-collections/custom-vender/chars.json b/packages/iconify-collections/custom-vender/chars.json
new file mode 100644
index 0000000000..0967ef424b
--- /dev/null
+++ b/packages/iconify-collections/custom-vender/chars.json
@@ -0,0 +1 @@
+{}
diff --git a/packages/iconify-collections/custom-vender/icons.json b/packages/iconify-collections/custom-vender/icons.json
new file mode 100644
index 0000000000..a7dc8e75e0
--- /dev/null
+++ b/packages/iconify-collections/custom-vender/icons.json
@@ -0,0 +1,1098 @@
+{
+ "prefix": "custom-vender",
+ "lastModified": 1775115796,
+ "icons": {
+ "features-citations": {
+ "body": ""
+ },
+ "features-content-moderation": {
+ "body": ""
+ },
+ "features-document": {
+ "body": ""
+ },
+ "features-folder-upload": {
+ "body": ""
+ },
+ "features-love-message": {
+ "body": ""
+ },
+ "features-message-fast": {
+ "body": ""
+ },
+ "features-microphone-01": {
+ "body": ""
+ },
+ "features-text-to-audio": {
+ "body": ""
+ },
+ "features-virtual-assistant": {
+ "body": ""
+ },
+ "features-vision": {
+ "body": ""
+ },
+ "knowledge-add-chunks": {
+ "body": "",
+ "width": 20,
+ "height": 20
+ },
+ "knowledge-api-aggregate": {
+ "body": "",
+ "width": 16
+ },
+ "knowledge-arrow-shape": {
+ "body": "",
+ "width": 24,
+ "height": 11
+ },
+ "knowledge-chunk": {
+ "body": "",
+ "width": 10,
+ "height": 10
+ },
+ "knowledge-collapse": {
+ "body": "",
+ "width": 16
+ },
+ "knowledge-divider": {
+ "body": "",
+ "width": 6,
+ "height": 30
+ },
+ "knowledge-economic": {
+ "body": "",
+ "height": 18
+ },
+ "knowledge-full-text-search": {
+ "body": "",
+ "width": 15
+ },
+ "knowledge-general-chunk": {
+ "body": "",
+ "height": 18
+ },
+ "knowledge-high-quality": {
+ "body": "",
+ "height": 18
+ },
+ "knowledge-hybrid-search": {
+ "body": "",
+ "width": 16
+ },
+ "knowledge-parent-child-chunk": {
+ "body": "",
+ "height": 18
+ },
+ "knowledge-question-and-answer": {
+ "body": "",
+ "height": 18
+ },
+ "knowledge-search-lines-sparkle": {
+ "body": "",
+ "width": 16
+ },
+ "knowledge-search-menu": {
+ "body": "",
+ "width": 32,
+ "height": 33
+ },
+ "knowledge-vector-search": {
+ "body": "",
+ "width": 16
+ },
+ "line-alertsAndFeedback-alert-triangle": {
+ "body": ""
+ },
+ "line-alertsAndFeedback-thumbs-down": {
+ "body": ""
+ },
+ "line-alertsAndFeedback-thumbs-up": {
+ "body": ""
+ },
+ "line-alertsAndFeedback-warning": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-arrows-arrow-narrow-left": {
+ "body": "",
+ "width": 17,
+ "height": 16
+ },
+ "line-arrows-arrow-up-right": {
+ "body": ""
+ },
+ "line-arrows-chevron-down-double": {
+ "body": "",
+ "width": 12,
+ "height": 13
+ },
+ "line-arrows-chevron-right": {
+ "body": ""
+ },
+ "line-arrows-chevron-selector-vertical": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-arrows-iconr": {
+ "body": ""
+ },
+ "line-arrows-refresh-ccw-01": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-arrows-refresh-cw-05": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "line-arrows-reverse-left": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "line-communication-ai-text": {
+ "body": ""
+ },
+ "line-communication-chat-bot": {
+ "body": ""
+ },
+ "line-communication-chat-bot-slim": {
+ "body": "",
+ "width": 48,
+ "height": 48
+ },
+ "line-communication-cute-robot": {
+ "body": ""
+ },
+ "line-communication-message-check-remove": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-communication-message-fast-plus": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-development-artificial-brain": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-development-bar-chart-square-02": {
+ "body": ""
+ },
+ "line-development-brackets-x": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-development-code-browser": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-development-container": {
+ "body": ""
+ },
+ "line-development-database-01": {
+ "body": "",
+ "width": 17
+ },
+ "line-development-database-03": {
+ "body": ""
+ },
+ "line-development-file-heart-02": {
+ "body": ""
+ },
+ "line-development-git-branch-01": {
+ "body": ""
+ },
+ "line-development-prompt-engineering": {
+ "body": ""
+ },
+ "line-development-puzzle-piece-01": {
+ "body": ""
+ },
+ "line-development-terminal-square": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-development-variable": {
+ "body": ""
+ },
+ "line-development-webhooks": {
+ "body": ""
+ },
+ "line-editor-align-left": {
+ "body": ""
+ },
+ "line-editor-bezier-curve-03": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-editor-collapse": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "line-editor-colors": {
+ "body": ""
+ },
+ "line-editor-image-indent-left": {
+ "body": ""
+ },
+ "line-editor-left-indent-02": {
+ "body": ""
+ },
+ "line-editor-letter-spacing-01": {
+ "body": ""
+ },
+ "line-editor-type-square": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-education-book-open-01": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-files-copy": {
+ "body": ""
+ },
+ "line-files-copy-check": {
+ "body": ""
+ },
+ "line-files-file-02": {
+ "body": ""
+ },
+ "line-files-file-arrow-01": {
+ "body": ""
+ },
+ "line-files-file-check-02": {
+ "body": ""
+ },
+ "line-files-file-download-02": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-files-file-plus-01": {
+ "body": ""
+ },
+ "line-files-file-plus-02": {
+ "body": ""
+ },
+ "line-files-file-text": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-files-file-upload": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-files-folder": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-financeAndECommerce-balance": {
+ "body": ""
+ },
+ "line-financeAndECommerce-coins-stacked-01": {
+ "body": ""
+ },
+ "line-financeAndECommerce-credits-coin": {
+ "body": "",
+ "width": 10,
+ "height": 10
+ },
+ "line-financeAndECommerce-gold-coin": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "line-financeAndECommerce-receipt-list": {
+ "body": ""
+ },
+ "line-financeAndECommerce-tag-01": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-financeAndECommerce-tag-03": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "line-general-at-sign": {
+ "body": ""
+ },
+ "line-general-bookmark": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-check": {
+ "body": ""
+ },
+ "line-general-check-done-01": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-checklist-square": {
+ "body": "",
+ "width": 32,
+ "height": 32
+ },
+ "line-general-code-assistant": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-dots-grid": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-general-edit-02": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-general-edit-04": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-edit-05": {
+ "body": ""
+ },
+ "line-general-hash-02": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-general-info-circle": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-general-link-03": {
+ "body": "",
+ "width": 17
+ },
+ "line-general-link-external-02": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-general-log-in-04": {
+ "body": ""
+ },
+ "line-general-log-out-01": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-general-log-out-04": {
+ "body": ""
+ },
+ "line-general-magic-edit": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-menu-01": {
+ "body": ""
+ },
+ "line-general-pin-01": {
+ "body": ""
+ },
+ "line-general-pin-02": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-plus-02": {
+ "body": "",
+ "width": 10,
+ "height": 10
+ },
+ "line-general-refresh": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-search-menu": {
+ "body": "",
+ "width": 32,
+ "height": 32
+ },
+ "line-general-settings-01": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-general-settings-04": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-general-target-04": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-general-upload-03": {
+ "body": ""
+ },
+ "line-general-upload-cloud-01": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-general-x": {
+ "body": ""
+ },
+ "line-images-image-plus": {
+ "body": ""
+ },
+ "line-layout-align-left-01": {
+ "body": ""
+ },
+ "line-layout-align-right-01": {
+ "body": ""
+ },
+ "line-layout-grid-01": {
+ "body": "",
+ "width": 17,
+ "height": 16
+ },
+ "line-layout-layout-grid-02": {
+ "body": ""
+ },
+ "line-mediaAndDevices-microphone-01": {
+ "body": ""
+ },
+ "line-mediaAndDevices-play-circle": {
+ "body": ""
+ },
+ "line-mediaAndDevices-sliders-h": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-mediaAndDevices-speaker": {
+ "body": ""
+ },
+ "line-mediaAndDevices-stop": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-mediaAndDevices-stop-circle": {
+ "body": "",
+ "width": 17
+ },
+ "line-others-bubble-x": {
+ "body": ""
+ },
+ "line-others-colors": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "line-others-drag-handle": {
+ "body": ""
+ },
+ "line-others-env": {
+ "body": ""
+ },
+ "line-others-global-variable": {
+ "body": ""
+ },
+ "line-others-icon-3-dots": {
+ "body": ""
+ },
+ "line-others-long-arrow-left": {
+ "body": "",
+ "width": 21,
+ "height": 8
+ },
+ "line-others-long-arrow-right": {
+ "body": "",
+ "width": 26,
+ "height": 8
+ },
+ "line-others-search-menu": {
+ "body": "",
+ "width": 32,
+ "height": 32
+ },
+ "line-others-tools": {
+ "body": "",
+ "height": 17
+ },
+ "line-shapes-cube-outline": {
+ "body": "",
+ "height": 17
+ },
+ "line-time-clock-fast-forward": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "line-time-clock-play": {
+ "body": ""
+ },
+ "line-time-clock-play-slim": {
+ "body": "",
+ "width": 32,
+ "height": 32
+ },
+ "line-time-clock-refresh": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "line-users-user-01": {
+ "body": ""
+ },
+ "line-users-users-01": {
+ "body": ""
+ },
+ "line-weather-stars-02": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "other-anthropic-text": {
+ "body": "",
+ "width": 90,
+ "height": 20
+ },
+ "other-generator": {
+ "body": ""
+ },
+ "other-group": {
+ "body": "",
+ "height": 16
+ },
+ "other-hourglass-shape": {
+ "body": "",
+ "width": 8
+ },
+ "other-mcp": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "other-no-tool-placeholder": {
+ "body": "",
+ "width": 204,
+ "height": 36
+ },
+ "other-openai": {
+ "body": "",
+ "width": 80,
+ "height": 22
+ },
+ "other-replay-line": {
+ "body": "",
+ "width": 20,
+ "height": 20
+ },
+ "other-square-checklist": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "pipeline-input-field": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "pipeline-pipeline-fill": {
+ "body": ""
+ },
+ "pipeline-pipeline-line": {
+ "body": ""
+ },
+ "plugin-box-sparkle-fill": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "plugin-left-corner": {
+ "body": "",
+ "width": 13,
+ "height": 20
+ },
+ "plugin-trigger": {
+ "body": ""
+ },
+ "solid-FinanceAndECommerce-gold-coin": {
+ "body": ""
+ },
+ "solid-FinanceAndECommerce-scales-02": {
+ "body": ""
+ },
+ "solid-alertsAndFeedback-alert-triangle": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "solid-arrows-arrow-down-double-line": {
+ "body": ""
+ },
+ "solid-arrows-arrow-down-round-fill": {
+ "body": ""
+ },
+ "solid-arrows-arrow-up-double-line": {
+ "body": ""
+ },
+ "solid-arrows-chevron-down": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "solid-arrows-high-priority": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "solid-communication-ai-text": {
+ "body": ""
+ },
+ "solid-communication-bubble-text-mod": {
+ "body": ""
+ },
+ "solid-communication-chat-bot": {
+ "body": "",
+ "width": 13,
+ "height": 12
+ },
+ "solid-communication-cute-robot": {
+ "body": ""
+ },
+ "solid-communication-edit-list": {
+ "body": ""
+ },
+ "solid-communication-list-sparkle": {
+ "body": ""
+ },
+ "solid-communication-logic": {
+ "body": ""
+ },
+ "solid-communication-message-dots-circle": {
+ "body": ""
+ },
+ "solid-communication-message-fast": {
+ "body": ""
+ },
+ "solid-communication-message-heart-circle": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-communication-message-smile-square": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-communication-send-03": {
+ "body": "",
+ "width": 20,
+ "height": 20
+ },
+ "solid-development-api-connection": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "solid-development-api-connection-mod": {
+ "body": ""
+ },
+ "solid-development-bar-chart-square-02": {
+ "body": ""
+ },
+ "solid-development-container": {
+ "body": "",
+ "width": 17
+ },
+ "solid-development-database-02": {
+ "body": "",
+ "width": 17
+ },
+ "solid-development-database-03": {
+ "body": ""
+ },
+ "solid-development-file-heart-02": {
+ "body": ""
+ },
+ "solid-development-pattern-recognition": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "solid-development-prompt-engineering": {
+ "body": ""
+ },
+ "solid-development-puzzle-piece-01": {
+ "body": "",
+ "width": 17
+ },
+ "solid-development-semantic": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "solid-development-terminal-square": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "solid-development-variable-02": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "solid-editor-brush-01": {
+ "body": ""
+ },
+ "solid-editor-citations": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-editor-colors": {
+ "body": ""
+ },
+ "solid-editor-paragraph": {
+ "body": ""
+ },
+ "solid-editor-type-square": {
+ "body": ""
+ },
+ "solid-education-beaker-02": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "solid-education-bubble-text": {
+ "body": ""
+ },
+ "solid-education-heart-02": {
+ "body": ""
+ },
+ "solid-education-unblur": {
+ "body": ""
+ },
+ "solid-files-file-05": {
+ "body": ""
+ },
+ "solid-files-file-search-02": {
+ "body": ""
+ },
+ "solid-files-file-zip": {
+ "body": ""
+ },
+ "solid-files-folder": {
+ "body": ""
+ },
+ "solid-general-answer-triangle": {
+ "body": "",
+ "width": 8,
+ "height": 12
+ },
+ "solid-general-arrow-down-round-fill": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-general-check-circle": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-general-check-done-01": {
+ "body": ""
+ },
+ "solid-general-download-02": {
+ "body": ""
+ },
+ "solid-general-edit-03": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "solid-general-edit-04": {
+ "body": ""
+ },
+ "solid-general-eye": {
+ "body": ""
+ },
+ "solid-general-github": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-general-message-clock-circle": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-general-plus-circle": {
+ "body": ""
+ },
+ "solid-general-question-triangle": {
+ "body": "",
+ "width": 8,
+ "height": 12
+ },
+ "solid-general-search-md": {
+ "body": ""
+ },
+ "solid-general-target-04": {
+ "body": ""
+ },
+ "solid-general-tool-03": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-general-x-circle": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-general-zap-fast": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "solid-general-zap-narrow": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "solid-layout-grid-01": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-audio-support-icon": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-document-support-icon": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-magic-box": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-magic-eyes": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-magic-wand": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-microphone-01": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-mediaAndDevices-play": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-robot": {
+ "body": ""
+ },
+ "solid-mediaAndDevices-sliders-02": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "solid-mediaAndDevices-speaker": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "solid-mediaAndDevices-stop-circle": {
+ "body": "",
+ "width": 20,
+ "height": 20
+ },
+ "solid-mediaAndDevices-video-support-icon": {
+ "body": ""
+ },
+ "solid-security-lock-01": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "solid-shapes-corner": {
+ "body": "",
+ "width": 13,
+ "height": 20
+ },
+ "solid-shapes-star-04": {
+ "body": "",
+ "width": 11,
+ "height": 10
+ },
+ "solid-shapes-star-06": {
+ "body": ""
+ },
+ "solid-users-user-01": {
+ "body": ""
+ },
+ "solid-users-user-edit-02": {
+ "body": "",
+ "width": 14,
+ "height": 14
+ },
+ "solid-users-users-01": {
+ "body": ""
+ },
+ "solid-users-users-plus": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "system-auto-update-line": {
+ "body": "",
+ "width": 24,
+ "height": 24
+ },
+ "workflow-agent": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-answer": {
+ "body": ""
+ },
+ "workflow-api-aggregate": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-assigner": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-asterisk": {
+ "body": ""
+ },
+ "workflow-calendar-check-line": {
+ "body": ""
+ },
+ "workflow-code": {
+ "body": ""
+ },
+ "workflow-datasource": {
+ "body": ""
+ },
+ "workflow-docs-extractor": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-end": {
+ "body": ""
+ },
+ "workflow-home": {
+ "body": ""
+ },
+ "workflow-http": {
+ "body": ""
+ },
+ "workflow-human-in-loop": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-if-else": {
+ "body": ""
+ },
+ "workflow-iteration": {
+ "body": ""
+ },
+ "workflow-iteration-start": {
+ "body": "",
+ "width": 12,
+ "height": 12
+ },
+ "workflow-jinja": {
+ "body": "",
+ "width": 24,
+ "height": 12
+ },
+ "workflow-knowledge-base": {
+ "body": ""
+ },
+ "workflow-knowledge-retrieval": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-list-filter": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-llm": {
+ "body": ""
+ },
+ "workflow-loop": {
+ "body": "",
+ "width": 18,
+ "height": 16
+ },
+ "workflow-loop-end": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-parameter-extractor": {
+ "body": ""
+ },
+ "workflow-question-classifier": {
+ "body": ""
+ },
+ "workflow-schedule": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-templating-transform": {
+ "body": ""
+ },
+ "workflow-trigger-all": {
+ "body": ""
+ },
+ "workflow-variable-x": {
+ "body": ""
+ },
+ "workflow-webhook-line": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ },
+ "workflow-window-cursor": {
+ "body": "",
+ "width": 16,
+ "height": 16
+ }
+ }
+}
diff --git a/packages/iconify-collections/custom-vender/index.d.ts b/packages/iconify-collections/custom-vender/index.d.ts
new file mode 100644
index 0000000000..ecca5633d4
--- /dev/null
+++ b/packages/iconify-collections/custom-vender/index.d.ts
@@ -0,0 +1,55 @@
+export interface IconifyJSON {
+ prefix: string
+ icons: Record<string, IconifyIcon>
+ aliases?: Record<string, IconifyAlias>
+ width?: number
+ height?: number
+ lastModified?: number
+}
+
+export interface IconifyIcon {
+ body: string
+ left?: number
+ top?: number
+ width?: number
+ height?: number
+ rotate?: 0 | 1 | 2 | 3
+ hFlip?: boolean
+ vFlip?: boolean
+}
+
+export interface IconifyAlias extends Omit<IconifyIcon, 'body'> {
+ parent: string
+}
+
+export interface IconifyInfo {
+ prefix: string
+ name: string
+ total: number
+ version: string
+ author?: {
+ name: string
+ url?: string
+ }
+ license?: {
+ title: string
+ spdx?: string
+ url?: string
+ }
+ samples?: string[]
+ palette?: boolean
+}
+
+export interface IconifyMetaData {
+ [key: string]: unknown
+}
+
+export interface IconifyChars {
+ [key: string]: string
+}
+
+export declare const icons: IconifyJSON
+export declare const info: IconifyInfo
+export declare const metadata: IconifyMetaData
+export declare const chars: IconifyChars
+
diff --git a/packages/iconify-collections/custom-vender/index.js b/packages/iconify-collections/custom-vender/index.js
new file mode 100644
index 0000000000..81c1d0f5c4
--- /dev/null
+++ b/packages/iconify-collections/custom-vender/index.js
@@ -0,0 +1,9 @@
+'use strict'
+
+const icons = require('./icons.json')
+const info = require('./info.json')
+const metadata = require('./metadata.json')
+const chars = require('./chars.json')
+
+module.exports = { icons, info, metadata, chars }
+
diff --git a/packages/iconify-collections/custom-vender/index.mjs b/packages/iconify-collections/custom-vender/index.mjs
new file mode 100644
index 0000000000..6c1108a92d
--- /dev/null
+++ b/packages/iconify-collections/custom-vender/index.mjs
@@ -0,0 +1,7 @@
+import icons from './icons.json' with { type: 'json' }
+import info from './info.json' with { type: 'json' }
+import metadata from './metadata.json' with { type: 'json' }
+import chars from './chars.json' with { type: 'json' }
+
+export { icons, info, metadata, chars }
+
diff --git a/packages/iconify-collections/custom-vender/info.json b/packages/iconify-collections/custom-vender/info.json
new file mode 100644
index 0000000000..0a84c45bbd
--- /dev/null
+++ b/packages/iconify-collections/custom-vender/info.json
@@ -0,0 +1,24 @@
+{
+ "prefix": "custom-vender",
+ "name": "Dify Custom Vender",
+ "total": 277,
+ "version": "0.0.0-private",
+ "author": {
+ "name": "LangGenius, Inc.",
+ "url": "https://github.com/langgenius/dify"
+ },
+ "license": {
+ "title": "Modified Apache 2.0",
+ "spdx": "Apache-2.0",
+ "url": "https://github.com/langgenius/dify/blob/main/LICENSE"
+ },
+ "samples": [
+ "features-citations",
+ "features-content-moderation",
+ "features-document",
+ "features-folder-upload",
+ "features-love-message",
+ "features-message-fast"
+ ],
+ "palette": false
+}
diff --git a/packages/iconify-collections/custom-vender/metadata.json b/packages/iconify-collections/custom-vender/metadata.json
new file mode 100644
index 0000000000..0967ef424b
--- /dev/null
+++ b/packages/iconify-collections/custom-vender/metadata.json
@@ -0,0 +1 @@
+{}
diff --git a/packages/iconify-collections/package.json b/packages/iconify-collections/package.json
new file mode 100644
index 0000000000..3bd7285f1a
--- /dev/null
+++ b/packages/iconify-collections/package.json
@@ -0,0 +1,31 @@
+{
+ "name": "@dify/iconify-collections",
+ "private": true,
+ "version": "0.0.0-private",
+ "exports": {
+ "./custom-public": {
+ "types": "./custom-public/index.d.ts",
+ "require": "./custom-public/index.js",
+ "import": "./custom-public/index.mjs"
+ },
+ "./custom-public/icons.json": "./custom-public/icons.json",
+ "./custom-public/info.json": "./custom-public/info.json",
+ "./custom-public/metadata.json": "./custom-public/metadata.json",
+ "./custom-public/chars.json": "./custom-public/chars.json",
+ "./custom-vender": {
+ "types": "./custom-vender/index.d.ts",
+ "require": "./custom-vender/index.js",
+ "import": "./custom-vender/index.mjs"
+ },
+ "./custom-vender/icons.json": "./custom-vender/icons.json",
+ "./custom-vender/info.json": "./custom-vender/info.json",
+ "./custom-vender/metadata.json": "./custom-vender/metadata.json",
+ "./custom-vender/chars.json": "./custom-vender/chars.json"
+ },
+ "scripts": {
+ "generate": "node ./scripts/generate-collections.mjs"
+ },
+ "devDependencies": {
+ "iconify-import-svg": "catalog:"
+ }
+}
diff --git a/packages/iconify-collections/scripts/generate-collections.mjs b/packages/iconify-collections/scripts/generate-collections.mjs
new file mode 100644
index 0000000000..1c734731e6
--- /dev/null
+++ b/packages/iconify-collections/scripts/generate-collections.mjs
@@ -0,0 +1,178 @@
+import { mkdir, readFile, rm, writeFile } from 'node:fs/promises'
+import path from 'node:path'
+import { fileURLToPath } from 'node:url'
+import { importSvgCollections } from 'iconify-import-svg'
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url))
+const packageDir = path.resolve(__dirname, '..')
+
+const parseColorOptions = {
+ fallback: () => 'currentColor',
+}
+const svgOptimizeConfig = {
+ cleanupSVG: true,
+ deOptimisePaths: true,
+ runSVGO: true,
+ parseColors: parseColorOptions,
+}
+
+const customPublicCollections = importSvgCollections({
+ source: path.resolve(packageDir, 'assets/public'),
+ prefix: 'custom-public',
+ ignoreImportErrors: true,
+ ...svgOptimizeConfig,
+})
+
+const customVenderCollections = importSvgCollections({
+ source: path.resolve(packageDir, 'assets/vender'),
+ prefix: 'custom-vender',
+ ignoreImportErrors: true,
+ ...svgOptimizeConfig,
+})
+
+const packageJson = JSON.parse(await readFile(path.resolve(packageDir, 'package.json'), 'utf8'))
+
+const flattenCollections = (collections, prefix) => {
+ const icons = {}
+ const aliases = {}
+ let lastModified = 0
+
+ for (const [collectionKey, collection] of Object.entries(collections)) {
+ const segment = collectionKey.slice(prefix.length + 1)
+ const namePrefix = segment
+ ? `${segment}-`
+ : ''
+
+ for (const [iconName, iconData] of Object.entries(collection.icons ?? {}))
+ icons[`${namePrefix}${iconName}`] = iconData
+
+ for (const [aliasName, aliasData] of Object.entries(collection.aliases ?? {}))
+ aliases[`${namePrefix}${aliasName}`] = aliasData
+
+ if (typeof collection.lastModified === 'number')
+ lastModified = Math.max(lastModified, collection.lastModified)
+ }
+
+ return {
+ prefix,
+ ...(lastModified ? { lastModified } : {}),
+ icons,
+ ...(Object.keys(aliases).length ? { aliases } : {}),
+ }
+}
+
+const createCollectionInfo = (prefix, name, icons) => ({
+ prefix,
+ name,
+ total: Object.keys(icons).length,
+ version: packageJson.version,
+ author: {
+ name: 'LangGenius, Inc.',
+ url: 'https://github.com/langgenius/dify',
+ },
+ license: {
+ title: 'Modified Apache 2.0',
+ spdx: 'Apache-2.0',
+ url: 'https://github.com/langgenius/dify/blob/main/LICENSE',
+ },
+ samples: Object.keys(icons).slice(0, 6),
+ palette: false,
+})
+
+const createIndexMjs = () => `import icons from './icons.json' with { type: 'json' }
+import info from './info.json' with { type: 'json' }
+import metadata from './metadata.json' with { type: 'json' }
+import chars from './chars.json' with { type: 'json' }
+
+export { icons, info, metadata, chars }
+`
+
+const createIndexJs = () => `'use strict'
+
+const icons = require('./icons.json')
+const info = require('./info.json')
+const metadata = require('./metadata.json')
+const chars = require('./chars.json')
+
+module.exports = { icons, info, metadata, chars }
+`
+
+const createIndexTypes = () => `export interface IconifyJSON {
+ prefix: string
+ icons: Record<string, IconifyIcon>
+ aliases?: Record<string, IconifyAlias>
+ width?: number
+ height?: number
+ lastModified?: number
+}
+
+export interface IconifyIcon {
+ body: string
+ left?: number
+ top?: number
+ width?: number
+ height?: number
+ rotate?: 0 | 1 | 2 | 3
+ hFlip?: boolean
+ vFlip?: boolean
+}
+
+export interface IconifyAlias extends Omit<IconifyIcon, 'body'> {
+ parent: string
+}
+
+export interface IconifyInfo {
+ prefix: string
+ name: string
+ total: number
+ version: string
+ author?: {
+ name: string
+ url?: string
+ }
+ license?: {
+ title: string
+ spdx?: string
+ url?: string
+ }
+ samples?: string[]
+ palette?: boolean
+}
+
+export interface IconifyMetaData {
+ [key: string]: unknown
+}
+
+export interface IconifyChars {
+ [key: string]: string
+}
+
+export declare const icons: IconifyJSON
+export declare const info: IconifyInfo
+export declare const metadata: IconifyMetaData
+export declare const chars: IconifyChars
+`
+
+const writeCollectionPackage = async (directoryName, collection, name) => {
+ const targetDir = path.resolve(packageDir, directoryName)
+ const info = createCollectionInfo(collection.prefix, name, collection.icons)
+
+ await mkdir(targetDir, { recursive: true })
+ await writeFile(path.resolve(targetDir, 'icons.json'), `${JSON.stringify(collection, null, 2)}\n`)
+ await writeFile(path.resolve(targetDir, 'info.json'), `${JSON.stringify(info, null, 2)}\n`)
+ await writeFile(path.resolve(targetDir, 'metadata.json'), '{}\n')
+ await writeFile(path.resolve(targetDir, 'chars.json'), '{}\n')
+ await writeFile(path.resolve(targetDir, 'index.mjs'), `${createIndexMjs()}\n`)
+ await writeFile(path.resolve(targetDir, 'index.js'), `${createIndexJs()}\n`)
+ await writeFile(path.resolve(targetDir, 'index.d.ts'), `${createIndexTypes()}\n`)
+}
+
+const mergedCustomPublicCollection = flattenCollections(customPublicCollections, 'custom-public')
+const mergedCustomVenderCollection = flattenCollections(customVenderCollections, 'custom-vender')
+
+await rm(path.resolve(packageDir, 'src'), { recursive: true, force: true })
+await rm(path.resolve(packageDir, 'custom-public'), { recursive: true, force: true })
+await rm(path.resolve(packageDir, 'custom-vender'), { recursive: true, force: true })
+
+await writeCollectionPackage('custom-public', mergedCustomPublicCollection, 'Dify Custom Public')
+await writeCollectionPackage('custom-vender', mergedCustomVenderCollection, 'Dify Custom Vender')
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 01a96c5585..7a44b621b1 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -34,7 +34,7 @@ catalogs:
specifier: 3.0.0
version: 3.0.0
'@eslint/js':
- specifier: ^10.0.1
+ specifier: 10.0.1
version: 10.0.1
'@floating-ui/react':
specifier: 0.27.19
@@ -144,9 +144,15 @@ catalogs:
'@t3-oss/env-nextjs':
specifier: 0.13.11
version: 0.13.11
+ '@tailwindcss/postcss':
+ specifier: 4.2.2
+ version: 4.2.2
'@tailwindcss/typography':
specifier: 0.5.19
version: 0.5.19
+ '@tailwindcss/vite':
+ specifier: 4.2.2
+ version: 4.2.2
'@tanstack/eslint-plugin-query':
specifier: 5.95.2
version: 5.95.2
@@ -198,9 +204,6 @@ catalogs:
'@types/node':
specifier: 25.5.0
version: 25.5.0
- '@types/postcss-js':
- specifier: 4.1.0
- version: 4.1.0
'@types/qs':
specifier: 6.15.0
version: 6.15.0
@@ -220,7 +223,7 @@ catalogs:
specifier: 1.15.9
version: 1.15.9
'@typescript-eslint/eslint-plugin':
- specifier: ^8.57.2
+ specifier: 8.57.2
version: 8.57.2
'@typescript-eslint/parser':
specifier: 8.57.2
@@ -235,8 +238,8 @@ catalogs:
specifier: 0.5.21
version: 0.5.21
'@vitest/coverage-v8':
- specifier: 4.1.2
- version: 4.1.2
+ specifier: 4.1.1
+ version: 4.1.1
abcjs:
specifier: 6.6.2
version: 6.6.2
@@ -246,12 +249,6 @@ catalogs:
ahooks:
specifier: 3.9.7
version: 3.9.7
- autoprefixer:
- specifier: 10.4.27
- version: 10.4.27
- axios:
- specifier: ^1.14.0
- version: 1.14.0
class-variance-authority:
specifier: 0.7.1
version: 0.7.1
@@ -348,9 +345,6 @@ catalogs:
html-to-image:
specifier: 1.11.13
version: 1.11.13
- husky:
- specifier: 9.1.7
- version: 9.1.7
i18next:
specifier: 25.10.10
version: 25.10.10
@@ -393,9 +387,6 @@ catalogs:
lexical:
specifier: 0.42.0
version: 0.42.0
- lint-staged:
- specifier: 16.4.0
- version: 16.4.0
mermaid:
specifier: 11.13.0
version: 11.13.0
@@ -423,9 +414,6 @@ catalogs:
postcss:
specifier: 8.5.8
version: 8.5.8
- postcss-js:
- specifier: 5.1.0
- version: 5.1.0
qrcode.react:
specifier: 4.2.0
version: 4.2.0
@@ -508,11 +496,11 @@ catalogs:
specifier: 2.3.1
version: 2.3.1
tailwind-merge:
- specifier: 2.6.1
- version: 2.6.1
+ specifier: 3.5.0
+ version: 3.5.0
tailwindcss:
- specifier: 3.4.19
- version: 3.4.19
+ specifier: 4.2.2
+ version: 4.2.2
taze:
specifier: 19.10.0
version: 19.10.0
@@ -627,6 +615,9 @@ importers:
taze:
specifier: 'catalog:'
version: 19.10.0
+ vite-plus:
+ specifier: 'catalog:'
+ version: 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
e2e:
devDependencies:
@@ -649,11 +640,13 @@ importers:
specifier: 'catalog:'
version: 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
- sdks/nodejs-client:
- dependencies:
- axios:
+ packages/iconify-collections:
+ devDependencies:
+ iconify-import-svg:
specifier: 'catalog:'
- version: 1.14.0
+ version: 0.1.2
+
+ sdks/nodejs-client:
devDependencies:
'@eslint/js':
specifier: 'catalog:'
@@ -669,7 +662,7 @@ importers:
version: 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@vitest/coverage-v8':
specifier: 'catalog:'
- version: 4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))
+ version: 4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))
eslint:
specifier: 'catalog:'
version: 10.1.0(jiti@2.6.1)
@@ -762,7 +755,7 @@ importers:
version: 0.13.11(typescript@5.9.3)(valibot@1.3.1(typescript@5.9.3))(zod@4.3.6)
'@tailwindcss/typography':
specifier: 'catalog:'
- version: 0.5.19(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.3))
+ version: 0.5.19(tailwindcss@4.2.2)
'@tanstack/react-form':
specifier: 'catalog:'
version: 1.28.5(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -966,7 +959,7 @@ importers:
version: 2.3.1
tailwind-merge:
specifier: 'catalog:'
- version: 2.6.1
+ version: 3.5.0
tldts:
specifier: 'catalog:'
version: 7.0.27
@@ -991,16 +984,19 @@ importers:
devDependencies:
'@antfu/eslint-config':
specifier: 'catalog:'
- version: 7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.31)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@1.21.7)))(eslint@10.1.0(jiti@1.21.7))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(typescript@5.9.3)
+ version: 7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.31)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(typescript@5.9.3)
'@chromatic-com/storybook':
specifier: 'catalog:'
version: 5.1.1(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))
+ '@dify/iconify-collections':
+ specifier: workspace:*
+ version: link:../packages/iconify-collections
'@egoist/tailwindcss-icons':
specifier: 'catalog:'
- version: 1.9.2(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.3))
+ version: 1.9.2(tailwindcss@4.2.2)
'@eslint-react/eslint-plugin':
specifier: 'catalog:'
- version: 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ version: 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@hono/node-server':
specifier: 'catalog:'
version: 1.19.11(hono@4.12.9)
@@ -1030,7 +1026,7 @@ importers:
version: 4.2.0
'@storybook/addon-docs':
specifier: 'catalog:'
- version: 10.3.3(@types/react@19.2.14)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
+ version: 10.3.3(@types/react@19.2.14)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@storybook/addon-links':
specifier: 'catalog:'
version: 10.3.3(react@19.2.4)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))
@@ -1042,13 +1038,19 @@ importers:
version: 10.3.3(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))
'@storybook/nextjs-vite':
specifier: 'catalog:'
- version: 10.3.3(@babel/core@7.29.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
+ version: 10.3.3(@babel/core@7.29.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@storybook/react':
specifier: 'catalog:'
version: 10.3.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
+ '@tailwindcss/postcss':
+ specifier: 'catalog:'
+ version: 4.2.2
+ '@tailwindcss/vite':
+ specifier: 'catalog:'
+ version: 4.2.2(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
'@tanstack/eslint-plugin-query':
specifier: 'catalog:'
- version: 5.95.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ version: 5.95.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@tanstack/react-devtools':
specifier: 'catalog:'
version: 0.10.0(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(csstype@3.2.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)
@@ -1072,13 +1074,13 @@ importers:
version: 14.6.1(@testing-library/dom@10.4.1)
'@tsslint/cli':
specifier: 'catalog:'
- version: 3.0.2(@tsslint/compat-eslint@3.0.2(jiti@1.21.7)(typescript@5.9.3))(typescript@5.9.3)
+ version: 3.0.2(@tsslint/compat-eslint@3.0.2(jiti@2.6.1)(typescript@5.9.3))(typescript@5.9.3)
'@tsslint/compat-eslint':
specifier: 'catalog:'
- version: 3.0.2(jiti@1.21.7)(typescript@5.9.3)
+ version: 3.0.2(jiti@2.6.1)(typescript@5.9.3)
'@tsslint/config':
specifier: 'catalog:'
- version: 3.0.2(@tsslint/compat-eslint@3.0.2(jiti@1.21.7)(typescript@5.9.3))(typescript@5.9.3)
+ version: 3.0.2(@tsslint/compat-eslint@3.0.2(jiti@2.6.1)(typescript@5.9.3))(typescript@5.9.3)
'@types/js-cookie':
specifier: 'catalog:'
version: 3.0.6
@@ -1091,9 +1093,6 @@ importers:
'@types/node':
specifier: 'catalog:'
version: 25.5.0
- '@types/postcss-js':
- specifier: 'catalog:'
- version: 4.1.0
'@types/qs':
specifier: 'catalog:'
version: 6.15.0
@@ -1114,82 +1113,67 @@ importers:
version: 1.15.9
'@typescript-eslint/parser':
specifier: 'catalog:'
- version: 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ version: 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript/native-preview':
specifier: 'catalog:'
version: 7.0.0-dev.20260329.1
'@vitejs/plugin-react':
specifier: 'catalog:'
- version: 6.0.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
+ version: 6.0.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
'@vitejs/plugin-rsc':
specifier: 'catalog:'
- version: 0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)
+ version: 0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)
'@vitest/coverage-v8':
specifier: 'catalog:'
- version: 4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
+ version: 4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
agentation:
specifier: 'catalog:'
version: 3.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
- autoprefixer:
- specifier: 'catalog:'
- version: 10.4.27(postcss@8.5.8)
code-inspector-plugin:
specifier: 'catalog:'
version: 1.4.5
eslint:
specifier: 'catalog:'
- version: 10.1.0(jiti@1.21.7)
+ version: 10.1.0(jiti@2.6.1)
eslint-markdown:
specifier: 'catalog:'
- version: 0.6.0(eslint@10.1.0(jiti@1.21.7))
+ version: 0.6.0(eslint@10.1.0(jiti@2.6.1))
eslint-plugin-better-tailwindcss:
specifier: 'catalog:'
- version: 4.3.2(eslint@10.1.0(jiti@1.21.7))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.3))(typescript@5.9.3)
+ version: 4.3.2(eslint@10.1.0(jiti@2.6.1))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(tailwindcss@4.2.2)(typescript@5.9.3)
eslint-plugin-hyoban:
specifier: 'catalog:'
- version: 0.14.1(eslint@10.1.0(jiti@1.21.7))
+ version: 0.14.1(eslint@10.1.0(jiti@2.6.1))
eslint-plugin-markdown-preferences:
specifier: 'catalog:'
- version: 0.40.3(@eslint/markdown@7.5.1)(eslint@10.1.0(jiti@1.21.7))
+ version: 0.40.3(@eslint/markdown@7.5.1)(eslint@10.1.0(jiti@2.6.1))
eslint-plugin-no-barrel-files:
specifier: 'catalog:'
- version: 1.2.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ version: 1.2.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
eslint-plugin-react-hooks:
specifier: 'catalog:'
- version: 7.0.1(eslint@10.1.0(jiti@1.21.7))
+ version: 7.0.1(eslint@10.1.0(jiti@2.6.1))
eslint-plugin-react-refresh:
specifier: 'catalog:'
- version: 0.5.2(eslint@10.1.0(jiti@1.21.7))
+ version: 0.5.2(eslint@10.1.0(jiti@2.6.1))
eslint-plugin-sonarjs:
specifier: 'catalog:'
- version: 4.0.2(eslint@10.1.0(jiti@1.21.7))
+ version: 4.0.2(eslint@10.1.0(jiti@2.6.1))
eslint-plugin-storybook:
specifier: 'catalog:'
- version: 10.3.3(eslint@10.1.0(jiti@1.21.7))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
+ version: 10.3.3(eslint@10.1.0(jiti@2.6.1))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
happy-dom:
specifier: 'catalog:'
version: 20.8.9
hono:
specifier: 'catalog:'
version: 4.12.9
- husky:
- specifier: 'catalog:'
- version: 9.1.7
- iconify-import-svg:
- specifier: 'catalog:'
- version: 0.1.2
knip:
specifier: 'catalog:'
version: 6.1.0(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
- lint-staged:
- specifier: 'catalog:'
- version: 16.4.0
postcss:
specifier: 'catalog:'
version: 8.5.8
- postcss-js:
- specifier: 'catalog:'
- version: 5.1.0(postcss@8.5.8)
react-server-dom-webpack:
specifier: 'catalog:'
version: 19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
@@ -1201,7 +1185,7 @@ importers:
version: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
tailwindcss:
specifier: 'catalog:'
- version: 3.4.19(tsx@4.21.0)(yaml@2.8.3)
+ version: 4.2.2
tsx:
specifier: 'catalog:'
version: 4.21.0
@@ -1213,22 +1197,22 @@ importers:
version: 3.19.3
vinext:
specifier: 'catalog:'
- version: 0.0.38(f5786d681f520e26604259e094ebaa46)
+ version: 0.0.38(21fde6c2677b0aab516df83ef1beed5d)
vite:
specifier: npm:@voidzero-dev/vite-plus-core@0.1.14
- version: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ version: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vite-plugin-inspect:
specifier: 'catalog:'
- version: 12.0.0-beta.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0)
+ version: 12.0.0-beta.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0)
vite-plus:
specifier: 'catalog:'
- version: 0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
+ version: 0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
vitest:
specifier: npm:@voidzero-dev/vite-plus-test@0.1.14
- version: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ version: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vitest-canvas-mock:
specifier: 'catalog:'
- version: 1.1.4(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
+ version: 1.1.4(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
packages:
@@ -2407,10 +2391,6 @@ packages:
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
- '@nolyfill/hasown@1.0.44':
- resolution: {integrity: sha512-GA/21lkTr2PAQuT6jGnhLuBD5IFd/AEhBXJ/tf33+/bVxPxg+5ejKx9jGQGnyV/P0eSmdup5E+s8b2HL6lOrwQ==}
- engines: {node: '>=12.4.0'}
-
'@nolyfill/is-core-module@1.0.39':
resolution: {integrity: sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==}
engines: {node: '>=12.4.0'}
@@ -3848,11 +3828,108 @@ packages:
zod:
optional: true
+ '@tailwindcss/node@4.2.2':
+ resolution: {integrity: sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==}
+
+ '@tailwindcss/oxide-android-arm64@4.2.2':
+ resolution: {integrity: sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==}
+ engines: {node: '>= 20'}
+ cpu: [arm64]
+ os: [android]
+
+ '@tailwindcss/oxide-darwin-arm64@4.2.2':
+ resolution: {integrity: sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==}
+ engines: {node: '>= 20'}
+ cpu: [arm64]
+ os: [darwin]
+
+ '@tailwindcss/oxide-darwin-x64@4.2.2':
+ resolution: {integrity: sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==}
+ engines: {node: '>= 20'}
+ cpu: [x64]
+ os: [darwin]
+
+ '@tailwindcss/oxide-freebsd-x64@4.2.2':
+ resolution: {integrity: sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==}
+ engines: {node: '>= 20'}
+ cpu: [x64]
+ os: [freebsd]
+
+ '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2':
+ resolution: {integrity: sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==}
+ engines: {node: '>= 20'}
+ cpu: [arm]
+ os: [linux]
+
+ '@tailwindcss/oxide-linux-arm64-gnu@4.2.2':
+ resolution: {integrity: sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==}
+ engines: {node: '>= 20'}
+ cpu: [arm64]
+ os: [linux]
+ libc: [glibc]
+
+ '@tailwindcss/oxide-linux-arm64-musl@4.2.2':
+ resolution: {integrity: sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==}
+ engines: {node: '>= 20'}
+ cpu: [arm64]
+ os: [linux]
+ libc: [musl]
+
+ '@tailwindcss/oxide-linux-x64-gnu@4.2.2':
+ resolution: {integrity: sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==}
+ engines: {node: '>= 20'}
+ cpu: [x64]
+ os: [linux]
+ libc: [glibc]
+
+ '@tailwindcss/oxide-linux-x64-musl@4.2.2':
+ resolution: {integrity: sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==}
+ engines: {node: '>= 20'}
+ cpu: [x64]
+ os: [linux]
+ libc: [musl]
+
+ '@tailwindcss/oxide-wasm32-wasi@4.2.2':
+ resolution: {integrity: sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==}
+ engines: {node: '>=14.0.0'}
+ cpu: [wasm32]
+ bundledDependencies:
+ - '@napi-rs/wasm-runtime'
+ - '@emnapi/core'
+ - '@emnapi/runtime'
+ - '@tybys/wasm-util'
+ - '@emnapi/wasi-threads'
+ - tslib
+
+ '@tailwindcss/oxide-win32-arm64-msvc@4.2.2':
+ resolution: {integrity: sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==}
+ engines: {node: '>= 20'}
+ cpu: [arm64]
+ os: [win32]
+
+ '@tailwindcss/oxide-win32-x64-msvc@4.2.2':
+ resolution: {integrity: sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==}
+ engines: {node: '>= 20'}
+ cpu: [x64]
+ os: [win32]
+
+ '@tailwindcss/oxide@4.2.2':
+ resolution: {integrity: sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==}
+ engines: {node: '>= 20'}
+
+ '@tailwindcss/postcss@4.2.2':
+ resolution: {integrity: sha512-n4goKQbW8RVXIbNKRB/45LzyUqN451deQK0nzIeauVEqjlI49slUlgKYJM2QyUzap/PcpnS7kzSUmPb1sCRvYQ==}
+
'@tailwindcss/typography@0.5.19':
resolution: {integrity: sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg==}
peerDependencies:
tailwindcss: '>=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1'
+ '@tailwindcss/vite@4.2.2':
+ resolution: {integrity: sha512-mEiF5HO1QqCLXoNEfXVA1Tzo+cYsrqV7w9Juj2wdUFyW07JRenqMG225MvPwr3ZD9N1bFQj46X7r33iHxLUW0w==}
+ peerDependencies:
+ vite: ^5.2.0 || ^6 || ^7 || ^8
+
'@tanstack/devtools-client@0.0.6':
resolution: {integrity: sha512-f85ZJXJnDIFOoykG/BFIixuAevJovCvJF391LPs6YjBAPhGYC50NWlx1y4iF/UmK5/cCMx+/JqI5SBOz7FanQQ==}
engines: {node: '>=18'}
@@ -4226,9 +4303,6 @@ packages:
'@types/papaparse@5.5.2':
resolution: {integrity: sha512-gFnFp/JMzLHCwRf7tQHrNnfhN4eYBVYYI897CGX4MY1tzY9l2aLkVyx2IlKZ/SAqDbB3I1AOZW5gTMGGsqWliA==}
- '@types/postcss-js@4.1.0':
- resolution: {integrity: sha512-E19kBYOk2uEhzxfbam6jALzE6J1GNdny2jdftwDHo72+oWWt7bkWSGzZYVfaRK1r/UToMhAcfbKCAauBXrxi7g==}
-
'@types/qs@6.15.0':
resolution: {integrity: sha512-JawvT8iBVWpzTrz3EGw9BTQFg3BQNmwERdKE22vlTxawwtbyUSlMppvZYKLZzB5zgACXdXxbD3m1bXaMqP/9ow==}
@@ -4442,11 +4516,11 @@ packages:
react-server-dom-webpack:
optional: true
- '@vitest/coverage-v8@4.1.2':
- resolution: {integrity: sha512-sPK//PHO+kAkScb8XITeB1bf7fsk85Km7+rt4eeuRR3VS1/crD47cmV5wicisJmjNdfeokTZwjMk4Mj2d58Mgg==}
+ '@vitest/coverage-v8@4.1.1':
+ resolution: {integrity: sha512-nZ4RWwGCoGOQRMmU/Q9wlUY540RVRxJZ9lxFsFfy0QV7Zmo5VVBhB6Sl9Xa0KIp2iIs3zWfPlo9LcY1iqbpzCw==}
peerDependencies:
- '@vitest/browser': 4.1.2
- vitest: 4.1.2
+ '@vitest/browser': 4.1.1
+ vitest: 4.1.1
peerDependenciesMeta:
'@vitest/browser':
optional: true
@@ -4473,8 +4547,8 @@ packages:
'@vitest/pretty-format@3.2.4':
resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==}
- '@vitest/pretty-format@4.1.2':
- resolution: {integrity: sha512-dwQga8aejqeuB+TvXCMzSQemvV9hNEtDDpgUKDzOmNQayl2OG241PSWeJwKRH3CiC+sESrmoFd49rfnq7T4RnA==}
+ '@vitest/pretty-format@4.1.1':
+ resolution: {integrity: sha512-GM+TEQN5WhOygr1lp7skeVjdLPqqWMHsfzXrcHAqZJi/lIVh63H0kaRCY8MDhNWikx19zBUK8ceaLB7X5AH9NQ==}
'@vitest/spy@3.2.4':
resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==}
@@ -4482,8 +4556,8 @@ packages:
'@vitest/utils@3.2.4':
resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==}
- '@vitest/utils@4.1.2':
- resolution: {integrity: sha512-xw2/TiX82lQHA06cgbqRKFb5lCAy3axQ4H4SoUFhUsg+wztiet+co86IAMDtF6Vm1hc7J6j09oh/rgDn+JdKIQ==}
+ '@vitest/utils@4.1.1':
+ resolution: {integrity: sha512-cNxAlaB3sHoCdL6pj6yyUXv9Gry1NHNg0kFTXdvSIZXLHsqKH7chiWOkwJ5s5+d/oMwcoG9T0bKU38JZWKusrQ==}
'@voidzero-dev/vite-plus-core@0.1.14':
resolution: {integrity: sha512-CCWzdkfW0fo0cQNlIsYp5fOuH2IwKuPZEb2UY2Z8gXcp5pG74A82H2Pthj0heAuvYTAnfT7kEC6zM+RbiBgQbg==}
@@ -4762,10 +4836,6 @@ packages:
ajv@8.18.0:
resolution: {integrity: sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==}
- ansi-escapes@7.3.0:
- resolution: {integrity: sha512-BvU8nYgGQBxcmMuEeUEmNTvrMVjJNSH7RgW24vXexN4Ven6qCvy4TntnvlnwnMLTVlcRQQdbRY8NKnaIoeWDNg==}
- engines: {node: '>=18'}
-
ansi-regex@4.1.1:
resolution: {integrity: sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==}
engines: {node: '>=6'}
@@ -4786,10 +4856,6 @@ packages:
resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==}
engines: {node: '>=10'}
- ansi-styles@6.2.3:
- resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==}
- engines: {node: '>=12'}
-
ansis@4.2.0:
resolution: {integrity: sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig==}
engines: {node: '>=14'}
@@ -4797,17 +4863,10 @@ packages:
any-promise@1.3.0:
resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==}
- anymatch@3.1.3:
- resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
- engines: {node: '>= 8'}
-
are-docs-informative@0.0.2:
resolution: {integrity: sha512-ixiS0nLNNG5jNQzgZJNoUpBKdo9yTYZMGJ+QgT2jmjR7G7+QHRCc4v6LQ3NgE7EBJq+o0ams3waJwkrlBom8Ig==}
engines: {node: '>=14'}
- arg@5.0.2:
- resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==}
-
argparse@2.0.1:
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
@@ -4843,19 +4902,6 @@ packages:
async@3.2.6:
resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==}
- asynckit@0.4.0:
- resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
-
- autoprefixer@10.4.27:
- resolution: {integrity: sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==}
- engines: {node: ^10 || ^12 || >=14}
- hasBin: true
- peerDependencies:
- postcss: ^8.1.0
-
- axios@1.14.0:
- resolution: {integrity: sha512-3Y8yrqLSwjuzpXuZ0oIYZ/XGgLwUIBU3uLvbcpb0pidD9ctpShJd43KSlEEkVQg6DS0G9NKyzOvBfUtDKEyHvQ==}
-
bail@2.0.2:
resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==}
@@ -4882,10 +4928,6 @@ packages:
engines: {node: '>=6.0.0'}
hasBin: true
- binary-extensions@2.3.0:
- resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
- engines: {node: '>=8'}
-
birecord@0.1.1:
resolution: {integrity: sha512-VUpsf/qykW0heRlC8LooCq28Kxn3mAqKohhDG/49rrsQ1dT1CXyj/pgXS+5BSRzFTR/3DyIBOqQOrGyZOh71Aw==}
@@ -4953,18 +4995,10 @@ packages:
resolution: {integrity: sha512-tixWYgm5ZoOD+3g6UTea91eow5z6AAHaho3g0V9CNSNb45gM8SmflpAc+GRd1InC4AqN/07Unrgp56Y94N9hJQ==}
engines: {node: '>=20.19.0'}
- call-bind-apply-helpers@1.0.2:
- resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==}
- engines: {node: '>= 0.4'}
-
callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'}
- camelcase-css@2.0.1:
- resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==}
- engines: {node: '>= 6'}
-
camelize@1.0.1:
resolution: {integrity: sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==}
@@ -5036,10 +5070,6 @@ packages:
chevrotain@11.1.2:
resolution: {integrity: sha512-opLQzEVriiH1uUQ4Kctsd49bRoFDXGGSC4GUqj7pGyxM3RehRhvTlZJc1FL/Flew2p5uwxa1tUDWKzI4wNM8pg==}
- chokidar@3.6.0:
- resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
- engines: {node: '>= 8.10.0'}
-
chokidar@4.0.3:
resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==}
engines: {node: '>= 14.16.0'}
@@ -5087,18 +5117,10 @@ packages:
resolution: {integrity: sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==}
engines: {node: '>=4'}
- cli-cursor@5.0.0:
- resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==}
- engines: {node: '>=18'}
-
cli-table3@0.6.5:
resolution: {integrity: sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==}
engines: {node: 10.* || >= 12.*}
- cli-truncate@5.2.0:
- resolution: {integrity: sha512-xRwvIOMGrfOAnM1JYtqQImuaNtDEv9v6oIYAs4LIHwTiKee8uwvIi363igssOC0O5U04i4AlENs79LQLu9tEMw==}
- engines: {node: '>=20'}
-
client-only@0.0.1:
resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==}
@@ -5125,13 +5147,6 @@ packages:
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
- colorette@2.0.20:
- resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==}
-
- combined-stream@1.0.8:
- resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
- engines: {node: '>= 0.8'}
-
comma-separated-tokens@1.0.8:
resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
@@ -5466,10 +5481,6 @@ packages:
delaunator@5.1.0:
resolution: {integrity: sha512-AGrQ4QSgssa1NGmWmLPqN5NY2KajF5MqxetNEO+o0n3ZwZZeTmt7bBnvzHWrmkZFxGgr4HdyFgelzgi06otLuQ==}
- delayed-stream@1.0.0:
- resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
- engines: {node: '>=0.4.0'}
-
dequal@2.0.3:
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
engines: {node: '>=6'}
@@ -5487,9 +5498,6 @@ packages:
devlop@1.1.0:
resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==}
- didyoumean@1.2.2:
- resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==}
-
diff-sequences@29.6.3:
resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
@@ -5498,9 +5506,6 @@ packages:
resolution: {integrity: sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==}
engines: {node: '>=0.3.1'}
- dlv@1.1.3:
- resolution: {integrity: sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==}
-
doctrine@3.0.0:
resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==}
engines: {node: '>=6.0.0'}
@@ -5535,10 +5540,6 @@ packages:
resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
engines: {node: '>=12'}
- dunder-proto@1.0.1:
- resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==}
- engines: {node: '>= 0.4'}
-
echarts-for-react@3.0.6:
resolution: {integrity: sha512-4zqLgTGWS3JvkQDXjzkR1k1CHRdpd6by0988TWMJgnvDytegWLbeP/VNZmMa+0VJx2eD7Y632bi2JquXDgiGJg==}
peerDependencies:
@@ -5605,38 +5606,18 @@ packages:
resolution: {integrity: sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==}
engines: {node: '>=0.12'}
- environment@1.1.0:
- resolution: {integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==}
- engines: {node: '>=18'}
-
error-stack-parser-es@1.0.5:
resolution: {integrity: sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==}
error-stack-parser@2.1.4:
resolution: {integrity: sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==}
- es-define-property@1.0.1:
- resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==}
- engines: {node: '>= 0.4'}
-
- es-errors@1.3.0:
- resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
- engines: {node: '>= 0.4'}
-
es-module-lexer@1.7.0:
resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==}
es-module-lexer@2.0.0:
resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==}
- es-object-atoms@1.1.1:
- resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==}
- engines: {node: '>= 0.4'}
-
- es-set-tostringtag@2.1.0:
- resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==}
- engines: {node: '>= 0.4'}
-
es-toolkit@1.45.1:
resolution: {integrity: sha512-/jhoOj/Fx+A+IIyDNOvO3TItGmlMKhtX8ISAHKE90c4b/k1tqaqEZ+uUqfpU8DMnW5cgNJv606zS55jGvza0Xw==}
@@ -6014,9 +5995,6 @@ packages:
event-target-bus@1.0.0:
resolution: {integrity: sha512-uPcWKbj/BJU3Tbw9XqhHqET4/LBOhvv3/SJWr7NksxA6TC5YqBpaZgawE9R+WpYFCBFSAE4Vun+xQS6w4ABdlA==}
- eventemitter3@5.0.4:
- resolution: {integrity: sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==}
-
events@3.3.0:
resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==}
engines: {node: '>=0.8.x'}
@@ -6117,19 +6095,6 @@ packages:
flatted@3.4.2:
resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==}
- follow-redirects@1.15.11:
- resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==}
- engines: {node: '>=4.0'}
- peerDependencies:
- debug: '*'
- peerDependenciesMeta:
- debug:
- optional: true
-
- form-data@4.0.5:
- resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==}
- engines: {node: '>= 6'}
-
format@0.2.2:
resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==}
engines: {node: '>=0.4.x'}
@@ -6150,9 +6115,6 @@ packages:
react-dom:
optional: true
- fraction.js@5.3.4:
- resolution: {integrity: sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==}
-
fs-constants@1.0.0:
resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==}
@@ -6166,9 +6128,6 @@ packages:
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
- function-bind@1.1.2:
- resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
-
functional-red-black-tree@1.0.1:
resolution: {integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==}
@@ -6183,18 +6142,10 @@ packages:
resolution: {integrity: sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA==}
engines: {node: '>=18'}
- get-intrinsic@1.3.0:
- resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==}
- engines: {node: '>= 0.4'}
-
get-nonce@1.0.1:
resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==}
engines: {node: '>=6'}
- get-proto@1.0.1:
- resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==}
- engines: {node: '>= 0.4'}
-
get-stream@5.2.0:
resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==}
engines: {node: '>=8'}
@@ -6251,10 +6202,6 @@ packages:
peerDependencies:
csstype: ^3.0.10
- gopd@1.2.0:
- resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
- engines: {node: '>= 0.4'}
-
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
@@ -6273,14 +6220,6 @@ packages:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
- has-symbols@1.1.0:
- resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==}
- engines: {node: '>= 0.4'}
-
- has-tostringtag@1.0.2:
- resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==}
- engines: {node: '>= 0.4'}
-
hast-util-from-dom@5.0.1:
resolution: {integrity: sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==}
@@ -6374,11 +6313,6 @@ packages:
htmlparser2@10.1.0:
resolution: {integrity: sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ==}
- husky@9.1.7:
- resolution: {integrity: sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==}
- engines: {node: '>=18'}
- hasBin: true
-
i18next-resources-to-backend@1.2.1:
resolution: {integrity: sha512-okHbVA+HZ7n1/76MsfhPqDou0fptl2dAlhRDu2ideXloRRduzHsqDOznJBef+R3DFZnbvWoBW+KxJ7fnFjd6Yw==}
@@ -6481,10 +6415,6 @@ packages:
is-alphanumerical@2.0.1:
resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==}
- is-binary-path@2.1.0:
- resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
- engines: {node: '>=8'}
-
is-builtin-module@5.0.0:
resolution: {integrity: sha512-f4RqJKBUe5rQkJ2eJEJBXSticB3hGbN9j0yxxMQFqIW89Jp9WYFtzfTcRlstDKVUTRzSOTLKRfO9vIztenwtxA==}
engines: {node: '>=18.20'}
@@ -6504,10 +6434,6 @@ packages:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
- is-fullwidth-code-point@5.1.0:
- resolution: {integrity: sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==}
- engines: {node: '>=18'}
-
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
@@ -6576,10 +6502,6 @@ packages:
resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
engines: {node: '>= 10.13.0'}
- jiti@1.21.7:
- resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==}
- hasBin: true
-
jiti@2.6.1:
resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==}
hasBin: true
@@ -6815,15 +6737,6 @@ packages:
lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
- lint-staged@16.4.0:
- resolution: {integrity: sha512-lBWt8hujh/Cjysw5GYVmZpFHXDCgZzhrOm8vbcUdobADZNOK/bRshr2kM3DfgrrtR1DQhfupW9gnIXOfiFi+bw==}
- engines: {node: '>=20.17'}
- hasBin: true
-
- listr2@9.0.5:
- resolution: {integrity: sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==}
- engines: {node: '>=20.0.0'}
-
load-tsconfig@0.2.5:
resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
@@ -6855,10 +6768,6 @@ packages:
lodash@4.17.23:
resolution: {integrity: sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==}
- log-update@6.1.0:
- resolution: {integrity: sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==}
- engines: {node: '>=18'}
-
longest-streak@3.1.0:
resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==}
@@ -6922,10 +6831,6 @@ packages:
engines: {node: '>= 20'}
hasBin: true
- math-intrinsics@1.1.0:
- resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
- engines: {node: '>= 0.4'}
-
mdast-util-directive@3.1.0:
resolution: {integrity: sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==}
@@ -7279,10 +7184,6 @@ packages:
resolution: {integrity: sha512-RWk+PI433eESQ7ounYxIp67CYuVsS1uYSonX3kA6ps/3LWfjVQa/ptEg6Y3T6uAMq1mWpX9PQ+qx+QaHpsc7gQ==}
engines: {node: ^20.17.0 || >=22.9.0}
- normalize-path@3.0.0:
- resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
- engines: {node: '>=0.10.0'}
-
normalize-wheel@1.0.1:
resolution: {integrity: sha512-1OnlAPZ3zgrk8B91HyRj+eVv+kS5u+Z0SCsak6Xil/kmgEia50ga7zfkumayonZrImffAxPU/5WcyGhzetHNPA==}
@@ -7317,10 +7218,6 @@ packages:
object-deep-merge@2.0.0:
resolution: {integrity: sha512-3DC3UMpeffLTHiuXSy/UG4NOIYTLlY9u3V82+djSCLYClWobZiS4ivYzpIUWrRY/nfsJ8cWsKyG3QfyLePmhvg==}
- object-hash@3.0.0:
- resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==}
- engines: {node: '>= 6'}
-
obug@2.1.1:
resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==}
@@ -7498,10 +7395,6 @@ packages:
resolution: {integrity: sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==}
engines: {node: '>=12'}
- pify@2.3.0:
- resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==}
- engines: {node: '>=0.10.0'}
-
pinyin-pro@3.28.0:
resolution: {integrity: sha512-mMRty6RisoyYNphJrTo3pnvp3w8OMZBrXm9YSWkxhAfxKj1KZk2y8T2PDIZlDDRsvZ0No+Hz6FI4sZpA6Ey25g==}
@@ -7550,24 +7443,6 @@ packages:
resolution: {integrity: sha512-rEwq/ZHlJIKw++XtLAO8PPuOQA/zaPJOZJ37BVuN97nLpMJeuDVLVGRwbFoBgLudgdTMP2hdRJP++H+8QOA3vg==}
engines: {node: '>= 10.12'}
- postcss-import@15.1.0:
- resolution: {integrity: sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==}
- engines: {node: '>=14.0.0'}
- peerDependencies:
- postcss: ^8.0.0
-
- postcss-js@4.1.0:
- resolution: {integrity: sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==}
- engines: {node: ^12 || ^14 || >= 16}
- peerDependencies:
- postcss: ^8.4.21
-
- postcss-js@5.1.0:
- resolution: {integrity: sha512-glrtXSrLt3eH/mgceNgP6u/6jHodqRQ/ToFht+yqwquw0KBf6Zue5qJQFgcIEfQQyYl+BCPN/TYdWyeOQh3c5Q==}
- engines: {node: ^20 || ^22 || >= 24}
- peerDependencies:
- postcss: ^8.4.21
-
postcss-load-config@6.0.1:
resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==}
engines: {node: '>= 18'}
@@ -7586,20 +7461,10 @@ packages:
yaml:
optional: true
- postcss-nested@6.2.0:
- resolution: {integrity: sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==}
- engines: {node: '>=12.0'}
- peerDependencies:
- postcss: ^8.2.14
-
postcss-selector-parser@6.0.10:
resolution: {integrity: sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==}
engines: {node: '>=4'}
- postcss-selector-parser@6.1.2:
- resolution: {integrity: sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==}
- engines: {node: '>=4'}
-
postcss-selector-parser@7.1.1:
resolution: {integrity: sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==}
engines: {node: '>=4'}
@@ -7653,10 +7518,6 @@ packages:
property-information@7.1.0:
resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
- proxy-from-env@2.1.0:
- resolution: {integrity: sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==}
- engines: {node: '>=10'}
-
pump@3.0.4:
resolution: {integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==}
@@ -7859,9 +7720,6 @@ packages:
react: '>=17'
react-dom: '>=17'
- read-cache@1.0.0:
- resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==}
-
read-package-up@12.0.0:
resolution: {integrity: sha512-Q5hMVBYur/eQNWDdbF4/Wqqr9Bjvtrw2kjGxxBbKLbx8bVCL8gcArjTy8zDUuLGQicftpMuU0riQNcAsbtOVsw==}
engines: {node: '>=20'}
@@ -7874,10 +7732,6 @@ packages:
resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
engines: {node: '>= 6'}
- readdirp@3.6.0:
- resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
- engines: {node: '>=8.10.0'}
-
readdirp@4.1.2:
resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==}
engines: {node: '>= 14.18.0'}
@@ -8013,9 +7867,6 @@ packages:
resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
- rfdc@1.4.1:
- resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==}
-
robust-predicates@3.0.3:
resolution: {integrity: sha512-NS3levdsRIUOmiJ8FZWCP7LG3QpJyrs/TE0Zpf1yvZu8cAJJ6QMW92H1c7kWpdIHo8RvmLxN/o2JXTKHp74lUA==}
@@ -8136,14 +7987,6 @@ packages:
size-sensor@1.0.3:
resolution: {integrity: sha512-+k9mJ2/rQMiRmQUcjn+qznch260leIXY8r4FyYKKyRBO/s5UoeMAHGkCJyE1R/4wrIhTJONfyloY55SkE7ve3A==}
- slice-ansi@7.1.2:
- resolution: {integrity: sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==}
- engines: {node: '>=18'}
-
- slice-ansi@8.0.0:
- resolution: {integrity: sha512-stxByr12oeeOyY2BlviTNQlYV5xOj47GirPr4yA1hE9JCtxfQN0+tVbkxwCtYDQWhEKWFHsEK48ORg5jrouCAg==}
- engines: {node: '>=20'}
-
smol-toml@1.6.1:
resolution: {integrity: sha512-dWUG8F5sIIARXih1DTaQAX4SsiTXhInKf1buxdY9DIg4ZYPZK5nGM1VRIYmEbDbsHt7USo99xSLFu5Q1IqTmsg==}
engines: {node: '>= 18'}
@@ -8227,10 +8070,6 @@ packages:
resolution: {integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==}
engines: {node: '>=0.6.19'}
- string-argv@0.3.2:
- resolution: {integrity: sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==}
- engines: {node: '>=0.6.19'}
-
string-ts@2.3.1:
resolution: {integrity: sha512-xSJq+BS52SaFFAVxuStmx6n5aYZU571uYUnUrPXkPFCfdHyZMMlbP2v2Wx5sNBnAVzq/2+0+mcBLBa3Xa5ubYw==}
@@ -8345,16 +8184,11 @@ packages:
'@eslint/css':
optional: true
- tailwind-merge@2.6.1:
- resolution: {integrity: sha512-Oo6tHdpZsGpkKG88HJ8RR1rg/RdnEkQEfMoEk2x1XRI3F1AxeU+ijRXpiVUF4UbLfcxxRGw6TbUINKYdWVsQTQ==}
-
tailwind-merge@3.5.0:
resolution: {integrity: sha512-I8K9wewnVDkL1NTGoqWmVEIlUcB9gFriAEkXkfCjX5ib8ezGxtR3xD7iZIxrfArjEsH7F1CHD4RFUtxefdqV/A==}
- tailwindcss@3.4.19:
- resolution: {integrity: sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==}
- engines: {node: '>=14.0.0'}
- hasBin: true
+ tailwindcss@4.2.2:
+ resolution: {integrity: sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==}
tapable@2.3.2:
resolution: {integrity: sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==}
@@ -8967,10 +8801,6 @@ packages:
resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==}
engines: {node: '>=0.10.0'}
- wrap-ansi@9.0.2:
- resolution: {integrity: sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==}
- engines: {node: '>=18'}
-
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
@@ -9257,50 +9087,50 @@ snapshots:
idb: 8.0.0
tslib: 2.8.1
- '@antfu/eslint-config@7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.31)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@1.21.7)))(eslint@10.1.0(jiti@1.21.7))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(typescript@5.9.3)':
+ '@antfu/eslint-config@7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.31)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(typescript@5.9.3)':
dependencies:
'@antfu/install-pkg': 1.1.0
'@clack/prompts': 1.1.0
- '@e18e/eslint-plugin': 0.2.0(eslint@10.1.0(jiti@1.21.7))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))
- '@eslint-community/eslint-plugin-eslint-comments': 4.7.1(eslint@10.1.0(jiti@1.21.7))
+ '@e18e/eslint-plugin': 0.2.0(eslint@10.1.0(jiti@2.6.1))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))
+ '@eslint-community/eslint-plugin-eslint-comments': 4.7.1(eslint@10.1.0(jiti@2.6.1))
'@eslint/markdown': 7.5.1
- '@stylistic/eslint-plugin': 5.10.0(eslint@10.1.0(jiti@1.21.7))
- '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@vitest/eslint-plugin': 1.6.13(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@stylistic/eslint-plugin': 5.10.0(eslint@10.1.0(jiti@2.6.1))
+ '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@vitest/eslint-plugin': 1.6.13(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
ansis: 4.2.0
cac: 7.0.0
- eslint: 10.1.0(jiti@1.21.7)
- eslint-config-flat-gitignore: 2.3.0(eslint@10.1.0(jiti@1.21.7))
+ eslint: 10.1.0(jiti@2.6.1)
+ eslint-config-flat-gitignore: 2.3.0(eslint@10.1.0(jiti@2.6.1))
eslint-flat-config-utils: 3.0.2
- eslint-merge-processors: 2.0.0(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-antfu: 3.2.2(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-command: 3.5.2(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-import-lite: 0.5.2(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-jsdoc: 62.8.1(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-jsonc: 3.1.2(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-n: 17.24.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ eslint-merge-processors: 2.0.0(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-antfu: 3.2.2(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-command: 3.5.2(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-import-lite: 0.5.2(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-jsdoc: 62.8.1(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-jsonc: 3.1.2(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-n: 17.24.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
eslint-plugin-no-only-tests: 3.3.0
- eslint-plugin-perfectionist: 5.7.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint-plugin-pnpm: 1.6.0(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-regexp: 3.1.0(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-toml: 1.3.1(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-unicorn: 63.0.0(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-unused-imports: 4.4.1(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@1.21.7)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@1.21.7)))
- eslint-plugin-yml: 3.3.1(eslint@10.1.0(jiti@1.21.7))
- eslint-processor-vue-blocks: 2.0.0(@vue/compiler-sfc@3.5.31)(eslint@10.1.0(jiti@1.21.7))
+ eslint-plugin-perfectionist: 5.7.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint-plugin-pnpm: 1.6.0(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-regexp: 3.1.0(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-toml: 1.3.1(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-unicorn: 63.0.0(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-unused-imports: 4.4.1(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1)))
+ eslint-plugin-yml: 3.3.1(eslint@10.1.0(jiti@2.6.1))
+ eslint-processor-vue-blocks: 2.0.0(@vue/compiler-sfc@3.5.31)(eslint@10.1.0(jiti@2.6.1))
globals: 17.4.0
local-pkg: 1.1.2
parse-gitignore: 2.0.0
toml-eslint-parser: 1.0.3
- vue-eslint-parser: 10.4.0(eslint@10.1.0(jiti@1.21.7))
+ vue-eslint-parser: 10.4.0(eslint@10.1.0(jiti@2.6.1))
yaml-eslint-parser: 2.0.0
optionalDependencies:
- '@eslint-react/eslint-plugin': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/eslint-plugin': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@next/eslint-plugin-next': 16.2.1
- eslint-plugin-react-hooks: 7.0.1(eslint@10.1.0(jiti@1.21.7))
- eslint-plugin-react-refresh: 0.5.2(eslint@10.1.0(jiti@1.21.7))
+ eslint-plugin-react-hooks: 7.0.1(eslint@10.1.0(jiti@2.6.1))
+ eslint-plugin-react-refresh: 0.5.2(eslint@10.1.0(jiti@2.6.1))
transitivePeerDependencies:
- '@eslint/json'
- '@typescript-eslint/rule-tester'
@@ -9658,17 +9488,17 @@ snapshots:
'@cucumber/tag-expressions@9.1.0': {}
- '@e18e/eslint-plugin@0.2.0(eslint@10.1.0(jiti@1.21.7))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))':
+ '@e18e/eslint-plugin@0.2.0(eslint@10.1.0(jiti@2.6.1))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))':
dependencies:
- eslint-plugin-depend: 1.5.0(eslint@10.1.0(jiti@1.21.7))
+ eslint-plugin-depend: 1.5.0(eslint@10.1.0(jiti@2.6.1))
optionalDependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
oxlint: 1.57.0(oxlint-tsgolint@0.17.3)
- '@egoist/tailwindcss-icons@1.9.2(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.3))':
+ '@egoist/tailwindcss-icons@1.9.2(tailwindcss@4.2.2)':
dependencies:
'@iconify/utils': 3.1.0
- tailwindcss: 3.4.19(tsx@4.21.0)(yaml@2.8.3)
+ tailwindcss: 4.2.2
'@emnapi/core@1.9.1':
dependencies:
@@ -9776,100 +9606,95 @@ snapshots:
'@esbuild/win32-x64@0.27.2':
optional: true
- '@eslint-community/eslint-plugin-eslint-comments@4.7.1(eslint@10.1.0(jiti@1.21.7))':
+ '@eslint-community/eslint-plugin-eslint-comments@4.7.1(eslint@10.1.0(jiti@2.6.1))':
dependencies:
escape-string-regexp: 4.0.0
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
ignore: 7.0.5
- '@eslint-community/eslint-utils@4.9.1(eslint@10.1.0(jiti@1.21.7))':
- dependencies:
- eslint: 10.1.0(jiti@1.21.7)
- eslint-visitor-keys: 3.4.3
-
'@eslint-community/eslint-utils@4.9.1(eslint@10.1.0(jiti@2.6.1))':
dependencies:
eslint: 10.1.0(jiti@2.6.1)
eslint-visitor-keys: 3.4.3
- '@eslint-community/eslint-utils@4.9.1(eslint@9.27.0(jiti@1.21.7))':
+ '@eslint-community/eslint-utils@4.9.1(eslint@9.27.0(jiti@2.6.1))':
dependencies:
- eslint: 9.27.0(jiti@1.21.7)
+ eslint: 9.27.0(jiti@2.6.1)
eslint-visitor-keys: 3.4.3
'@eslint-community/regexpp@4.12.2': {}
- '@eslint-react/ast@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@eslint-react/ast@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@typescript-eslint/types': 8.57.2
'@typescript-eslint/typescript-estree': 8.57.2(typescript@5.9.3)
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
string-ts: 2.3.1
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@eslint-react/core@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@eslint-react/core@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
ts-pattern: 5.9.0
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
- eslint-plugin-react-dom: 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint-plugin-react-naming-convention: 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint-plugin-react-rsc: 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint-plugin-react-web-api: 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint-plugin-react-x: 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
+ eslint-plugin-react-dom: 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint-plugin-react-naming-convention: 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint-plugin-react-rsc: 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint-plugin-react-web-api: 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint-plugin-react-x: 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
ts-api-utils: 2.5.0(typescript@5.9.3)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@eslint-react/shared@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@eslint-react/shared@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
ts-pattern: 5.9.0
typescript: 5.9.3
zod: 4.3.6
transitivePeerDependencies:
- supports-color
- '@eslint-react/var@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@eslint-react/var@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
ts-pattern: 5.9.0
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@eslint/compat@2.0.3(eslint@10.1.0(jiti@1.21.7))':
+ '@eslint/compat@2.0.3(eslint@10.1.0(jiti@2.6.1))':
dependencies:
'@eslint/core': 1.1.1
optionalDependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
'@eslint/config-array@0.20.1':
dependencies:
@@ -10180,11 +10005,11 @@ snapshots:
dependencies:
minipass: 7.1.3
- '@joshwooding/vite-plugin-react-docgen-typescript@0.6.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)':
+ '@joshwooding/vite-plugin-react-docgen-typescript@0.6.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)':
dependencies:
glob: 13.0.6
react-docgen-typescript: 2.4.0(typescript@5.9.3)
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
optionalDependencies:
typescript: 5.9.3
@@ -10499,8 +10324,6 @@ snapshots:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.20.1
- '@nolyfill/hasown@1.0.44': {}
-
'@nolyfill/is-core-module@1.0.39': {}
'@nolyfill/safer-buffer@1.0.44': {}
@@ -11431,10 +11254,10 @@ snapshots:
'@standard-schema/spec@1.1.0': {}
- '@storybook/addon-docs@10.3.3(@types/react@19.2.14)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
+ '@storybook/addon-docs@10.3.3(@types/react@19.2.14)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
dependencies:
'@mdx-js/react': 3.1.1(@types/react@19.2.14)(react@19.2.4)
- '@storybook/csf-plugin': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
+ '@storybook/csf-plugin': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@storybook/icons': 2.0.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
'@storybook/react-dom-shim': 10.3.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))
react: 19.2.4
@@ -11464,25 +11287,25 @@ snapshots:
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
ts-dedent: 2.2.0
- '@storybook/builder-vite@10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
+ '@storybook/builder-vite@10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
dependencies:
- '@storybook/csf-plugin': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
+ '@storybook/csf-plugin': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
ts-dedent: 2.2.0
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- esbuild
- rollup
- webpack
- '@storybook/csf-plugin@10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
+ '@storybook/csf-plugin@10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
dependencies:
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
unplugin: 2.3.11
optionalDependencies:
esbuild: 0.27.2
rollup: 4.59.0
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
webpack: 5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)
'@storybook/global@5.0.0': {}
@@ -11492,18 +11315,18 @@ snapshots:
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
- '@storybook/nextjs-vite@10.3.3(@babel/core@7.29.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
+ '@storybook/nextjs-vite@10.3.3(@babel/core@7.29.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
dependencies:
- '@storybook/builder-vite': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
+ '@storybook/builder-vite': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@storybook/react': 10.3.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
- '@storybook/react-vite': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
+ '@storybook/react-vite': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
next: 16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0)
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
styled-jsx: 5.1.6(@babel/core@7.29.0)(react@19.2.4)
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
- vite-plugin-storybook-nextjs: 3.2.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite-plugin-storybook-nextjs: 3.2.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
optionalDependencies:
typescript: 5.9.3
transitivePeerDependencies:
@@ -11520,11 +11343,11 @@ snapshots:
react-dom: 19.2.4(react@19.2.4)
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
- '@storybook/react-vite@10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
+ '@storybook/react-vite@10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))':
dependencies:
- '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)
+ '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)
'@rollup/pluginutils': 5.3.0(rollup@4.59.0)
- '@storybook/builder-vite': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
+ '@storybook/builder-vite': 10.3.3(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(rollup@4.59.0)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@storybook/react': 10.3.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
empathic: 2.0.0
magic-string: 0.30.21
@@ -11534,7 +11357,7 @@ snapshots:
resolve: 1.22.11
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
tsconfig-paths: 4.2.0
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- esbuild
- rollup
@@ -11565,11 +11388,11 @@ snapshots:
transitivePeerDependencies:
- supports-color
- '@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@1.21.7))':
+ '@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1))':
dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
'@typescript-eslint/types': 8.57.2
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
eslint-visitor-keys: 4.2.1
espree: 10.4.0
estraverse: 5.3.0
@@ -11599,10 +11422,86 @@ snapshots:
valibot: 1.3.1(typescript@5.9.3)
zod: 4.3.6
- '@tailwindcss/typography@0.5.19(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.3))':
+ '@tailwindcss/node@4.2.2':
+ dependencies:
+ '@jridgewell/remapping': 2.3.5
+ enhanced-resolve: 5.20.1
+ jiti: 2.6.1
+ lightningcss: 1.32.0
+ magic-string: 0.30.21
+ source-map-js: 1.2.1
+ tailwindcss: 4.2.2
+
+ '@tailwindcss/oxide-android-arm64@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-darwin-arm64@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-darwin-x64@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-freebsd-x64@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-linux-arm64-gnu@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-linux-arm64-musl@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-linux-x64-gnu@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-linux-x64-musl@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-wasm32-wasi@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-win32-arm64-msvc@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide-win32-x64-msvc@4.2.2':
+ optional: true
+
+ '@tailwindcss/oxide@4.2.2':
+ optionalDependencies:
+ '@tailwindcss/oxide-android-arm64': 4.2.2
+ '@tailwindcss/oxide-darwin-arm64': 4.2.2
+ '@tailwindcss/oxide-darwin-x64': 4.2.2
+ '@tailwindcss/oxide-freebsd-x64': 4.2.2
+ '@tailwindcss/oxide-linux-arm-gnueabihf': 4.2.2
+ '@tailwindcss/oxide-linux-arm64-gnu': 4.2.2
+ '@tailwindcss/oxide-linux-arm64-musl': 4.2.2
+ '@tailwindcss/oxide-linux-x64-gnu': 4.2.2
+ '@tailwindcss/oxide-linux-x64-musl': 4.2.2
+ '@tailwindcss/oxide-wasm32-wasi': 4.2.2
+ '@tailwindcss/oxide-win32-arm64-msvc': 4.2.2
+ '@tailwindcss/oxide-win32-x64-msvc': 4.2.2
+
+ '@tailwindcss/postcss@4.2.2':
+ dependencies:
+ '@alloc/quick-lru': 5.2.0
+ '@tailwindcss/node': 4.2.2
+ '@tailwindcss/oxide': 4.2.2
+ postcss: 8.5.8
+ tailwindcss: 4.2.2
+
+ '@tailwindcss/typography@0.5.19(tailwindcss@4.2.2)':
dependencies:
postcss-selector-parser: 6.0.10
- tailwindcss: 3.4.19(tsx@4.21.0)(yaml@2.8.3)
+ tailwindcss: 4.2.2
+
+ '@tailwindcss/vite@4.2.2(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
+ dependencies:
+ '@tailwindcss/node': 4.2.2
+ '@tailwindcss/oxide': 4.2.2
+ tailwindcss: 4.2.2
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
'@tanstack/devtools-client@0.0.6':
dependencies:
@@ -11648,10 +11547,10 @@ snapshots:
- csstype
- utf-8-validate
- '@tanstack/eslint-plugin-query@5.95.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@tanstack/eslint-plugin-query@5.95.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
optionalDependencies:
typescript: 5.9.3
transitivePeerDependencies:
@@ -11782,10 +11681,10 @@ snapshots:
dependencies:
'@testing-library/dom': 10.4.1
- '@tsslint/cli@3.0.2(@tsslint/compat-eslint@3.0.2(jiti@1.21.7)(typescript@5.9.3))(typescript@5.9.3)':
+ '@tsslint/cli@3.0.2(@tsslint/compat-eslint@3.0.2(jiti@2.6.1)(typescript@5.9.3))(typescript@5.9.3)':
dependencies:
'@clack/prompts': 0.8.2
- '@tsslint/config': 3.0.2(@tsslint/compat-eslint@3.0.2(jiti@1.21.7)(typescript@5.9.3))(typescript@5.9.3)
+ '@tsslint/config': 3.0.2(@tsslint/compat-eslint@3.0.2(jiti@2.6.1)(typescript@5.9.3))(typescript@5.9.3)
'@tsslint/core': 3.0.2
'@volar/language-core': 2.4.28
'@volar/language-hub': 0.0.1
@@ -11796,23 +11695,23 @@ snapshots:
- '@tsslint/compat-eslint'
- tsl
- '@tsslint/compat-eslint@3.0.2(jiti@1.21.7)(typescript@5.9.3)':
+ '@tsslint/compat-eslint@3.0.2(jiti@2.6.1)(typescript@5.9.3)':
dependencies:
'@tsslint/types': 3.0.2
- '@typescript-eslint/parser': 8.57.2(eslint@9.27.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 9.27.0(jiti@1.21.7)
+ '@typescript-eslint/parser': 8.57.2(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 9.27.0(jiti@2.6.1)
transitivePeerDependencies:
- jiti
- supports-color
- typescript
- '@tsslint/config@3.0.2(@tsslint/compat-eslint@3.0.2(jiti@1.21.7)(typescript@5.9.3))(typescript@5.9.3)':
+ '@tsslint/config@3.0.2(@tsslint/compat-eslint@3.0.2(jiti@2.6.1)(typescript@5.9.3))(typescript@5.9.3)':
dependencies:
'@tsslint/types': 3.0.2
minimatch: 10.2.4
ts-api-utils: 2.5.0(typescript@5.9.3)
optionalDependencies:
- '@tsslint/compat-eslint': 3.0.2(jiti@1.21.7)(typescript@5.9.3)
+ '@tsslint/compat-eslint': 3.0.2(jiti@2.6.1)(typescript@5.9.3)
transitivePeerDependencies:
- typescript
@@ -12039,10 +11938,6 @@ snapshots:
dependencies:
'@types/node': 25.5.0
- '@types/postcss-js@4.1.0':
- dependencies:
- postcss: 8.5.8
-
'@types/qs@6.15.0': {}
'@types/react-dom@19.2.3(@types/react@19.2.14)':
@@ -12085,22 +11980,6 @@ snapshots:
'@types/zen-observable@0.8.3': {}
- '@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
- dependencies:
- '@eslint-community/regexpp': 4.12.2
- '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@typescript-eslint/visitor-keys': 8.57.2
- eslint: 10.1.0(jiti@1.21.7)
- ignore: 7.0.5
- natural-compare: 1.4.0
- ts-api-utils: 2.5.0(typescript@5.9.3)
- typescript: 5.9.3
- transitivePeerDependencies:
- - supports-color
-
'@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@eslint-community/regexpp': 4.12.2
@@ -12117,18 +11996,6 @@ snapshots:
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
- dependencies:
- '@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/types': 8.57.2
- '@typescript-eslint/typescript-estree': 8.57.2(typescript@5.9.3)
- '@typescript-eslint/visitor-keys': 8.57.2
- debug: 4.4.3(supports-color@8.1.1)
- eslint: 10.1.0(jiti@1.21.7)
- typescript: 5.9.3
- transitivePeerDependencies:
- - supports-color
-
'@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@typescript-eslint/scope-manager': 8.57.2
@@ -12141,14 +12008,14 @@ snapshots:
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/parser@8.57.2(eslint@9.27.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@typescript-eslint/parser@8.57.2(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@typescript-eslint/scope-manager': 8.57.2
'@typescript-eslint/types': 8.57.2
'@typescript-eslint/typescript-estree': 8.57.2(typescript@5.9.3)
'@typescript-eslint/visitor-keys': 8.57.2
debug: 4.4.3(supports-color@8.1.1)
- eslint: 9.27.0(jiti@1.21.7)
+ eslint: 9.27.0(jiti@2.6.1)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
@@ -12162,13 +12029,13 @@ snapshots:
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/typescript-estree': 8.57.2(typescript@5.9.3)
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
ajv: 6.14.0
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
json-stable-stringify-without-jsonify: 1.0.1
lodash.merge: 4.6.2
semver: 7.7.4
@@ -12185,18 +12052,6 @@ snapshots:
dependencies:
typescript: 5.9.3
- '@typescript-eslint/type-utils@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
- dependencies:
- '@typescript-eslint/types': 8.57.2
- '@typescript-eslint/typescript-estree': 8.57.2(typescript@5.9.3)
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- debug: 4.4.3(supports-color@8.1.1)
- eslint: 10.1.0(jiti@1.21.7)
- ts-api-utils: 2.5.0(typescript@5.9.3)
- typescript: 5.9.3
- transitivePeerDependencies:
- - supports-color
-
'@typescript-eslint/type-utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@typescript-eslint/types': 8.57.2
@@ -12226,17 +12081,6 @@ snapshots:
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
- dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
- '@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/types': 8.57.2
- '@typescript-eslint/typescript-estree': 8.57.2(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
- typescript: 5.9.3
- transitivePeerDependencies:
- - supports-color
-
'@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
@@ -12312,12 +12156,12 @@ snapshots:
'@resvg/resvg-wasm': 2.4.0
satori: 0.16.0
- '@vitejs/devtools-kit@0.1.11(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0)':
+ '@vitejs/devtools-kit@0.1.11(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0)':
dependencies:
'@vitejs/devtools-rpc': 0.1.11(typescript@5.9.3)(ws@8.20.0)
birpc: 4.0.0
ohash: 2.0.11
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- typescript
- ws
@@ -12334,12 +12178,12 @@ snapshots:
transitivePeerDependencies:
- typescript
- '@vitejs/plugin-react@6.0.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
+ '@vitejs/plugin-react@6.0.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
dependencies:
'@rolldown/pluginutils': 1.0.0-rc.7
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
- '@vitejs/plugin-rsc@0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)':
+ '@vitejs/plugin-rsc@0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)':
dependencies:
'@rolldown/pluginutils': 1.0.0-rc.5
es-module-lexer: 2.0.0
@@ -12351,15 +12195,15 @@ snapshots:
srvx: 0.11.13
strip-literal: 3.1.0
turbo-stream: 3.2.0
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
- vitefu: 1.1.2(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vitefu: 1.1.2(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
optionalDependencies:
react-server-dom-webpack: 19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
- '@vitest/coverage-v8@4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
+ '@vitest/coverage-v8@4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
dependencies:
'@bcoe/v8-coverage': 1.0.2
- '@vitest/utils': 4.1.2
+ '@vitest/utils': 4.1.1
ast-v8-to-istanbul: 1.0.0
istanbul-lib-coverage: 3.2.2
istanbul-lib-report: 3.0.1
@@ -12368,12 +12212,12 @@ snapshots:
obug: 2.1.1
std-env: 4.0.0
tinyrainbow: 3.1.0
- vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
- '@vitest/coverage-v8@4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))':
+ '@vitest/coverage-v8@4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))':
dependencies:
'@bcoe/v8-coverage': 1.0.2
- '@vitest/utils': 4.1.2
+ '@vitest/utils': 4.1.1
ast-v8-to-istanbul: 1.0.0
istanbul-lib-coverage: 3.2.2
istanbul-lib-report: 3.0.1
@@ -12384,15 +12228,15 @@ snapshots:
tinyrainbow: 3.1.0
vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)'
- '@vitest/eslint-plugin@1.6.13(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
+ '@vitest/eslint-plugin@1.6.13(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
optionalDependencies:
- '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
typescript: 5.9.3
- vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- supports-color
@@ -12408,7 +12252,7 @@ snapshots:
dependencies:
tinyrainbow: 2.0.0
- '@vitest/pretty-format@4.1.2':
+ '@vitest/pretty-format@4.1.1':
dependencies:
tinyrainbow: 3.1.0
@@ -12422,29 +12266,12 @@ snapshots:
loupe: 3.2.1
tinyrainbow: 2.0.0
- '@vitest/utils@4.1.2':
+ '@vitest/utils@4.1.1':
dependencies:
- '@vitest/pretty-format': 4.1.2
+ '@vitest/pretty-format': 4.1.1
convert-source-map: 2.0.0
tinyrainbow: 3.1.0
- '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)':
- dependencies:
- '@oxc-project/runtime': 0.121.0
- '@oxc-project/types': 0.122.0
- lightningcss: 1.32.0
- postcss: 8.5.8
- optionalDependencies:
- '@types/node': 25.5.0
- esbuild: 0.27.2
- fsevents: 2.3.3
- jiti: 1.21.7
- sass: 1.98.0
- terser: 5.46.1
- tsx: 4.21.0
- typescript: 5.9.3
- yaml: 2.8.3
-
'@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)':
dependencies:
'@oxc-project/runtime': 0.121.0
@@ -12480,11 +12307,11 @@ snapshots:
'@voidzero-dev/vite-plus-linux-x64-musl@0.1.14':
optional: true
- '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)':
+ '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)':
dependencies:
'@standard-schema/spec': 1.1.0
'@types/chai': 5.2.3
- '@voidzero-dev/vite-plus-core': 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
+ '@voidzero-dev/vite-plus-core': 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
es-module-lexer: 1.7.0
obug: 2.1.1
pixelmatch: 7.1.0
@@ -12494,7 +12321,7 @@ snapshots:
tinybench: 2.9.0
tinyexec: 1.0.4
tinyglobby: 0.2.15
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
ws: 8.20.0
optionalDependencies:
'@types/node': 25.5.0
@@ -12753,10 +12580,6 @@ snapshots:
json-schema-traverse: 1.0.0
require-from-string: 2.0.2
- ansi-escapes@7.3.0:
- dependencies:
- environment: 1.1.0
-
ansi-regex@4.1.1: {}
ansi-regex@5.0.1: {}
@@ -12769,21 +12592,12 @@ snapshots:
ansi-styles@5.2.0: {}
- ansi-styles@6.2.3: {}
-
ansis@4.2.0: {}
any-promise@1.3.0: {}
- anymatch@3.1.3:
- dependencies:
- normalize-path: 3.0.0
- picomatch: 2.3.2
-
are-docs-informative@0.0.2: {}
- arg@5.0.2: {}
-
argparse@2.0.1: {}
aria-hidden@1.2.6:
@@ -12818,25 +12632,6 @@ snapshots:
async@3.2.6: {}
- asynckit@0.4.0: {}
-
- autoprefixer@10.4.27(postcss@8.5.8):
- dependencies:
- browserslist: 4.28.1
- caniuse-lite: 1.0.30001781
- fraction.js: 5.3.4
- picocolors: 1.1.1
- postcss: 8.5.8
- postcss-value-parser: 4.2.0
-
- axios@1.14.0:
- dependencies:
- follow-redirects: 1.15.11
- form-data: 4.0.5
- proxy-from-env: 2.1.0
- transitivePeerDependencies:
- - debug
-
bail@2.0.2: {}
balanced-match@1.0.2: {}
@@ -12852,8 +12647,6 @@ snapshots:
baseline-browser-mapping@2.10.12: {}
- binary-extensions@2.3.0: {}
-
birecord@0.1.1: {}
birpc@4.0.0: {}
@@ -12916,15 +12709,8 @@ snapshots:
cac@7.0.0: {}
- call-bind-apply-helpers@1.0.2:
- dependencies:
- es-errors: 1.3.0
- function-bind: 1.1.2
-
callsites@3.1.0: {}
- camelcase-css@2.0.1: {}
-
camelize@1.0.1: {}
caniuse-lite@1.0.30001781: {}
@@ -13016,18 +12802,6 @@ snapshots:
'@chevrotain/utils': 11.1.2
lodash-es: 4.17.23
- chokidar@3.6.0:
- dependencies:
- anymatch: 3.1.3
- braces: 3.0.3
- glob-parent: 5.1.2
- is-binary-path: 2.1.0
- is-glob: 4.0.3
- normalize-path: 3.0.0
- readdirp: 3.6.0
- optionalDependencies:
- fsevents: 2.3.3
-
chokidar@4.0.3:
dependencies:
readdirp: 4.1.2
@@ -13057,21 +12831,12 @@ snapshots:
dependencies:
escape-string-regexp: 1.0.5
- cli-cursor@5.0.0:
- dependencies:
- restore-cursor: 5.1.0
-
cli-table3@0.6.5:
dependencies:
string-width: 8.2.0
optionalDependencies:
'@colors/colors': 1.5.0
- cli-truncate@5.2.0:
- dependencies:
- slice-ansi: 8.0.0
- string-width: 8.2.0
-
client-only@0.0.1: {}
clsx@2.1.1: {}
@@ -13108,12 +12873,6 @@ snapshots:
color-name@1.1.4: {}
- colorette@2.0.20: {}
-
- combined-stream@1.0.8:
- dependencies:
- delayed-stream: 1.0.0
-
comma-separated-tokens@1.0.8: {}
comma-separated-tokens@2.0.3: {}
@@ -13443,8 +13202,6 @@ snapshots:
dependencies:
robust-predicates: 3.0.3
- delayed-stream@1.0.0: {}
-
dequal@2.0.3: {}
destr@2.0.5: {}
@@ -13457,14 +13214,10 @@ snapshots:
dependencies:
dequal: 2.0.3
- didyoumean@1.2.2: {}
-
diff-sequences@29.6.3: {}
diff@4.0.4: {}
- dlv@1.1.3: {}
-
doctrine@3.0.0:
dependencies:
esutils: 2.0.3
@@ -13501,12 +13254,6 @@ snapshots:
dotenv@16.6.1: {}
- dunder-proto@1.0.1:
- dependencies:
- call-bind-apply-helpers: 1.0.2
- es-errors: 1.3.0
- gopd: 1.2.0
-
echarts-for-react@3.0.6(echarts@6.0.0)(react@19.2.4):
dependencies:
echarts: 6.0.0
@@ -13565,33 +13312,16 @@ snapshots:
entities@7.0.1: {}
- environment@1.1.0: {}
-
error-stack-parser-es@1.0.5: {}
error-stack-parser@2.1.4:
dependencies:
stackframe: 1.3.4
- es-define-property@1.0.1: {}
-
- es-errors@1.3.0: {}
-
es-module-lexer@1.7.0: {}
es-module-lexer@2.0.0: {}
- es-object-atoms@1.1.1:
- dependencies:
- es-errors: 1.3.0
-
- es-set-tostringtag@2.1.0:
- dependencies:
- es-errors: 1.3.0
- get-intrinsic: 1.3.0
- has-tostringtag: 1.0.2
- hasown: '@nolyfill/hasown@1.0.44'
-
es-toolkit@1.45.1: {}
esast-util-from-estree@2.0.0:
@@ -13647,46 +13377,46 @@ snapshots:
escape-string-regexp@5.0.0: {}
- eslint-compat-utils@0.5.1(eslint@10.1.0(jiti@1.21.7)):
+ eslint-compat-utils@0.5.1(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
semver: 7.7.4
- eslint-config-flat-gitignore@2.3.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-config-flat-gitignore@2.3.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- '@eslint/compat': 2.0.3(eslint@10.1.0(jiti@1.21.7))
- eslint: 10.1.0(jiti@1.21.7)
+ '@eslint/compat': 2.0.3(eslint@10.1.0(jiti@2.6.1))
+ eslint: 10.1.0(jiti@2.6.1)
eslint-flat-config-utils@3.0.2:
dependencies:
'@eslint/config-helpers': 0.5.3
pathe: 2.0.3
- eslint-json-compat-utils@0.2.3(eslint@10.1.0(jiti@1.21.7))(jsonc-eslint-parser@3.1.0):
+ eslint-json-compat-utils@0.2.3(eslint@10.1.0(jiti@2.6.1))(jsonc-eslint-parser@3.1.0):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
esquery: 1.7.0
jsonc-eslint-parser: 3.1.0
- eslint-markdown@0.6.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-markdown@0.6.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@eslint/markdown': 7.5.1
micromark-util-normalize-identifier: 2.0.1
parse5: 8.0.0
optionalDependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
transitivePeerDependencies:
- supports-color
- eslint-merge-processors@2.0.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-merge-processors@2.0.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
- eslint-plugin-antfu@3.2.2(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-antfu@3.2.2(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
- eslint-plugin-better-tailwindcss@4.3.2(eslint@10.1.0(jiti@1.21.7))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.3))(typescript@5.9.3):
+ eslint-plugin-better-tailwindcss@4.3.2(eslint@10.1.0(jiti@2.6.1))(oxlint@1.57.0(oxlint-tsgolint@0.17.3))(tailwindcss@4.2.2)(typescript@5.9.3):
dependencies:
'@eslint/css-tree': 3.6.9
'@valibot/to-json-schema': 1.6.0(valibot@1.3.1(typescript@5.9.3))
@@ -13694,47 +13424,47 @@ snapshots:
jiti: 2.6.1
synckit: 0.11.12
tailwind-csstree: 0.1.5
- tailwindcss: 3.4.19(tsx@4.21.0)(yaml@2.8.3)
+ tailwindcss: 4.2.2
tsconfig-paths-webpack-plugin: 4.2.0
valibot: 1.3.1(typescript@5.9.3)
optionalDependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
oxlint: 1.57.0(oxlint-tsgolint@0.17.3)
transitivePeerDependencies:
- '@eslint/css'
- typescript
- eslint-plugin-command@3.5.2(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-command@3.5.2(@typescript-eslint/rule-tester@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.2(typescript@5.9.3))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@es-joy/jsdoccomment': 0.84.0
- '@typescript-eslint/rule-tester': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/rule-tester': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/typescript-estree': 8.57.2(typescript@5.9.3)
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
- eslint-plugin-depend@1.5.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-depend@1.5.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
empathic: 2.0.0
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
module-replacements: 2.11.0
semver: 7.7.4
- eslint-plugin-es-x@7.8.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-es-x@7.8.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
'@eslint-community/regexpp': 4.12.2
- eslint: 10.1.0(jiti@1.21.7)
- eslint-compat-utils: 0.5.1(eslint@10.1.0(jiti@1.21.7))
+ eslint: 10.1.0(jiti@2.6.1)
+ eslint-compat-utils: 0.5.1(eslint@10.1.0(jiti@2.6.1))
- eslint-plugin-hyoban@0.14.1(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-hyoban@0.14.1(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
- eslint-plugin-import-lite@0.5.2(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-import-lite@0.5.2(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
- eslint-plugin-jsdoc@62.8.1(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-jsdoc@62.8.1(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@es-joy/jsdoccomment': 0.84.0
'@es-joy/resolve.exports': 1.2.0
@@ -13742,7 +13472,7 @@ snapshots:
comment-parser: 1.4.5
debug: 4.4.3(supports-color@8.1.1)
escape-string-regexp: 4.0.0
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
espree: 11.2.0
esquery: 1.7.0
html-entities: 2.6.0
@@ -13754,27 +13484,27 @@ snapshots:
transitivePeerDependencies:
- supports-color
- eslint-plugin-jsonc@3.1.2(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-jsonc@3.1.2(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
'@eslint/core': 1.1.1
'@eslint/plugin-kit': 0.6.1
'@ota-meshi/ast-token-store': 0.3.0
diff-sequences: 29.6.3
- eslint: 10.1.0(jiti@1.21.7)
- eslint-json-compat-utils: 0.2.3(eslint@10.1.0(jiti@1.21.7))(jsonc-eslint-parser@3.1.0)
+ eslint: 10.1.0(jiti@2.6.1)
+ eslint-json-compat-utils: 0.2.3(eslint@10.1.0(jiti@2.6.1))(jsonc-eslint-parser@3.1.0)
jsonc-eslint-parser: 3.1.0
natural-compare: 1.4.0
synckit: 0.11.12
transitivePeerDependencies:
- '@eslint/json'
- eslint-plugin-markdown-preferences@0.40.3(@eslint/markdown@7.5.1)(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-markdown-preferences@0.40.3(@eslint/markdown@7.5.1)(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@eslint/markdown': 7.5.1
diff-sequences: 29.6.3
emoji-regex-xs: 2.0.1
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
mdast-util-from-markdown: 2.0.3
mdast-util-frontmatter: 2.0.1
mdast-util-gfm: 3.1.0
@@ -13789,12 +13519,12 @@ snapshots:
transitivePeerDependencies:
- supports-color
- eslint-plugin-n@17.24.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-n@17.24.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
enhanced-resolve: 5.20.1
- eslint: 10.1.0(jiti@1.21.7)
- eslint-plugin-es-x: 7.8.0(eslint@10.1.0(jiti@1.21.7))
+ eslint: 10.1.0(jiti@2.6.1)
+ eslint-plugin-es-x: 7.8.0(eslint@10.1.0(jiti@2.6.1))
get-tsconfig: 4.13.7
globals: 15.15.0
globrex: 0.1.2
@@ -13804,9 +13534,9 @@ snapshots:
transitivePeerDependencies:
- typescript
- eslint-plugin-no-barrel-files@1.2.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-no-barrel-files@1.2.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
transitivePeerDependencies:
- eslint
- supports-color
@@ -13814,19 +13544,19 @@ snapshots:
eslint-plugin-no-only-tests@3.3.0: {}
- eslint-plugin-perfectionist@5.7.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-perfectionist@5.7.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
natural-orderby: 5.0.0
transitivePeerDependencies:
- supports-color
- typescript
- eslint-plugin-pnpm@1.6.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-pnpm@1.6.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
empathic: 2.0.0
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
jsonc-eslint-parser: 3.1.0
pathe: 2.0.3
pnpm-workspace-yaml: 1.6.0
@@ -13834,98 +13564,98 @@ snapshots:
yaml: 2.8.3
yaml-eslint-parser: 2.0.0
- eslint-plugin-react-dom@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-react-dom@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
compare-versions: 6.1.1
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
ts-pattern: 5.9.0
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@babel/core': 7.29.0
'@babel/parser': 7.29.2
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
hermes-parser: 0.25.1
zod: 4.3.6
zod-validation-error: 4.0.2(zod@4.3.6)
transitivePeerDependencies:
- supports-color
- eslint-plugin-react-naming-convention@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-react-naming-convention@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
compare-versions: 6.1.1
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
string-ts: 2.3.1
ts-pattern: 5.9.0
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
- eslint-plugin-react-rsc@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-react-rsc@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
ts-pattern: 5.9.0
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- eslint-plugin-react-web-api@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-react-web-api@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
birecord: 0.1.1
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
ts-pattern: 5.9.0
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- eslint-plugin-react-x@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3):
+ eslint-plugin-react-x@3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@eslint-react/ast': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/core': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/shared': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ '@eslint-react/var': 3.0.0(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/scope-manager': 8.57.2
- '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@typescript-eslint/types': 8.57.2
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
compare-versions: 6.1.1
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
string-ts: 2.3.1
ts-api-utils: 2.5.0(typescript@5.9.3)
ts-pattern: 5.9.0
@@ -13933,23 +13663,23 @@ snapshots:
transitivePeerDependencies:
- supports-color
- eslint-plugin-regexp@3.1.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-regexp@3.1.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
'@eslint-community/regexpp': 4.12.2
comment-parser: 1.4.6
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
jsdoc-type-pratt-parser: 7.1.1
refa: 0.12.1
regexp-ast-analysis: 0.7.1
scslre: 0.3.0
- eslint-plugin-sonarjs@4.0.2(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-sonarjs@4.0.2(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@eslint-community/regexpp': 4.12.2
builtin-modules: 3.3.0
bytes: 3.1.2
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
functional-red-black-tree: 1.0.1
globals: 17.4.0
jsx-ast-utils-x: 0.1.0
@@ -13960,35 +13690,35 @@ snapshots:
ts-api-utils: 2.5.0(typescript@5.9.3)
typescript: 5.9.3
- eslint-plugin-storybook@10.3.3(eslint@10.1.0(jiti@1.21.7))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3):
+ eslint-plugin-storybook@10.3.3(eslint@10.1.0(jiti@2.6.1))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3):
dependencies:
- '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
- eslint: 10.1.0(jiti@1.21.7)
+ '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
+ eslint: 10.1.0(jiti@2.6.1)
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
transitivePeerDependencies:
- supports-color
- typescript
- eslint-plugin-toml@1.3.1(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-toml@1.3.1(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@eslint/core': 1.1.1
'@eslint/plugin-kit': 0.6.1
'@ota-meshi/ast-token-store': 0.3.0
debug: 4.4.3(supports-color@8.1.1)
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
toml-eslint-parser: 1.0.3
transitivePeerDependencies:
- supports-color
- eslint-plugin-unicorn@63.0.0(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-unicorn@63.0.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@babel/helper-validator-identifier': 7.28.5
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
change-case: 5.4.4
ci-info: 4.4.0
clean-regexp: 1.0.0
core-js-compat: 3.49.0
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
find-up-simple: 1.0.1
globals: 16.5.0
indent-string: 5.0.0
@@ -14000,27 +13730,27 @@ snapshots:
semver: 7.7.4
strip-indent: 4.1.1
- eslint-plugin-unused-imports@4.4.1(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-unused-imports@4.4.1(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1)):
dependencies:
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
optionalDependencies:
- '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
- eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@1.21.7)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@1.21.7))):
+ eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))):
dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
- eslint: 10.1.0(jiti@1.21.7)
+ '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
+ eslint: 10.1.0(jiti@2.6.1)
natural-compare: 1.4.0
nth-check: 2.1.1
postcss-selector-parser: 7.1.1
semver: 7.7.4
- vue-eslint-parser: 10.4.0(eslint@10.1.0(jiti@1.21.7))
+ vue-eslint-parser: 10.4.0(eslint@10.1.0(jiti@2.6.1))
xml-name-validator: 4.0.0
optionalDependencies:
- '@stylistic/eslint-plugin': 5.10.0(eslint@10.1.0(jiti@1.21.7))
- '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
+ '@stylistic/eslint-plugin': 5.10.0(eslint@10.1.0(jiti@2.6.1))
+ '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
- eslint-plugin-yml@3.3.1(eslint@10.1.0(jiti@1.21.7)):
+ eslint-plugin-yml@3.3.1(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@eslint/core': 1.1.1
'@eslint/plugin-kit': 0.6.1
@@ -14028,16 +13758,16 @@ snapshots:
debug: 4.4.3(supports-color@8.1.1)
diff-sequences: 29.6.3
escape-string-regexp: 5.0.0
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
natural-compare: 1.4.0
yaml-eslint-parser: 2.0.0
transitivePeerDependencies:
- supports-color
- eslint-processor-vue-blocks@2.0.0(@vue/compiler-sfc@3.5.31)(eslint@10.1.0(jiti@1.21.7)):
+ eslint-processor-vue-blocks@2.0.0(@vue/compiler-sfc@3.5.31)(eslint@10.1.0(jiti@2.6.1)):
dependencies:
'@vue/compiler-sfc': 3.5.31
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
eslint-scope@5.1.1:
dependencies:
@@ -14062,43 +13792,6 @@ snapshots:
eslint-visitor-keys@5.0.1: {}
- eslint@10.1.0(jiti@1.21.7):
- dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@1.21.7))
- '@eslint-community/regexpp': 4.12.2
- '@eslint/config-array': 0.23.3
- '@eslint/config-helpers': 0.5.3
- '@eslint/core': 1.1.1
- '@eslint/plugin-kit': 0.6.1
- '@humanfs/node': 0.16.7
- '@humanwhocodes/module-importer': 1.0.1
- '@humanwhocodes/retry': 0.4.3
- '@types/estree': 1.0.8
- ajv: 6.14.0
- cross-spawn: 7.0.6
- debug: 4.4.3(supports-color@8.1.1)
- escape-string-regexp: 4.0.0
- eslint-scope: 9.1.2
- eslint-visitor-keys: 5.0.1
- espree: 11.2.0
- esquery: 1.7.0
- esutils: 2.0.3
- fast-deep-equal: 3.1.3
- file-entry-cache: 8.0.0
- find-up: 5.0.0
- glob-parent: 6.0.2
- ignore: 5.3.2
- imurmurhash: 0.1.4
- is-glob: 4.0.3
- json-stable-stringify-without-jsonify: 1.0.1
- minimatch: 10.2.4
- natural-compare: 1.4.0
- optionator: 0.9.4
- optionalDependencies:
- jiti: 1.21.7
- transitivePeerDependencies:
- - supports-color
-
eslint@10.1.0(jiti@2.6.1):
dependencies:
'@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1))
@@ -14136,9 +13829,9 @@ snapshots:
transitivePeerDependencies:
- supports-color
- eslint@9.27.0(jiti@1.21.7):
+ eslint@9.27.0(jiti@2.6.1):
dependencies:
- '@eslint-community/eslint-utils': 4.9.1(eslint@9.27.0(jiti@1.21.7))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.27.0(jiti@2.6.1))
'@eslint-community/regexpp': 4.12.2
'@eslint/config-array': 0.20.1
'@eslint/config-helpers': 0.2.3
@@ -14174,7 +13867,7 @@ snapshots:
natural-compare: 1.4.0
optionator: 0.9.4
optionalDependencies:
- jiti: 1.21.7
+ jiti: 2.6.1
transitivePeerDependencies:
- supports-color
@@ -14243,8 +13936,6 @@ snapshots:
event-target-bus@1.0.0: {}
- eventemitter3@5.0.4: {}
-
events@3.3.0: {}
expand-template@2.0.3:
@@ -14346,16 +14037,6 @@ snapshots:
flatted@3.4.2: {}
- follow-redirects@1.15.11: {}
-
- form-data@4.0.5:
- dependencies:
- asynckit: 0.4.0
- combined-stream: 1.0.8
- es-set-tostringtag: 2.1.0
- hasown: '@nolyfill/hasown@1.0.44'
- mime-types: 2.1.35
-
format@0.2.2: {}
formatly@0.3.0:
@@ -14371,8 +14052,6 @@ snapshots:
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
- fraction.js@5.3.4: {}
-
fs-constants@1.0.0:
optional: true
@@ -14382,8 +14061,6 @@ snapshots:
fsevents@2.3.3:
optional: true
- function-bind@1.1.2: {}
-
functional-red-black-tree@1.0.1: {}
fzf@0.5.2: {}
@@ -14392,26 +14069,8 @@ snapshots:
get-east-asian-width@1.5.0: {}
- get-intrinsic@1.3.0:
- dependencies:
- call-bind-apply-helpers: 1.0.2
- es-define-property: 1.0.1
- es-errors: 1.3.0
- es-object-atoms: 1.1.1
- function-bind: 1.1.2
- get-proto: 1.0.1
- gopd: 1.2.0
- has-symbols: 1.1.0
- hasown: '@nolyfill/hasown@1.0.44'
- math-intrinsics: 1.1.0
-
get-nonce@1.0.1: {}
- get-proto@1.0.1:
- dependencies:
- dunder-proto: 1.0.1
- es-object-atoms: 1.1.1
-
get-stream@5.2.0:
dependencies:
pump: 3.0.4
@@ -14459,8 +14118,6 @@ snapshots:
dependencies:
csstype: 3.2.3
- gopd@1.2.0: {}
-
graceful-fs@4.2.11: {}
hachure-fill@0.5.2: {}
@@ -14483,12 +14140,6 @@ snapshots:
has-flag@4.0.0: {}
- has-symbols@1.1.0: {}
-
- has-tostringtag@1.0.2:
- dependencies:
- has-symbols: 1.1.0
-
hast-util-from-dom@5.0.1:
dependencies:
'@types/hast': 3.0.4
@@ -14671,8 +14322,6 @@ snapshots:
domutils: 3.2.2
entities: 7.0.1
- husky@9.1.7: {}
-
i18next-resources-to-backend@1.2.1:
dependencies:
'@babel/runtime': 7.29.2
@@ -14755,10 +14404,6 @@ snapshots:
is-alphabetical: 2.0.1
is-decimal: 2.0.1
- is-binary-path@2.1.0:
- dependencies:
- binary-extensions: 2.3.0
-
is-builtin-module@5.0.0:
dependencies:
builtin-modules: 5.0.0
@@ -14771,10 +14416,6 @@ snapshots:
is-extglob@2.1.1: {}
- is-fullwidth-code-point@5.1.0:
- dependencies:
- get-east-asian-width: 1.5.0
-
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
@@ -14833,8 +14474,6 @@ snapshots:
merge-stream: 2.0.0
supports-color: 8.1.1
- jiti@1.21.7: {}
-
jiti@2.6.1: {}
jotai@2.19.0(@babel/core@7.29.0)(@babel/template@7.28.6)(@types/react@19.2.14)(react@19.2.4):
@@ -15028,24 +14667,6 @@ snapshots:
lines-and-columns@1.2.4: {}
- lint-staged@16.4.0:
- dependencies:
- commander: 14.0.3
- listr2: 9.0.5
- picomatch: 4.0.4
- string-argv: 0.3.2
- tinyexec: 1.0.4
- yaml: 2.8.3
-
- listr2@9.0.5:
- dependencies:
- cli-truncate: 5.2.0
- colorette: 2.0.20
- eventemitter3: 5.0.4
- log-update: 6.1.0
- rfdc: 1.4.1
- wrap-ansi: 9.0.2
-
load-tsconfig@0.2.5: {}
loader-runner@4.3.1: {}
@@ -15070,14 +14691,6 @@ snapshots:
lodash@4.17.23: {}
- log-update@6.1.0:
- dependencies:
- ansi-escapes: 7.3.0
- cli-cursor: 5.0.0
- slice-ansi: 7.1.2
- strip-ansi: 7.2.0
- wrap-ansi: 9.0.2
-
longest-streak@3.1.0: {}
loose-envify@1.4.0:
@@ -15129,8 +14742,6 @@ snapshots:
marked@17.0.5: {}
- math-intrinsics@1.1.0: {}
-
mdast-util-directive@3.1.0:
dependencies:
'@types/mdast': 4.0.4
@@ -15805,8 +15416,6 @@ snapshots:
semver: 7.7.4
validate-npm-package-license: 3.0.4
- normalize-path@3.0.0: {}
-
normalize-wheel@1.0.1: {}
nth-check@2.1.1:
@@ -15824,8 +15433,6 @@ snapshots:
object-deep-merge@2.0.0: {}
- object-hash@3.0.0: {}
-
obug@2.1.1: {}
ofetch@1.5.1:
@@ -16105,8 +15712,6 @@ snapshots:
picomatch@4.0.4: {}
- pify@2.3.0: {}
-
pinyin-pro@3.28.0: {}
pirates@4.0.7: {}
@@ -16157,31 +15762,6 @@ snapshots:
transitivePeerDependencies:
- supports-color
- postcss-import@15.1.0(postcss@8.5.8):
- dependencies:
- postcss: 8.5.8
- postcss-value-parser: 4.2.0
- read-cache: 1.0.0
- resolve: 1.22.11
-
- postcss-js@4.1.0(postcss@8.5.8):
- dependencies:
- camelcase-css: 2.0.1
- postcss: 8.5.8
-
- postcss-js@5.1.0(postcss@8.5.8):
- dependencies:
- postcss: 8.5.8
-
- postcss-load-config@6.0.1(jiti@1.21.7)(postcss@8.5.8)(tsx@4.21.0)(yaml@2.8.3):
- dependencies:
- lilconfig: 3.1.3
- optionalDependencies:
- jiti: 1.21.7
- postcss: 8.5.8
- tsx: 4.21.0
- yaml: 2.8.3
-
postcss-load-config@6.0.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(yaml@2.8.3):
dependencies:
lilconfig: 3.1.3
@@ -16191,21 +15771,11 @@ snapshots:
tsx: 4.21.0
yaml: 2.8.3
- postcss-nested@6.2.0(postcss@8.5.8):
- dependencies:
- postcss: 8.5.8
- postcss-selector-parser: 6.1.2
-
postcss-selector-parser@6.0.10:
dependencies:
cssesc: 3.0.0
util-deprecate: 1.0.2
- postcss-selector-parser@6.1.2:
- dependencies:
- cssesc: 3.0.0
- util-deprecate: 1.0.2
-
postcss-selector-parser@7.1.1:
dependencies:
cssesc: 3.0.0
@@ -16269,8 +15839,6 @@ snapshots:
property-information@7.1.0: {}
- proxy-from-env@2.1.0: {}
-
pump@3.0.4:
dependencies:
end-of-stream: 1.4.5
@@ -16489,10 +16057,6 @@ snapshots:
- '@types/react'
- immer
- read-cache@1.0.0:
- dependencies:
- pify: 2.3.0
-
read-package-up@12.0.0:
dependencies:
find-up-simple: 1.0.1
@@ -16514,10 +16078,6 @@ snapshots:
util-deprecate: 1.0.2
optional: true
- readdirp@3.6.0:
- dependencies:
- picomatch: 2.3.2
-
readdirp@4.1.2: {}
recast@0.23.11:
@@ -16718,8 +16278,6 @@ snapshots:
reusify@1.1.0: {}
- rfdc@1.4.1: {}
-
robust-predicates@3.0.3: {}
rolldown@1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1):
@@ -16913,16 +16471,6 @@ snapshots:
size-sensor@1.0.3: {}
- slice-ansi@7.1.2:
- dependencies:
- ansi-styles: 6.2.3
- is-fullwidth-code-point: 5.1.0
-
- slice-ansi@8.0.0:
- dependencies:
- ansi-styles: 6.2.3
- is-fullwidth-code-point: 5.1.0
-
smol-toml@1.6.1: {}
solid-js@1.9.11:
@@ -17023,8 +16571,6 @@ snapshots:
string-argv@0.3.1: {}
- string-argv@0.3.2: {}
-
string-ts@2.3.1: {}
string-width@8.2.0:
@@ -17126,37 +16672,9 @@ snapshots:
tailwind-csstree@0.1.5: {}
- tailwind-merge@2.6.1: {}
-
tailwind-merge@3.5.0: {}
- tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.3):
- dependencies:
- '@alloc/quick-lru': 5.2.0
- arg: 5.0.2
- chokidar: 3.6.0
- didyoumean: 1.2.2
- dlv: 1.1.3
- fast-glob: 3.3.3
- glob-parent: 6.0.2
- is-glob: 4.0.3
- jiti: 1.21.7
- lilconfig: 3.1.3
- micromatch: 4.0.8
- normalize-path: 3.0.0
- object-hash: 3.0.0
- picocolors: 1.1.1
- postcss: 8.5.8
- postcss-import: 15.1.0(postcss@8.5.8)
- postcss-js: 4.1.0(postcss@8.5.8)
- postcss-load-config: 6.0.1(jiti@1.21.7)(postcss@8.5.8)(tsx@4.21.0)(yaml@2.8.3)
- postcss-nested: 6.2.0(postcss@8.5.8)
- postcss-selector-parser: 6.1.2
- resolve: 1.22.11
- sucrase: 3.35.1
- transitivePeerDependencies:
- - tsx
- - yaml
+ tailwindcss@4.2.2: {}
tapable@2.3.2: {}
@@ -17565,21 +17083,21 @@ snapshots:
'@types/unist': 3.0.3
vfile-message: 4.0.3
- vinext@0.0.38(f5786d681f520e26604259e094ebaa46):
+ vinext@0.0.38(21fde6c2677b0aab516df83ef1beed5d):
dependencies:
'@unpic/react': 1.0.2(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
'@vercel/og': 0.8.6
- '@vitejs/plugin-react': 6.0.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
+ '@vitejs/plugin-react': 6.0.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
magic-string: 0.30.21
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
rsc-html-stream: 0.0.7
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vite-plugin-commonjs: 0.10.4
- vite-tsconfig-paths: 6.1.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)
+ vite-tsconfig-paths: 6.1.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)
optionalDependencies:
'@mdx-js/rollup': 3.1.1(rollup@4.59.0)
- '@vitejs/plugin-rsc': 0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)
+ '@vitejs/plugin-rsc': 0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)
react-server-dom-webpack: 19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
transitivePeerDependencies:
- next
@@ -17599,9 +17117,9 @@ snapshots:
fast-glob: 3.3.3
magic-string: 0.30.21
- vite-plugin-inspect@12.0.0-beta.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0):
+ vite-plugin-inspect@12.0.0-beta.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0):
dependencies:
- '@vitejs/devtools-kit': 0.1.11(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0)
+ '@vitejs/devtools-kit': 0.1.11(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)(ws@8.20.0)
ansis: 4.2.0
error-stack-parser-es: 1.0.5
obug: 2.1.1
@@ -17610,12 +17128,12 @@ snapshots:
perfect-debounce: 2.1.0
sirv: 3.0.2
unplugin-utils: 0.3.1
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- typescript
- ws
- vite-plugin-storybook-nextjs@3.2.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3):
+ vite-plugin-storybook-nextjs@3.2.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(next@16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0))(storybook@10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3):
dependencies:
'@next/env': 16.0.0
image-size: 2.0.2
@@ -17624,17 +17142,17 @@ snapshots:
next: 16.2.1(@babel/core@7.29.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.98.0)
storybook: 10.3.3(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
ts-dedent: 2.2.0
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
- vite-tsconfig-paths: 5.1.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite-tsconfig-paths: 5.1.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3)
transitivePeerDependencies:
- supports-color
- typescript
- vite-plus@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3):
+ vite-plus@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3):
dependencies:
'@oxc-project/types': 0.122.0
- '@voidzero-dev/vite-plus-core': 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
- '@voidzero-dev/vite-plus-test': 0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
+ '@voidzero-dev/vite-plus-core': 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
+ '@voidzero-dev/vite-plus-test': 0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
cac: 7.0.0
cross-spawn: 7.0.6
oxfmt: 0.42.0
@@ -17726,23 +17244,23 @@ snapshots:
- vite
- yaml
- vite-tsconfig-paths@5.1.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3):
+ vite-tsconfig-paths@5.1.4(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3):
dependencies:
debug: 4.4.3(supports-color@8.1.1)
globrex: 0.1.2
tsconfck: 3.1.6(typescript@5.9.3)
optionalDependencies:
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- supports-color
- typescript
- vite-tsconfig-paths@6.1.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3):
+ vite-tsconfig-paths@6.1.1(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(typescript@5.9.3):
dependencies:
debug: 4.4.3(supports-color@8.1.1)
globrex: 0.1.2
tsconfck: 3.1.6(typescript@5.9.3)
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- supports-color
- typescript
@@ -17767,15 +17285,15 @@ snapshots:
- '@emnapi/core'
- '@emnapi/runtime'
- vitefu@1.1.2(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)):
+ vitefu@1.1.2(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)):
optionalDependencies:
- vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vite: '@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
- vitest-canvas-mock@1.1.4(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)):
+ vitest-canvas-mock@1.1.4(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)):
dependencies:
cssfontparser: 1.2.1
moo-color: 1.0.3
- vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
+ vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
void-elements@3.1.0: {}
@@ -17796,10 +17314,10 @@ snapshots:
vscode-uri@3.1.0: {}
- vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@1.21.7)):
+ vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1)):
dependencies:
debug: 4.4.3(supports-color@8.1.1)
- eslint: 10.1.0(jiti@1.21.7)
+ eslint: 10.1.0(jiti@2.6.1)
eslint-scope: 9.1.2
eslint-visitor-keys: 5.0.1
espree: 11.2.0
@@ -17869,12 +17387,6 @@ snapshots:
word-wrap@1.2.5: {}
- wrap-ansi@9.0.2:
- dependencies:
- ansi-styles: 6.2.3
- string-width: 8.2.0
- strip-ansi: 7.2.0
-
wrappy@1.0.2: {}
ws@8.20.0: {}
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index dece6f3f4f..abcbff7a68 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -1,7 +1,17 @@
+trustPolicy: no-downgrade
+minimumReleaseAge: 2880
+blockExoticSubdeps: true
+strictDepBuilds: true
+allowBuilds:
+ "@parcel/watcher": false
+ canvas: false
+ esbuild: false
+ sharp: false
packages:
- web
- e2e
- sdks/nodejs-client
+ - packages/*
overrides:
"@lexical/code": npm:lexical-code-no-prism@0.41.0
"@monaco-editor/loader": 1.7.0
@@ -59,13 +69,6 @@ overrides:
which-typed-array: npm:@nolyfill/which-typed-array@^1.0.44
yaml@>=2.0.0 <2.8.3: 2.8.3
yauzl@<3.2.1: 3.2.1
-ignoredBuiltDependencies:
- - canvas
- - core-js-pure
-onlyBuiltDependencies:
- - "@parcel/watcher"
- - esbuild
- - sharp
catalog:
"@amplitude/analytics-browser": 2.38.0
"@amplitude/plugin-session-replay-browser": 1.27.5
@@ -76,7 +79,7 @@ catalog:
"@egoist/tailwindcss-icons": 1.9.2
"@emoji-mart/data": 1.2.1
"@eslint-react/eslint-plugin": 3.0.0
- "@eslint/js": ^10.0.1
+ "@eslint/js": 10.0.1
"@floating-ui/react": 0.27.19
"@formatjs/intl-localematcher": 0.8.2
"@headlessui/react": 2.2.9
@@ -114,7 +117,9 @@ catalog:
"@streamdown/math": 1.0.2
"@svgdotjs/svg.js": 3.2.5
"@t3-oss/env-nextjs": 0.13.11
+ "@tailwindcss/postcss": 4.2.2
"@tailwindcss/typography": 0.5.19
+ "@tailwindcss/vite": 4.2.2
"@tanstack/eslint-plugin-query": 5.95.2
"@tanstack/react-devtools": 0.10.0
"@tanstack/react-form": 1.28.5
@@ -139,17 +144,16 @@ catalog:
"@types/react-syntax-highlighter": 15.5.13
"@types/react-window": 1.8.8
"@types/sortablejs": 1.15.9
- "@typescript-eslint/eslint-plugin": ^8.57.2
+ "@typescript-eslint/eslint-plugin": 8.57.2
"@typescript-eslint/parser": 8.57.2
"@typescript/native-preview": 7.0.0-dev.20260329.1
"@vitejs/plugin-react": 6.0.1
"@vitejs/plugin-rsc": 0.5.21
- "@vitest/coverage-v8": 4.1.2
+ "@vitest/coverage-v8": 4.1.1
abcjs: 6.6.2
agentation: 3.0.2
ahooks: 3.9.7
autoprefixer: 10.4.27
- axios: ^1.14.0
class-variance-authority: 0.7.1
clsx: 2.1.1
cmdk: 1.1.1
@@ -182,7 +186,6 @@ catalog:
hono: 4.12.9
html-entities: 2.6.0
html-to-image: 1.11.13
- husky: 9.1.7
i18next: 25.10.10
i18next-resources-to-backend: 1.2.1
iconify-import-svg: 0.1.2
@@ -197,7 +200,6 @@ catalog:
ky: 1.14.3
lamejs: 1.2.1
lexical: 0.42.0
- lint-staged: 16.4.0
mermaid: 11.13.0
mime: 4.1.0
mitt: 3.0.1
@@ -235,8 +237,8 @@ catalog:
storybook: 10.3.3
streamdown: 2.5.0
string-ts: 2.3.1
- tailwind-merge: 2.6.1
- tailwindcss: 3.4.19
+ tailwind-merge: 3.5.0
+ tailwindcss: 4.2.2
taze: 19.10.0
tldts: 7.0.27
tsup: ^8.5.1
diff --git a/sdks/nodejs-client/eslint.config.js b/sdks/nodejs-client/eslint.config.js
index 9e659f5d28..21ac872f2a 100644
--- a/sdks/nodejs-client/eslint.config.js
+++ b/sdks/nodejs-client/eslint.config.js
@@ -12,11 +12,11 @@ const typeCheckedRules =
export default [
{
- ignores: ["dist", "node_modules", "scripts", "tests", "**/*.test.*", "**/*.spec.*"],
+ ignores: ["dist", "node_modules", "scripts"],
},
js.configs.recommended,
{
- files: ["src/**/*.ts"],
+ files: ["src/**/*.ts", "tests/**/*.ts"],
languageOptions: {
parser: tsParser,
ecmaVersion: "latest",
diff --git a/sdks/nodejs-client/package.json b/sdks/nodejs-client/package.json
index 63fa6799b1..d487c3abb3 100644
--- a/sdks/nodejs-client/package.json
+++ b/sdks/nodejs-client/package.json
@@ -1,6 +1,6 @@
{
"name": "dify-client",
- "version": "3.0.0",
+ "version": "3.1.0",
"description": "This is the Node.js SDK for the Dify.AI API, which allows you to easily integrate Dify.AI into your Node.js applications.",
"type": "module",
"main": "./dist/index.js",
@@ -15,7 +15,8 @@
"node": ">=18.0.0"
},
"files": [
- "dist",
+ "dist/index.js",
+ "dist/index.d.ts",
"README.md",
"LICENSE"
],
@@ -53,9 +54,6 @@
"publish:check": "./scripts/publish.sh --dry-run",
"publish:npm": "./scripts/publish.sh"
},
- "dependencies": {
- "axios": "catalog:"
- },
"devDependencies": {
"@eslint/js": "catalog:",
"@types/node": "catalog:",
diff --git a/sdks/nodejs-client/src/client/base.test.js b/sdks/nodejs-client/src/client/base.test.ts
similarity index 96%
rename from sdks/nodejs-client/src/client/base.test.js
rename to sdks/nodejs-client/src/client/base.test.ts
index 5e1b21d0f1..868c476432 100644
--- a/sdks/nodejs-client/src/client/base.test.js
+++ b/sdks/nodejs-client/src/client/base.test.ts
@@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
-import { DifyClient } from "./base";
import { ValidationError } from "../errors/dify-error";
+import { DifyClient } from "./base";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("DifyClient base", () => {
@@ -103,7 +103,7 @@ describe("DifyClient base", () => {
});
});
- it("filePreview uses arraybuffer response", async () => {
+ it("filePreview uses bytes response", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
@@ -113,7 +113,7 @@ describe("DifyClient base", () => {
method: "GET",
path: "/files/file/preview",
query: { user: "user", as_attachment: "true" },
- responseType: "arraybuffer",
+ responseType: "bytes",
});
});
@@ -162,11 +162,11 @@ describe("DifyClient base", () => {
streaming: false,
voice: "voice",
},
- responseType: "arraybuffer",
+ responseType: "bytes",
});
});
- it("textToAudio requires text or message id", async () => {
+ it("textToAudio requires text or message id", () => {
const { client } = createHttpClientWithSpies();
const dify = new DifyClient(client);
diff --git a/sdks/nodejs-client/src/client/base.ts b/sdks/nodejs-client/src/client/base.ts
index 0fa535a488..f02b88be3a 100644
--- a/sdks/nodejs-client/src/client/base.ts
+++ b/sdks/nodejs-client/src/client/base.ts
@@ -2,14 +2,18 @@ import type {
BinaryStream,
DifyClientConfig,
DifyResponse,
+ JsonObject,
MessageFeedbackRequest,
QueryParams,
RequestMethod,
+ SuccessResponse,
TextToAudioRequest,
} from "../types/common";
+import type { HttpRequestBody } from "../http/client";
import { HttpClient } from "../http/client";
import { ensureNonEmptyString, ensureRating } from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
+import type { SdkFormData } from "../http/form-data";
import { isFormData } from "../http/form-data";
const toConfig = (
@@ -25,13 +29,8 @@ const toConfig = (
return init;
};
-const appendUserToFormData = (form: unknown, user: string): void => {
- if (!isFormData(form)) {
- throw new FileUploadError("FormData is required for file uploads");
- }
- if (typeof form.append === "function") {
- form.append("user", user);
- }
+const appendUserToFormData = (form: SdkFormData, user: string): void => {
+ form.append("user", user);
};
export class DifyClient {
@@ -57,7 +56,7 @@ export class DifyClient {
sendRequest(
method: RequestMethod,
endpoint: string,
- data: unknown = null,
+ data: HttpRequestBody = null,
params: QueryParams | null = null,
stream = false,
headerParams: Record = {}
@@ -72,14 +71,14 @@ export class DifyClient {
});
}
- getRoot(): Promise> {
+ getRoot(): Promise> {
return this.http.request({
method: "GET",
path: "/",
});
}
- getApplicationParameters(user?: string): Promise> {
+ getApplicationParameters(user?: string): Promise> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -90,11 +89,11 @@ export class DifyClient {
});
}
- async getParameters(user?: string): Promise> {
+ async getParameters(user?: string): Promise> {
return this.getApplicationParameters(user);
}
- getMeta(user?: string): Promise> {
+ getMeta(user?: string): Promise> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -107,21 +106,21 @@ export class DifyClient {
messageFeedback(
request: MessageFeedbackRequest
- ): Promise>>;
+ ): Promise>;
messageFeedback(
messageId: string,
rating: "like" | "dislike" | null,
user: string,
content?: string
- ): Promise>>;
+ ): Promise>;
messageFeedback(
messageIdOrRequest: string | MessageFeedbackRequest,
rating?: "like" | "dislike" | null,
user?: string,
content?: string
- ): Promise>> {
+ ): Promise> {
let messageId: string;
- const payload: Record = {};
+ const payload: JsonObject = {};
if (typeof messageIdOrRequest === "string") {
messageId = messageIdOrRequest;
@@ -157,7 +156,7 @@ export class DifyClient {
});
}
- getInfo(user?: string): Promise> {
+ getInfo(user?: string): Promise> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -168,7 +167,7 @@ export class DifyClient {
});
}
- getSite(user?: string): Promise> {
+ getSite(user?: string): Promise> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -179,7 +178,7 @@ export class DifyClient {
});
}
- fileUpload(form: unknown, user: string): Promise> {
+ fileUpload(form: unknown, user: string): Promise> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for file uploads");
}
@@ -199,18 +198,18 @@ export class DifyClient {
): Promise> {
ensureNonEmptyString(fileId, "fileId");
ensureNonEmptyString(user, "user");
- return this.http.request({
+ return this.http.request({
method: "GET",
path: `/files/${fileId}/preview`,
query: {
user,
as_attachment: asAttachment ? "true" : undefined,
},
- responseType: "arraybuffer",
+ responseType: "bytes",
});
}
- audioToText(form: unknown, user: string): Promise> {
+ audioToText(form: unknown, user: string): Promise> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for audio uploads");
}
@@ -274,11 +273,11 @@ export class DifyClient {
});
}
- return this.http.request({
+ return this.http.request({
method: "POST",
path: "/text-to-audio",
data: payload,
- responseType: "arraybuffer",
+ responseType: "bytes",
});
}
}
diff --git a/sdks/nodejs-client/src/client/chat.test.js b/sdks/nodejs-client/src/client/chat.test.ts
similarity index 97%
rename from sdks/nodejs-client/src/client/chat.test.js
rename to sdks/nodejs-client/src/client/chat.test.ts
index a97c9d4a5c..712ad64fd1 100644
--- a/sdks/nodejs-client/src/client/chat.test.js
+++ b/sdks/nodejs-client/src/client/chat.test.ts
@@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
-import { ChatClient } from "./chat";
import { ValidationError } from "../errors/dify-error";
+import { ChatClient } from "./chat";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("ChatClient", () => {
@@ -156,13 +156,13 @@ describe("ChatClient", () => {
});
});
- it("requires name when autoGenerate is false", async () => {
+ it("requires name when autoGenerate is false", () => {
const { client } = createHttpClientWithSpies();
const chat = new ChatClient(client);
- expect(() =>
- chat.renameConversation("conv", "", "user", false)
- ).toThrow(ValidationError);
+ expect(() => chat.renameConversation("conv", "", "user", false)).toThrow(
+ ValidationError
+ );
});
it("deletes conversations", async () => {
diff --git a/sdks/nodejs-client/src/client/chat.ts b/sdks/nodejs-client/src/client/chat.ts
index 745c999552..9c232e5117 100644
--- a/sdks/nodejs-client/src/client/chat.ts
+++ b/sdks/nodejs-client/src/client/chat.ts
@@ -1,5 +1,9 @@
import { DifyClient } from "./base";
-import type { ChatMessageRequest, ChatMessageResponse } from "../types/chat";
+import type {
+ ChatMessageRequest,
+ ChatMessageResponse,
+ ConversationSortBy,
+} from "../types/chat";
import type {
AnnotationCreateRequest,
AnnotationListOptions,
@@ -9,7 +13,11 @@ import type {
import type {
DifyResponse,
DifyStream,
+ JsonObject,
+ JsonValue,
QueryParams,
+ SuccessResponse,
+ SuggestedQuestionsResponse,
} from "../types/common";
import {
ensureNonEmptyString,
@@ -22,20 +30,20 @@ export class ChatClient extends DifyClient {
request: ChatMessageRequest
): Promise | DifyStream>;
createChatMessage(
- inputs: Record,
+ inputs: JsonObject,
query: string,
user: string,
stream?: boolean,
conversationId?: string | null,
- files?: Array> | null
+ files?: ChatMessageRequest["files"]
): Promise | DifyStream>;
createChatMessage(
- inputOrRequest: ChatMessageRequest | Record,
+ inputOrRequest: ChatMessageRequest | JsonObject,
query?: string,
user?: string,
stream = false,
conversationId?: string | null,
- files?: Array> | null
+ files?: ChatMessageRequest["files"]
): Promise | DifyStream> {
let payload: ChatMessageRequest;
let shouldStream = stream;
@@ -46,8 +54,8 @@ export class ChatClient extends DifyClient {
} else {
ensureNonEmptyString(query, "query");
ensureNonEmptyString(user, "user");
- payload = {
- inputs: inputOrRequest as Record,
+ payload = {
+ inputs: inputOrRequest,
query,
user,
response_mode: stream ? "streaming" : "blocking",
@@ -79,10 +87,10 @@ export class ChatClient extends DifyClient {
stopChatMessage(
taskId: string,
user: string
- ): Promise> {
+ ): Promise> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
- return this.http.request({
+ return this.http.request({
method: "POST",
path: `/chat-messages/${taskId}/stop`,
data: { user },
@@ -92,17 +100,17 @@ export class ChatClient extends DifyClient {
stopMessage(
taskId: string,
user: string
- ): Promise> {
+ ): Promise> {
return this.stopChatMessage(taskId, user);
}
getSuggested(
messageId: string,
user: string
- ): Promise> {
+ ): Promise> {
ensureNonEmptyString(messageId, "messageId");
ensureNonEmptyString(user, "user");
- return this.http.request({
+ return this.http.request({
method: "GET",
path: `/messages/${messageId}/suggested`,
query: { user },
@@ -114,7 +122,7 @@ export class ChatClient extends DifyClient {
getAppFeedbacks(
page?: number,
limit?: number
- ): Promise>> {
+ ): Promise> {
ensureOptionalInt(page, "page");
ensureOptionalInt(limit, "limit");
return this.http.request({
@@ -131,8 +139,8 @@ export class ChatClient extends DifyClient {
user: string,
lastId?: string | null,
limit?: number | null,
- sortByOrPinned?: string | boolean | null
- ): Promise>> {
+ sortBy?: ConversationSortBy | null
+ ): Promise> {
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
ensureOptionalInt(limit, "limit");
@@ -144,10 +152,8 @@ export class ChatClient extends DifyClient {
if (limit) {
params.limit = limit;
}
- if (typeof sortByOrPinned === "string") {
- params.sort_by = sortByOrPinned;
- } else if (typeof sortByOrPinned === "boolean") {
- params.pinned = sortByOrPinned;
+ if (sortBy) {
+ params.sort_by = sortBy;
}
return this.http.request({
@@ -162,7 +168,7 @@ export class ChatClient extends DifyClient {
conversationId: string,
firstId?: string | null,
limit?: number | null
- ): Promise>> {
+ ): Promise> {
ensureNonEmptyString(user, "user");
ensureNonEmptyString(conversationId, "conversationId");
ensureOptionalString(firstId, "firstId");
@@ -189,18 +195,18 @@ export class ChatClient extends DifyClient {
name: string,
user: string,
autoGenerate?: boolean
- ): Promise>>;
+ ): Promise>;
renameConversation(
conversationId: string,
user: string,
options?: { name?: string | null; autoGenerate?: boolean }
- ): Promise>>;
+ ): Promise>;
renameConversation(
conversationId: string,
nameOrUser: string,
userOrOptions?: string | { name?: string | null; autoGenerate?: boolean },
autoGenerate?: boolean
- ): Promise>> {
+ ): Promise> {
ensureNonEmptyString(conversationId, "conversationId");
let name: string | null | undefined;
@@ -222,7 +228,7 @@ export class ChatClient extends DifyClient {
ensureNonEmptyString(name, "name");
}
- const payload: Record = {
+ const payload: JsonObject = {
user,
auto_generate: resolvedAutoGenerate,
};
@@ -240,7 +246,7 @@ export class ChatClient extends DifyClient {
deleteConversation(
conversationId: string,
user: string
- ): Promise>> {
+ ): Promise> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
return this.http.request({
@@ -256,7 +262,7 @@ export class ChatClient extends DifyClient {
lastId?: string | null,
limit?: number | null,
variableName?: string | null
- ): Promise>> {
+ ): Promise> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
@@ -279,8 +285,8 @@ export class ChatClient extends DifyClient {
conversationId: string,
variableId: string,
user: string,
- value: unknown
- ): Promise>> {
+ value: JsonValue
+ ): Promise> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(variableId, "variableId");
ensureNonEmptyString(user, "user");
diff --git a/sdks/nodejs-client/src/client/completion.test.js b/sdks/nodejs-client/src/client/completion.test.ts
similarity index 100%
rename from sdks/nodejs-client/src/client/completion.test.js
rename to sdks/nodejs-client/src/client/completion.test.ts
diff --git a/sdks/nodejs-client/src/client/completion.ts b/sdks/nodejs-client/src/client/completion.ts
index 9e39898e8b..f4e7121776 100644
--- a/sdks/nodejs-client/src/client/completion.ts
+++ b/sdks/nodejs-client/src/client/completion.ts
@@ -1,6 +1,11 @@
import { DifyClient } from "./base";
import type { CompletionRequest, CompletionResponse } from "../types/completion";
-import type { DifyResponse, DifyStream } from "../types/common";
+import type {
+ DifyResponse,
+ DifyStream,
+ JsonObject,
+ SuccessResponse,
+} from "../types/common";
import { ensureNonEmptyString } from "./validation";
const warned = new Set();
@@ -17,16 +22,16 @@ export class CompletionClient extends DifyClient {
request: CompletionRequest
): Promise | DifyStream>;
createCompletionMessage(
- inputs: Record,
+ inputs: JsonObject,
user: string,
stream?: boolean,
- files?: Array> | null
+ files?: CompletionRequest["files"]
): Promise | DifyStream>;
createCompletionMessage(
- inputOrRequest: CompletionRequest | Record,
+ inputOrRequest: CompletionRequest | JsonObject,
user?: string,
stream = false,
- files?: Array> | null
+ files?: CompletionRequest["files"]
): Promise | DifyStream> {
let payload: CompletionRequest;
let shouldStream = stream;
@@ -37,7 +42,7 @@ export class CompletionClient extends DifyClient {
} else {
ensureNonEmptyString(user, "user");
payload = {
- inputs: inputOrRequest as Record,
+ inputs: inputOrRequest,
user,
files,
response_mode: stream ? "streaming" : "blocking",
@@ -64,10 +69,10 @@ export class CompletionClient extends DifyClient {
stopCompletionMessage(
taskId: string,
user: string
- ): Promise> {
+ ): Promise> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
- return this.http.request({
+ return this.http.request({
method: "POST",
path: `/completion-messages/${taskId}/stop`,
data: { user },
@@ -77,15 +82,15 @@ export class CompletionClient extends DifyClient {
stop(
taskId: string,
user: string
- ): Promise> {
+ ): Promise> {
return this.stopCompletionMessage(taskId, user);
}
runWorkflow(
- inputs: Record,
+ inputs: JsonObject,
user: string,
stream = false
- ): Promise> | DifyStream>> {
+ ): Promise | DifyStream> {
warnOnce(
"CompletionClient.runWorkflow is deprecated. Use WorkflowClient.run instead."
);
@@ -96,13 +101,13 @@ export class CompletionClient extends DifyClient {
response_mode: stream ? "streaming" : "blocking",
};
if (stream) {
- return this.http.requestStream>({
+ return this.http.requestStream({
method: "POST",
path: "/workflows/run",
data: payload,
});
}
- return this.http.request>({
+ return this.http.request({
method: "POST",
path: "/workflows/run",
data: payload,
diff --git a/sdks/nodejs-client/src/client/knowledge-base.test.js b/sdks/nodejs-client/src/client/knowledge-base.test.ts
similarity index 92%
rename from sdks/nodejs-client/src/client/knowledge-base.test.js
rename to sdks/nodejs-client/src/client/knowledge-base.test.ts
index 4381b39e56..113a9db24b 100644
--- a/sdks/nodejs-client/src/client/knowledge-base.test.js
+++ b/sdks/nodejs-client/src/client/knowledge-base.test.ts
@@ -1,4 +1,5 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
+import { FileUploadError, ValidationError } from "../errors/dify-error";
import { KnowledgeBaseClient } from "./knowledge-base";
import { createHttpClientWithSpies } from "../../tests/test-utils";
@@ -174,7 +175,6 @@ describe("KnowledgeBaseClient", () => {
it("handles pipeline operations", async () => {
const { client, request, requestStream } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
- const warn = vi.spyOn(console, "warn").mockImplementation(() => {});
const form = { append: vi.fn(), getHeaders: () => ({}) };
await kb.listDatasourcePlugins("ds", { isPublished: true });
@@ -201,7 +201,6 @@ describe("KnowledgeBaseClient", () => {
});
await kb.uploadPipelineFile(form);
- expect(warn).toHaveBeenCalled();
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/datasets/ds/pipeline/datasource-plugins",
@@ -246,4 +245,22 @@ describe("KnowledgeBaseClient", () => {
data: form,
});
});
+
+ it("validates form-data and optional array filters", async () => {
+ const { client } = createHttpClientWithSpies();
+ const kb = new KnowledgeBaseClient(client);
+
+ await expect(kb.createDocumentByFile("ds", {})).rejects.toBeInstanceOf(
+ FileUploadError
+ );
+ await expect(
+ kb.listSegments("ds", "doc", { status: ["ok", 1] as unknown as string[] })
+ ).rejects.toBeInstanceOf(ValidationError);
+ await expect(
+ kb.hitTesting("ds", {
+ query: "q",
+ attachment_ids: ["att-1", 2] as unknown as string[],
+ })
+ ).rejects.toBeInstanceOf(ValidationError);
+ });
});
diff --git a/sdks/nodejs-client/src/client/knowledge-base.ts b/sdks/nodejs-client/src/client/knowledge-base.ts
index 7a0e39898b..9871c098e9 100644
--- a/sdks/nodejs-client/src/client/knowledge-base.ts
+++ b/sdks/nodejs-client/src/client/knowledge-base.ts
@@ -38,22 +38,17 @@ import {
ensureStringArray,
} from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
+import type { SdkFormData } from "../http/form-data";
import { isFormData } from "../http/form-data";
-const warned = new Set();
-const warnOnce = (message: string): void => {
- if (warned.has(message)) {
- return;
- }
- warned.add(message);
- console.warn(message);
-};
-
-const ensureFormData = (form: unknown, context: string): void => {
+function ensureFormData(
+ form: unknown,
+ context: string
+): asserts form is SdkFormData {
if (!isFormData(form)) {
throw new FileUploadError(`${context} requires FormData`);
}
-};
+}
const ensureNonEmptyArray = (value: unknown, name: string): void => {
if (!Array.isArray(value) || value.length === 0) {
@@ -61,12 +56,6 @@ const ensureNonEmptyArray = (value: unknown, name: string): void => {
}
};
-const warnPipelineRoutes = (): void => {
- warnOnce(
- "RAG pipeline endpoints may be unavailable unless the service API registers dataset/rag_pipeline routes."
- );
-};
-
export class KnowledgeBaseClient extends DifyClient {
async listDatasets(
options?: DatasetListOptions
@@ -641,7 +630,6 @@ export class KnowledgeBaseClient extends DifyClient {
datasetId: string,
options?: DatasourcePluginListOptions
): Promise> {
- warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureOptionalBoolean(options?.isPublished, "isPublished");
return this.http.request({
@@ -658,7 +646,6 @@ export class KnowledgeBaseClient extends DifyClient {
nodeId: string,
request: DatasourceNodeRunRequest
): Promise> {
- warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(nodeId, "nodeId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
@@ -673,7 +660,6 @@ export class KnowledgeBaseClient extends DifyClient {
datasetId: string,
request: PipelineRunRequest
): Promise | DifyStream> {
- warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
ensureNonEmptyString(request.start_node_id, "start_node_id");
@@ -695,7 +681,6 @@ export class KnowledgeBaseClient extends DifyClient {
async uploadPipelineFile(
form: unknown
): Promise> {
- warnPipelineRoutes();
ensureFormData(form, "uploadPipelineFile");
return this.http.request({
method: "POST",
diff --git a/sdks/nodejs-client/src/client/validation.test.js b/sdks/nodejs-client/src/client/validation.test.ts
similarity index 93%
rename from sdks/nodejs-client/src/client/validation.test.js
rename to sdks/nodejs-client/src/client/validation.test.ts
index 65bfa471a6..384dd46309 100644
--- a/sdks/nodejs-client/src/client/validation.test.js
+++ b/sdks/nodejs-client/src/client/validation.test.ts
@@ -10,7 +10,7 @@ import {
validateParams,
} from "./validation";
-const makeLongString = (length) => "a".repeat(length);
+const makeLongString = (length: number) => "a".repeat(length);
describe("validation utilities", () => {
it("ensureNonEmptyString throws on empty or whitespace", () => {
@@ -19,9 +19,7 @@ describe("validation utilities", () => {
});
it("ensureNonEmptyString throws on overly long strings", () => {
- expect(() =>
- ensureNonEmptyString(makeLongString(10001), "name")
- ).toThrow();
+ expect(() => ensureNonEmptyString(makeLongString(10001), "name")).toThrow();
});
it("ensureOptionalString ignores undefined and validates when set", () => {
@@ -73,7 +71,6 @@ describe("validation utilities", () => {
expect(() => validateParams({ rating: "bad" })).toThrow();
expect(() => validateParams({ page: 1.1 })).toThrow();
expect(() => validateParams({ files: "bad" })).toThrow();
- // Empty strings are allowed for optional params (e.g., keyword: "" means no filter)
expect(() => validateParams({ keyword: "" })).not.toThrow();
expect(() => validateParams({ name: makeLongString(10001) })).toThrow();
expect(() =>
diff --git a/sdks/nodejs-client/src/client/validation.ts b/sdks/nodejs-client/src/client/validation.ts
index 6aeec36bdc..0fe747a8f9 100644
--- a/sdks/nodejs-client/src/client/validation.ts
+++ b/sdks/nodejs-client/src/client/validation.ts
@@ -1,4 +1,5 @@
import { ValidationError } from "../errors/dify-error";
+import { isRecord } from "../internal/type-guards";
const MAX_STRING_LENGTH = 10000;
const MAX_LIST_LENGTH = 1000;
@@ -109,8 +110,8 @@ export function validateParams(params: Record): void {
`Parameter '${key}' exceeds maximum size of ${MAX_LIST_LENGTH} items`
);
}
- } else if (typeof value === "object") {
- if (Object.keys(value as Record).length > MAX_DICT_LENGTH) {
+ } else if (isRecord(value)) {
+ if (Object.keys(value).length > MAX_DICT_LENGTH) {
throw new ValidationError(
`Parameter '${key}' exceeds maximum size of ${MAX_DICT_LENGTH} items`
);
diff --git a/sdks/nodejs-client/src/client/workflow.test.js b/sdks/nodejs-client/src/client/workflow.test.ts
similarity index 97%
rename from sdks/nodejs-client/src/client/workflow.test.js
rename to sdks/nodejs-client/src/client/workflow.test.ts
index 79c419b55a..281540304e 100644
--- a/sdks/nodejs-client/src/client/workflow.test.js
+++ b/sdks/nodejs-client/src/client/workflow.test.ts
@@ -90,7 +90,6 @@ describe("WorkflowClient", () => {
const { client, request } = createHttpClientWithSpies();
const workflow = new WorkflowClient(client);
- // Use createdByEndUserSessionId to filter by user session (backend API parameter)
await workflow.getLogs({
keyword: "k",
status: "succeeded",
diff --git a/sdks/nodejs-client/src/client/workflow.ts b/sdks/nodejs-client/src/client/workflow.ts
index ae4d5861fa..6e073b12d2 100644
--- a/sdks/nodejs-client/src/client/workflow.ts
+++ b/sdks/nodejs-client/src/client/workflow.ts
@@ -1,6 +1,12 @@
import { DifyClient } from "./base";
import type { WorkflowRunRequest, WorkflowRunResponse } from "../types/workflow";
-import type { DifyResponse, DifyStream, QueryParams } from "../types/common";
+import type {
+ DifyResponse,
+ DifyStream,
+ JsonObject,
+ QueryParams,
+ SuccessResponse,
+} from "../types/common";
import {
ensureNonEmptyString,
ensureOptionalInt,
@@ -12,12 +18,12 @@ export class WorkflowClient extends DifyClient {
request: WorkflowRunRequest
): Promise | DifyStream>;
run(
- inputs: Record,
+ inputs: JsonObject,
user: string,
stream?: boolean
): Promise | DifyStream>;
run(
- inputOrRequest: WorkflowRunRequest | Record,
+ inputOrRequest: WorkflowRunRequest | JsonObject,
user?: string,
stream = false
): Promise | DifyStream> {
@@ -30,7 +36,7 @@ export class WorkflowClient extends DifyClient {
} else {
ensureNonEmptyString(user, "user");
payload = {
- inputs: inputOrRequest as Record,
+ inputs: inputOrRequest,
user,
response_mode: stream ? "streaming" : "blocking",
};
@@ -84,10 +90,10 @@ export class WorkflowClient extends DifyClient {
stop(
taskId: string,
user: string
- ): Promise> {
+ ): Promise> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
- return this.http.request({
+ return this.http.request({
method: "POST",
path: `/workflows/tasks/${taskId}/stop`,
data: { user },
@@ -111,7 +117,7 @@ export class WorkflowClient extends DifyClient {
limit?: number;
startTime?: string;
endTime?: string;
- }): Promise>> {
+ }): Promise> {
if (options?.keyword) {
ensureOptionalString(options.keyword, "keyword");
}
diff --git a/sdks/nodejs-client/src/client/workspace.test.js b/sdks/nodejs-client/src/client/workspace.test.ts
similarity index 100%
rename from sdks/nodejs-client/src/client/workspace.test.js
rename to sdks/nodejs-client/src/client/workspace.test.ts
diff --git a/sdks/nodejs-client/src/errors/dify-error.test.js b/sdks/nodejs-client/src/errors/dify-error.test.ts
similarity index 100%
rename from sdks/nodejs-client/src/errors/dify-error.test.js
rename to sdks/nodejs-client/src/errors/dify-error.test.ts
diff --git a/sdks/nodejs-client/src/http/client.test.js b/sdks/nodejs-client/src/http/client.test.js
deleted file mode 100644
index 05892547ed..0000000000
--- a/sdks/nodejs-client/src/http/client.test.js
+++ /dev/null
@@ -1,304 +0,0 @@
-import axios from "axios";
-import { Readable } from "node:stream";
-import { beforeEach, describe, expect, it, vi } from "vitest";
-import {
- APIError,
- AuthenticationError,
- FileUploadError,
- NetworkError,
- RateLimitError,
- TimeoutError,
- ValidationError,
-} from "../errors/dify-error";
-import { HttpClient } from "./client";
-
-describe("HttpClient", () => {
- beforeEach(() => {
- vi.restoreAllMocks();
- });
- it("builds requests with auth headers and JSON content type", async () => {
- const mockRequest = vi.fn().mockResolvedValue({
- status: 200,
- data: { ok: true },
- headers: { "x-request-id": "req" },
- });
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
-
- const client = new HttpClient({ apiKey: "test" });
- const response = await client.request({
- method: "POST",
- path: "/chat-messages",
- data: { user: "u" },
- });
-
- expect(response.requestId).toBe("req");
- const config = mockRequest.mock.calls[0][0];
- expect(config.headers.Authorization).toBe("Bearer test");
- expect(config.headers["Content-Type"]).toBe("application/json");
- expect(config.responseType).toBe("json");
- });
-
- it("serializes array query params", async () => {
- const mockRequest = vi.fn().mockResolvedValue({
- status: 200,
- data: "ok",
- headers: {},
- });
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
-
- const client = new HttpClient({ apiKey: "test" });
- await client.requestRaw({
- method: "GET",
- path: "/datasets",
- query: { tag_ids: ["a", "b"], limit: 2 },
- });
-
- const config = mockRequest.mock.calls[0][0];
- const queryString = config.paramsSerializer.serialize({
- tag_ids: ["a", "b"],
- limit: 2,
- });
- expect(queryString).toBe("tag_ids=a&tag_ids=b&limit=2");
- });
-
- it("returns SSE stream helpers", async () => {
- const mockRequest = vi.fn().mockResolvedValue({
- status: 200,
- data: Readable.from(["data: {\"text\":\"hi\"}\n\n"]),
- headers: { "x-request-id": "req" },
- });
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
-
- const client = new HttpClient({ apiKey: "test" });
- const stream = await client.requestStream({
- method: "POST",
- path: "/chat-messages",
- data: { user: "u" },
- });
-
- expect(stream.status).toBe(200);
- expect(stream.requestId).toBe("req");
- await expect(stream.toText()).resolves.toBe("hi");
- });
-
- it("returns binary stream helpers", async () => {
- const mockRequest = vi.fn().mockResolvedValue({
- status: 200,
- data: Readable.from(["chunk"]),
- headers: { "x-request-id": "req" },
- });
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
-
- const client = new HttpClient({ apiKey: "test" });
- const stream = await client.requestBinaryStream({
- method: "POST",
- path: "/text-to-audio",
- data: { user: "u", text: "hi" },
- });
-
- expect(stream.status).toBe(200);
- expect(stream.requestId).toBe("req");
- });
-
- it("respects form-data headers", async () => {
- const mockRequest = vi.fn().mockResolvedValue({
- status: 200,
- data: "ok",
- headers: {},
- });
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
-
- const client = new HttpClient({ apiKey: "test" });
- const form = {
- append: () => {},
- getHeaders: () => ({ "content-type": "multipart/form-data; boundary=abc" }),
- };
-
- await client.requestRaw({
- method: "POST",
- path: "/files/upload",
- data: form,
- });
-
- const config = mockRequest.mock.calls[0][0];
- expect(config.headers["content-type"]).toBe(
- "multipart/form-data; boundary=abc"
- );
- expect(config.headers["Content-Type"]).toBeUndefined();
- });
-
- it("maps 401 and 429 errors", async () => {
- const mockRequest = vi.fn();
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
-
- mockRequest.mockRejectedValueOnce({
- isAxiosError: true,
- response: {
- status: 401,
- data: { message: "unauthorized" },
- headers: {},
- },
- });
- await expect(
- client.requestRaw({ method: "GET", path: "/meta" })
- ).rejects.toBeInstanceOf(AuthenticationError);
-
- mockRequest.mockRejectedValueOnce({
- isAxiosError: true,
- response: {
- status: 429,
- data: { message: "rate" },
- headers: { "retry-after": "2" },
- },
- });
- const error = await client
- .requestRaw({ method: "GET", path: "/meta" })
- .catch((err) => err);
- expect(error).toBeInstanceOf(RateLimitError);
- expect(error.retryAfter).toBe(2);
- });
-
- it("maps validation and upload errors", async () => {
- const mockRequest = vi.fn();
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
-
- mockRequest.mockRejectedValueOnce({
- isAxiosError: true,
- response: {
- status: 422,
- data: { message: "invalid" },
- headers: {},
- },
- });
- await expect(
- client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } })
- ).rejects.toBeInstanceOf(ValidationError);
-
- mockRequest.mockRejectedValueOnce({
- isAxiosError: true,
- config: { url: "/files/upload" },
- response: {
- status: 400,
- data: { message: "bad upload" },
- headers: {},
- },
- });
- await expect(
- client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } })
- ).rejects.toBeInstanceOf(FileUploadError);
- });
-
- it("maps timeout and network errors", async () => {
- const mockRequest = vi.fn();
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
-
- mockRequest.mockRejectedValueOnce({
- isAxiosError: true,
- code: "ECONNABORTED",
- message: "timeout",
- });
- await expect(
- client.requestRaw({ method: "GET", path: "/meta" })
- ).rejects.toBeInstanceOf(TimeoutError);
-
- mockRequest.mockRejectedValueOnce({
- isAxiosError: true,
- message: "network",
- });
- await expect(
- client.requestRaw({ method: "GET", path: "/meta" })
- ).rejects.toBeInstanceOf(NetworkError);
- });
-
- it("retries on timeout errors", async () => {
- const mockRequest = vi.fn();
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 });
-
- mockRequest
- .mockRejectedValueOnce({
- isAxiosError: true,
- code: "ECONNABORTED",
- message: "timeout",
- })
- .mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
-
- await client.requestRaw({ method: "GET", path: "/meta" });
- expect(mockRequest).toHaveBeenCalledTimes(2);
- });
-
- it("validates query parameters before request", async () => {
- const mockRequest = vi.fn();
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const client = new HttpClient({ apiKey: "test" });
-
- await expect(
- client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } })
- ).rejects.toBeInstanceOf(ValidationError);
- expect(mockRequest).not.toHaveBeenCalled();
- });
-
- it("returns APIError for other http failures", async () => {
- const mockRequest = vi.fn();
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
-
- mockRequest.mockRejectedValueOnce({
- isAxiosError: true,
- response: { status: 500, data: { message: "server" }, headers: {} },
- });
-
- await expect(
- client.requestRaw({ method: "GET", path: "/meta" })
- ).rejects.toBeInstanceOf(APIError);
- });
-
- it("logs requests and responses when enableLogging is true", async () => {
- const mockRequest = vi.fn().mockResolvedValue({
- status: 200,
- data: { ok: true },
- headers: {},
- });
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
-
- const client = new HttpClient({ apiKey: "test", enableLogging: true });
- await client.requestRaw({ method: "GET", path: "/meta" });
-
- expect(consoleInfo).toHaveBeenCalledWith(
- expect.stringContaining("dify-client-node response 200 GET")
- );
- consoleInfo.mockRestore();
- });
-
- it("logs retry attempts when enableLogging is true", async () => {
- const mockRequest = vi.fn();
- vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
- const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
-
- const client = new HttpClient({
- apiKey: "test",
- maxRetries: 1,
- retryDelay: 0,
- enableLogging: true,
- });
-
- mockRequest
- .mockRejectedValueOnce({
- isAxiosError: true,
- code: "ECONNABORTED",
- message: "timeout",
- })
- .mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
-
- await client.requestRaw({ method: "GET", path: "/meta" });
-
- expect(consoleInfo).toHaveBeenCalledWith(
- expect.stringContaining("dify-client-node retry")
- );
- consoleInfo.mockRestore();
- });
-});
diff --git a/sdks/nodejs-client/src/http/client.test.ts b/sdks/nodejs-client/src/http/client.test.ts
new file mode 100644
index 0000000000..af859801c6
--- /dev/null
+++ b/sdks/nodejs-client/src/http/client.test.ts
@@ -0,0 +1,527 @@
+import { Readable, Stream } from "node:stream";
+import { beforeEach, describe, expect, it, vi } from "vitest";
+import {
+ APIError,
+ AuthenticationError,
+ FileUploadError,
+ NetworkError,
+ RateLimitError,
+ TimeoutError,
+ ValidationError,
+} from "../errors/dify-error";
+import { HttpClient } from "./client";
+
+const stubFetch = (): ReturnType => {
+ const fetchMock = vi.fn();
+ vi.stubGlobal("fetch", fetchMock);
+ return fetchMock;
+};
+
+const getFetchCall = (
+ fetchMock: ReturnType,
+ index = 0
+): [string, RequestInit | undefined] => {
+ const call = fetchMock.mock.calls[index];
+ if (!call) {
+ throw new Error(`Missing fetch call at index ${index}`);
+ }
+ return call as [string, RequestInit | undefined];
+};
+
+const toHeaderRecord = (headers: HeadersInit | undefined): Record =>
+ Object.fromEntries(new Headers(headers).entries());
+
+const jsonResponse = (
+ body: unknown,
+ init: ResponseInit = {}
+): Response =>
+ new Response(JSON.stringify(body), {
+ ...init,
+ headers: {
+ "content-type": "application/json",
+ ...(init.headers ?? {}),
+ },
+ });
+
+const textResponse = (body: string, init: ResponseInit = {}): Response =>
+ new Response(body, {
+ ...init,
+ headers: {
+ ...(init.headers ?? {}),
+ },
+ });
+
+describe("HttpClient", () => {
+ beforeEach(() => {
+ vi.restoreAllMocks();
+ vi.unstubAllGlobals();
+ });
+
+ it("builds requests with auth headers and JSON content type", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(
+ jsonResponse({ ok: true }, { status: 200, headers: { "x-request-id": "req" } })
+ );
+
+ const client = new HttpClient({ apiKey: "test" });
+ const response = await client.request({
+ method: "POST",
+ path: "/chat-messages",
+ data: { user: "u" },
+ });
+
+ expect(response.requestId).toBe("req");
+ expect(fetchMock).toHaveBeenCalledTimes(1);
+ const [url, init] = getFetchCall(fetchMock);
+ expect(url).toBe("https://api.dify.ai/v1/chat-messages");
+ expect(toHeaderRecord(init?.headers)).toMatchObject({
+ authorization: "Bearer test",
+ "content-type": "application/json",
+ "user-agent": "dify-client-node",
+ });
+ expect(init?.body).toBe(JSON.stringify({ user: "u" }));
+ });
+
+ it("serializes array query params", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
+
+ const client = new HttpClient({ apiKey: "test" });
+ await client.requestRaw({
+ method: "GET",
+ path: "/datasets",
+ query: { tag_ids: ["a", "b"], limit: 2 },
+ });
+
+ const [url] = getFetchCall(fetchMock);
+ expect(new URL(url).searchParams.toString()).toBe(
+ "tag_ids=a&tag_ids=b&limit=2"
+ );
+ });
+
+ it("returns SSE stream helpers", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(
+ new Response('data: {"text":"hi"}\n\n', {
+ status: 200,
+ headers: { "x-request-id": "req" },
+ })
+ );
+
+ const client = new HttpClient({ apiKey: "test" });
+ const stream = await client.requestStream({
+ method: "POST",
+ path: "/chat-messages",
+ data: { user: "u" },
+ });
+
+ expect(stream.status).toBe(200);
+ expect(stream.requestId).toBe("req");
+ await expect(stream.toText()).resolves.toBe("hi");
+ });
+
+ it("returns binary stream helpers", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(
+ new Response("chunk", {
+ status: 200,
+ headers: { "x-request-id": "req" },
+ })
+ );
+
+ const client = new HttpClient({ apiKey: "test" });
+ const stream = await client.requestBinaryStream({
+ method: "POST",
+ path: "/text-to-audio",
+ data: { user: "u", text: "hi" },
+ });
+
+ expect(stream.status).toBe(200);
+ expect(stream.requestId).toBe("req");
+ expect(stream.data).toBeInstanceOf(Readable);
+ });
+
+ it("respects form-data headers", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
+
+ const client = new HttpClient({ apiKey: "test" });
+ const form = new FormData();
+ form.append("file", new Blob(["abc"]), "file.txt");
+
+ await client.requestRaw({
+ method: "POST",
+ path: "/files/upload",
+ data: form,
+ });
+
+ const [, init] = getFetchCall(fetchMock);
+ expect(toHeaderRecord(init?.headers)).toMatchObject({
+ authorization: "Bearer test",
+ });
+ expect(toHeaderRecord(init?.headers)["content-type"]).toBeUndefined();
+ });
+
+ it("sends legacy form-data as a readable request body", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
+
+ const client = new HttpClient({ apiKey: "test" });
+ const legacyForm = Object.assign(Readable.from(["chunk"]), {
+ append: vi.fn(),
+ getHeaders: () => ({
+ "content-type": "multipart/form-data; boundary=test",
+ }),
+ });
+
+ await client.requestRaw({
+ method: "POST",
+ path: "/files/upload",
+ data: legacyForm,
+ });
+
+ const [, init] = getFetchCall(fetchMock);
+ expect(toHeaderRecord(init?.headers)).toMatchObject({
+ authorization: "Bearer test",
+ "content-type": "multipart/form-data; boundary=test",
+ });
+ expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
+ "half"
+ );
+ expect(init?.body).not.toBe(legacyForm);
+ });
+
+ it("rejects legacy form-data objects that are not readable streams", async () => {
+ const fetchMock = stubFetch();
+ const client = new HttpClient({ apiKey: "test" });
+ const legacyForm = {
+ append: vi.fn(),
+ getHeaders: () => ({
+ "content-type": "multipart/form-data; boundary=test",
+ }),
+ };
+
+ await expect(
+ client.requestRaw({
+ method: "POST",
+ path: "/files/upload",
+ data: legacyForm,
+ })
+ ).rejects.toBeInstanceOf(FileUploadError);
+
+ expect(fetchMock).not.toHaveBeenCalled();
+ });
+
+ it("accepts legacy pipeable streams that are not Readable instances", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
+ const client = new HttpClient({ apiKey: "test" });
+
+ const legacyStream = new Stream() as Stream &
+ NodeJS.ReadableStream & {
+ append: ReturnType;
+ getHeaders: () => Record;
+ };
+ legacyStream.readable = true;
+ legacyStream.pause = () => legacyStream;
+ legacyStream.resume = () => legacyStream;
+ legacyStream.append = vi.fn();
+ legacyStream.getHeaders = () => ({
+ "content-type": "multipart/form-data; boundary=test",
+ });
+ queueMicrotask(() => {
+ legacyStream.emit("data", Buffer.from("chunk"));
+ legacyStream.emit("end");
+ });
+
+ await client.requestRaw({
+ method: "POST",
+ path: "/files/upload",
+ data: legacyStream as unknown as FormData,
+ });
+
+ const [, init] = getFetchCall(fetchMock);
+ expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
+ "half"
+ );
+ });
+
+ it("returns buffers for byte responses", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(
+ new Response(Uint8Array.from([1, 2, 3]), {
+ status: 200,
+ headers: { "content-type": "application/octet-stream" },
+ })
+ );
+
+ const client = new HttpClient({ apiKey: "test" });
+ const response = await client.request({
+ method: "GET",
+ path: "/files/file-1/preview",
+ responseType: "bytes",
+ });
+
+ expect(Buffer.isBuffer(response.data)).toBe(true);
+ expect(Array.from(response.data.values())).toEqual([1, 2, 3]);
+ });
+
+ it("keeps arraybuffer as a backward-compatible binary alias", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(
+ new Response(Uint8Array.from([4, 5, 6]), {
+ status: 200,
+ headers: { "content-type": "application/octet-stream" },
+ })
+ );
+
+ const client = new HttpClient({ apiKey: "test" });
+ const response = await client.request({
+ method: "GET",
+ path: "/files/file-1/preview",
+ responseType: "arraybuffer",
+ });
+
+ expect(Buffer.isBuffer(response.data)).toBe(true);
+ expect(Array.from(response.data.values())).toEqual([4, 5, 6]);
+ });
+
+ it("returns null for empty no-content responses", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(new Response(null, { status: 204 }));
+
+ const client = new HttpClient({ apiKey: "test" });
+ const response = await client.requestRaw({
+ method: "GET",
+ path: "/meta",
+ });
+
+ expect(response.data).toBeNull();
+ });
+
+ it("maps 401 and 429 errors", async () => {
+ const fetchMock = stubFetch();
+ fetchMock
+ .mockResolvedValueOnce(
+ jsonResponse({ message: "unauthorized" }, { status: 401 })
+ )
+ .mockResolvedValueOnce(
+ jsonResponse({ message: "rate" }, { status: 429, headers: { "retry-after": "2" } })
+ );
+ const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
+
+ await expect(
+ client.requestRaw({ method: "GET", path: "/meta" })
+ ).rejects.toBeInstanceOf(AuthenticationError);
+
+ const error = await client
+ .requestRaw({ method: "GET", path: "/meta" })
+ .catch((err: unknown) => err);
+ expect(error).toBeInstanceOf(RateLimitError);
+ expect((error as RateLimitError).retryAfter).toBe(2);
+ });
+
+ it("maps validation and upload errors", async () => {
+ const fetchMock = stubFetch();
+ fetchMock
+ .mockResolvedValueOnce(jsonResponse({ message: "invalid" }, { status: 422 }))
+ .mockResolvedValueOnce(jsonResponse({ message: "bad upload" }, { status: 400 }));
+ const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
+
+ await expect(
+ client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } })
+ ).rejects.toBeInstanceOf(ValidationError);
+
+ await expect(
+ client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } })
+ ).rejects.toBeInstanceOf(FileUploadError);
+ });
+
+ it("maps timeout and network errors", async () => {
+ const fetchMock = stubFetch();
+ fetchMock
+ .mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
+ .mockRejectedValueOnce(new Error("network"));
+ const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
+
+ await expect(
+ client.requestRaw({ method: "GET", path: "/meta" })
+ ).rejects.toBeInstanceOf(TimeoutError);
+
+ await expect(
+ client.requestRaw({ method: "GET", path: "/meta" })
+ ).rejects.toBeInstanceOf(NetworkError);
+ });
+
+ it("maps unknown transport failures to NetworkError", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockRejectedValueOnce("boom");
+ const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
+
+ await expect(
+ client.requestRaw({ method: "GET", path: "/meta" })
+ ).rejects.toMatchObject({
+ name: "NetworkError",
+ message: "Unexpected network error",
+ });
+ });
+
+ it("retries on timeout errors", async () => {
+ const fetchMock = stubFetch();
+ fetchMock
+ .mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
+ .mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
+ const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 });
+
+ await client.requestRaw({ method: "GET", path: "/meta" });
+ expect(fetchMock).toHaveBeenCalledTimes(2);
+ });
+
+ it("does not retry non-replayable readable request bodies", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockRejectedValueOnce(new Error("network"));
+ const client = new HttpClient({ apiKey: "test", maxRetries: 2, retryDelay: 0 });
+
+ await expect(
+ client.requestRaw({
+ method: "POST",
+ path: "/chat-messages",
+ data: Readable.from(["chunk"]),
+ })
+ ).rejects.toBeInstanceOf(NetworkError);
+
+ expect(fetchMock).toHaveBeenCalledTimes(1);
+ const [, init] = getFetchCall(fetchMock);
+ expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
+ "half"
+ );
+ });
+
+ it("validates query parameters before request", async () => {
+ const fetchMock = stubFetch();
+ const client = new HttpClient({ apiKey: "test" });
+
+ await expect(
+ client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } })
+ ).rejects.toBeInstanceOf(ValidationError);
+ expect(fetchMock).not.toHaveBeenCalled();
+ });
+
+ it("returns APIError for other http failures", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse({ message: "server" }, { status: 500 }));
+ const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
+
+ await expect(
+ client.requestRaw({ method: "GET", path: "/meta" })
+ ).rejects.toBeInstanceOf(APIError);
+ });
+
+ it("uses plain text bodies when json parsing is not possible", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(
+ textResponse("plain text", {
+ status: 200,
+ headers: { "content-type": "text/plain" },
+ })
+ );
+ const client = new HttpClient({ apiKey: "test" });
+
+ const response = await client.requestRaw({
+ method: "GET",
+ path: "/info",
+ });
+
+ expect(response.data).toBe("plain text");
+ });
+
+ it("keeps invalid json error bodies as API errors", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(
+ textResponse("{invalid", {
+ status: 500,
+ headers: { "content-type": "application/json", "x-request-id": "req-500" },
+ })
+ );
+ const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
+
+ await expect(
+ client.requestRaw({ method: "GET", path: "/meta" })
+ ).rejects.toMatchObject({
+ name: "APIError",
+ statusCode: 500,
+ requestId: "req-500",
+ responseBody: "{invalid",
+ });
+ });
+
+ it("sends raw string bodies without additional json encoding", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
+ const client = new HttpClient({ apiKey: "test" });
+
+ await client.requestRaw({
+ method: "POST",
+ path: "/meta",
+ data: '{"pre":"serialized"}',
+ headers: { "Content-Type": "application/custom+json" },
+ });
+
+ const [, init] = getFetchCall(fetchMock);
+ expect(init?.body).toBe('{"pre":"serialized"}');
+ expect(toHeaderRecord(init?.headers)).toMatchObject({
+ "content-type": "application/custom+json",
+ });
+ });
+
+ it("preserves explicit user-agent headers", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }, { status: 200 }));
+ const client = new HttpClient({ apiKey: "test" });
+
+ await client.requestRaw({
+ method: "GET",
+ path: "/meta",
+ headers: { "User-Agent": "custom-agent" },
+ });
+
+ const [, init] = getFetchCall(fetchMock);
+ expect(toHeaderRecord(init?.headers)).toMatchObject({
+ "user-agent": "custom-agent",
+ });
+ });
+
+ it("logs requests and responses when enableLogging is true", async () => {
+ const fetchMock = stubFetch();
+ fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }, { status: 200 }));
+ const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
+
+ const client = new HttpClient({ apiKey: "test", enableLogging: true });
+ await client.requestRaw({ method: "GET", path: "/meta" });
+
+ expect(consoleInfo).toHaveBeenCalledWith(
+ expect.stringContaining("dify-client-node response 200 GET")
+ );
+ });
+
+ it("logs retry attempts when enableLogging is true", async () => {
+ const fetchMock = stubFetch();
+ fetchMock
+ .mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
+ .mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
+ const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
+
+ const client = new HttpClient({
+ apiKey: "test",
+ maxRetries: 1,
+ retryDelay: 0,
+ enableLogging: true,
+ });
+
+ await client.requestRaw({ method: "GET", path: "/meta" });
+
+ expect(consoleInfo).toHaveBeenCalledWith(
+ expect.stringContaining("dify-client-node retry")
+ );
+ });
+});
diff --git a/sdks/nodejs-client/src/http/client.ts b/sdks/nodejs-client/src/http/client.ts
index 44b63c9903..c233d9807d 100644
--- a/sdks/nodejs-client/src/http/client.ts
+++ b/sdks/nodejs-client/src/http/client.ts
@@ -1,11 +1,4 @@
-import axios from "axios";
-import type {
- AxiosError,
- AxiosInstance,
- AxiosRequestConfig,
- AxiosResponse,
-} from "axios";
-import type { Readable } from "node:stream";
+import { Readable } from "node:stream";
import {
DEFAULT_BASE_URL,
DEFAULT_MAX_RETRIES,
@@ -13,36 +6,69 @@ import {
DEFAULT_TIMEOUT_SECONDS,
} from "../types/common";
import type {
+ BinaryStream,
DifyClientConfig,
DifyResponse,
+ DifyStream,
Headers,
+ JsonValue,
QueryParams,
RequestMethod,
} from "../types/common";
-import type { DifyError } from "../errors/dify-error";
import {
APIError,
AuthenticationError,
+ DifyError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
+import type { SdkFormData } from "./form-data";
import { getFormDataHeaders, isFormData } from "./form-data";
import { createBinaryStream, createSseStream } from "./sse";
import { getRetryDelayMs, shouldRetry, sleep } from "./retry";
import { validateParams } from "../client/validation";
+import { hasStringProperty, isRecord } from "../internal/type-guards";
const DEFAULT_USER_AGENT = "dify-client-node";
-export type RequestOptions = {
+export type HttpResponseType = "json" | "bytes" | "stream" | "arraybuffer";
+
+export type HttpRequestBody =
+ | JsonValue
+ | Readable
+ | SdkFormData
+ | URLSearchParams
+ | ArrayBuffer
+ | ArrayBufferView
+ | Blob
+ | string
+ | null;
+
+export type ResponseDataFor<TResponseType extends HttpResponseType> =
+ TResponseType extends "stream"
+ ? Readable
+ : TResponseType extends "bytes" | "arraybuffer"
+ ? Buffer
+ : JsonValue | string | null;
+
+export type RawHttpResponse<TData> = {
+ data: TData;
+ status: number;
+ headers: Headers;
+ requestId?: string;
+ url: string;
+};
+
+export type RequestOptions<TResponseType extends HttpResponseType = "json"> = {
method: RequestMethod;
path: string;
query?: QueryParams;
- data?: unknown;
+ data?: HttpRequestBody;
headers?: Headers;
- responseType?: AxiosRequestConfig["responseType"];
+ responseType?: TResponseType;
};
export type HttpClientSettings = Required<
@@ -51,6 +77,23 @@ export type HttpClientSettings = Required<
apiKey: string;
};
+type FetchRequestInit = RequestInit & {
+ duplex?: "half";
+};
+
+type PreparedRequestBody = {
+ body?: BodyInit | null;
+ headers: Headers;
+ duplex?: "half";
+ replayable: boolean;
+};
+
+type TimeoutContext = {
+ cleanup: () => void;
+ reason: Error;
+ signal: AbortSignal;
+};
+
const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({
apiKey: config.apiKey,
baseUrl: config.baseUrl ?? DEFAULT_BASE_URL,
@@ -60,19 +103,10 @@ const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({
enableLogging: config.enableLogging ?? false,
});
-const normalizeHeaders = (headers: AxiosResponse["headers"]): Headers => {
+const normalizeHeaders = (headers: globalThis.Headers): Headers => {
const result: Headers = {};
- if (!headers) {
- return result;
- }
- Object.entries(headers).forEach(([key, value]) => {
- if (Array.isArray(value)) {
- result[key.toLowerCase()] = value.join(", ");
- } else if (typeof value === "string") {
- result[key.toLowerCase()] = value;
- } else if (typeof value === "number") {
- result[key.toLowerCase()] = value.toString();
- }
+ headers.forEach((value, key) => {
+ result[key.toLowerCase()] = value;
});
return result;
};
@@ -80,9 +114,18 @@ const normalizeHeaders = (headers: AxiosResponse["headers"]): Headers => {
const resolveRequestId = (headers: Headers): string | undefined =>
headers["x-request-id"] ?? headers["x-requestid"];
-const buildRequestUrl = (baseUrl: string, path: string): string => {
+const buildRequestUrl = (
+ baseUrl: string,
+ path: string,
+ query?: QueryParams
+): string => {
const trimmed = baseUrl.replace(/\/+$/, "");
- return `${trimmed}${path}`;
+ const url = new URL(`${trimmed}${path}`);
+ const queryString = buildQueryString(query);
+ if (queryString) {
+ url.search = queryString;
+ }
+ return url.toString();
};
const buildQueryString = (params?: QueryParams): string => {
@@ -121,24 +164,53 @@ const parseRetryAfterSeconds = (headerValue?: string): number | undefined => {
return undefined;
};
-const isReadableStream = (value: unknown): value is Readable => {
+const isPipeableStream = (value: unknown): value is { pipe: (destination: unknown) => unknown } => {
if (!value || typeof value !== "object") {
return false;
}
return typeof (value as { pipe?: unknown }).pipe === "function";
};
-const isUploadLikeRequest = (config?: AxiosRequestConfig): boolean => {
- const url = (config?.url ?? "").toLowerCase();
- if (!url) {
- return false;
+const toNodeReadable = (value: unknown): Readable | null => {
+ if (value instanceof Readable) {
+ return value;
}
+ if (!isPipeableStream(value)) {
+ return null;
+ }
+ const readable = new Readable({
+ read() {},
+ });
+ return readable.wrap(value as NodeJS.ReadableStream);
+};
+
+const isBinaryBody = (
+ value: unknown
+): value is ArrayBuffer | ArrayBufferView | Blob => {
+ if (value instanceof Blob) {
+ return true;
+ }
+ if (value instanceof ArrayBuffer) {
+ return true;
+ }
+ return ArrayBuffer.isView(value);
+};
+
+const isJsonBody = (value: unknown): value is Exclude<JsonValue, string> =>
+ value === null ||
+ typeof value === "boolean" ||
+ typeof value === "number" ||
+ Array.isArray(value) ||
+ isRecord(value);
+
+const isUploadLikeRequest = (path: string): boolean => {
+ const normalizedPath = path.toLowerCase();
return (
- url.includes("upload") ||
- url.includes("/files/") ||
- url.includes("audio-to-text") ||
- url.includes("create_by_file") ||
- url.includes("update_by_file")
+ normalizedPath.includes("upload") ||
+ normalizedPath.includes("/files/") ||
+ normalizedPath.includes("audio-to-text") ||
+ normalizedPath.includes("create_by_file") ||
+ normalizedPath.includes("update_by_file")
);
};
@@ -146,88 +218,242 @@ const resolveErrorMessage = (status: number, responseBody: unknown): string => {
if (typeof responseBody === "string" && responseBody.trim().length > 0) {
return responseBody;
}
- if (
- responseBody &&
- typeof responseBody === "object" &&
- "message" in responseBody
- ) {
-    const message = (responseBody as Record<string, unknown>).message;
- if (typeof message === "string" && message.trim().length > 0) {
+ if (hasStringProperty(responseBody, "message")) {
+ const message = responseBody.message.trim();
+ if (message.length > 0) {
return message;
}
}
return `Request failed with status code ${status}`;
};
-const mapAxiosError = (error: unknown): DifyError => {
- if (axios.isAxiosError(error)) {
- const axiosError = error as AxiosError;
- if (axiosError.response) {
- const status = axiosError.response.status;
- const headers = normalizeHeaders(axiosError.response.headers);
- const requestId = resolveRequestId(headers);
- const responseBody = axiosError.response.data;
- const message = resolveErrorMessage(status, responseBody);
-
- if (status === 401) {
- return new AuthenticationError(message, {
- statusCode: status,
- responseBody,
- requestId,
- });
- }
- if (status === 429) {
- const retryAfter = parseRetryAfterSeconds(headers["retry-after"]);
- return new RateLimitError(message, {
- statusCode: status,
- responseBody,
- requestId,
- retryAfter,
- });
- }
- if (status === 422) {
- return new ValidationError(message, {
- statusCode: status,
- responseBody,
- requestId,
- });
- }
- if (status === 400) {
- if (isUploadLikeRequest(axiosError.config)) {
- return new FileUploadError(message, {
- statusCode: status,
- responseBody,
- requestId,
- });
- }
- }
- return new APIError(message, {
- statusCode: status,
- responseBody,
- requestId,
- });
- }
- if (axiosError.code === "ECONNABORTED") {
- return new TimeoutError("Request timed out", { cause: axiosError });
- }
- return new NetworkError(axiosError.message, { cause: axiosError });
+const parseJsonLikeText = (
+ value: string,
+ contentType?: string | null
+): JsonValue | string | null => {
+ if (value.length === 0) {
+ return null;
}
+ const shouldParseJson =
+ contentType?.includes("application/json") === true ||
+ contentType?.includes("+json") === true;
+ if (!shouldParseJson) {
+ try {
+ return JSON.parse(value) as JsonValue;
+ } catch {
+ return value;
+ }
+ }
+ return JSON.parse(value) as JsonValue;
+};
+
+const prepareRequestBody = (
+ method: RequestMethod,
+ data: HttpRequestBody | undefined
+): PreparedRequestBody => {
+ if (method === "GET" || data === undefined) {
+ return {
+ body: undefined,
+ headers: {},
+ replayable: true,
+ };
+ }
+
+ if (isFormData(data)) {
+ if ("getHeaders" in data && typeof data.getHeaders === "function") {
+ const readable = toNodeReadable(data);
+ if (!readable) {
+ throw new FileUploadError(
+ "Legacy FormData must be a readable stream when used with fetch"
+ );
+ }
+ return {
+ body: Readable.toWeb(readable) as BodyInit,
+ headers: getFormDataHeaders(data),
+ duplex: "half",
+ replayable: false,
+ };
+ }
+ return {
+ body: data as BodyInit,
+ headers: getFormDataHeaders(data),
+ replayable: true,
+ };
+ }
+
+ if (typeof data === "string") {
+ return {
+ body: data,
+ headers: {},
+ replayable: true,
+ };
+ }
+
+ const readable = toNodeReadable(data);
+ if (readable) {
+ return {
+ body: Readable.toWeb(readable) as BodyInit,
+ headers: {},
+ duplex: "half",
+ replayable: false,
+ };
+ }
+
+ if (data instanceof URLSearchParams || isBinaryBody(data)) {
+ const body =
+ ArrayBuffer.isView(data) && !(data instanceof Uint8Array)
+ ? new Uint8Array(data.buffer, data.byteOffset, data.byteLength)
+ : data;
+ return {
+ body: body as BodyInit,
+ headers: {},
+ replayable: true,
+ };
+ }
+
+ if (isJsonBody(data)) {
+ return {
+ body: JSON.stringify(data),
+ headers: {
+ "Content-Type": "application/json",
+ },
+ replayable: true,
+ };
+ }
+
+ throw new ValidationError("Unsupported request body type");
+};
+
+const createTimeoutContext = (timeoutMs: number): TimeoutContext => {
+ const controller = new AbortController();
+ const reason = new Error("Request timed out");
+ const timer = setTimeout(() => {
+ controller.abort(reason);
+ }, timeoutMs);
+ return {
+ signal: controller.signal,
+ reason,
+ cleanup: () => {
+ clearTimeout(timer);
+ },
+ };
+};
+
+const parseResponseBody = async <TResponseType extends HttpResponseType>(
+ response: Response,
+ responseType: TResponseType
+): Promise<ResponseDataFor<TResponseType>> => {
+ if (responseType === "stream") {
+ if (!response.body) {
+ throw new NetworkError("Response body is empty");
+ }
+ return Readable.fromWeb(
+      response.body as unknown as Parameters<typeof Readable.fromWeb>[0]
+    ) as ResponseDataFor<TResponseType>;
+ }
+
+ if (responseType === "bytes" || responseType === "arraybuffer") {
+ const bytes = Buffer.from(await response.arrayBuffer());
+    return bytes as ResponseDataFor<TResponseType>;
+ }
+
+ if (response.status === 204 || response.status === 205 || response.status === 304) {
+    return null as ResponseDataFor<TResponseType>;
+ }
+
+ const text = await response.text();
+ try {
+ return parseJsonLikeText(
+ text,
+ response.headers.get("content-type")
+    ) as ResponseDataFor<TResponseType>;
+ } catch (error) {
+ if (!response.ok && error instanceof SyntaxError) {
+      return text as ResponseDataFor<TResponseType>;
+ }
+ throw error;
+ }
+};
+
+const mapHttpError = (
+  response: RawHttpResponse<unknown>,
+ path: string
+): DifyError => {
+ const status = response.status;
+ const responseBody = response.data;
+ const message = resolveErrorMessage(status, responseBody);
+
+ if (status === 401) {
+ return new AuthenticationError(message, {
+ statusCode: status,
+ responseBody,
+ requestId: response.requestId,
+ });
+ }
+
+ if (status === 429) {
+ const retryAfter = parseRetryAfterSeconds(response.headers["retry-after"]);
+ return new RateLimitError(message, {
+ statusCode: status,
+ responseBody,
+ requestId: response.requestId,
+ retryAfter,
+ });
+ }
+
+ if (status === 422) {
+ return new ValidationError(message, {
+ statusCode: status,
+ responseBody,
+ requestId: response.requestId,
+ });
+ }
+
+ if (status === 400 && isUploadLikeRequest(path)) {
+ return new FileUploadError(message, {
+ statusCode: status,
+ responseBody,
+ requestId: response.requestId,
+ });
+ }
+
+ return new APIError(message, {
+ statusCode: status,
+ responseBody,
+ requestId: response.requestId,
+ });
+};
+
+const mapTransportError = (
+ error: unknown,
+ timeoutContext: TimeoutContext
+): DifyError => {
+ if (error instanceof DifyError) {
+ return error;
+ }
+
+ if (
+ timeoutContext.signal.aborted &&
+ timeoutContext.signal.reason === timeoutContext.reason
+ ) {
+ return new TimeoutError("Request timed out", { cause: error });
+ }
+
if (error instanceof Error) {
+ if (error.name === "AbortError" || error.name === "TimeoutError") {
+ return new TimeoutError("Request timed out", { cause: error });
+ }
return new NetworkError(error.message, { cause: error });
}
+
return new NetworkError("Unexpected network error", { cause: error });
};
export class HttpClient {
- private axios: AxiosInstance;
private settings: HttpClientSettings;
constructor(config: DifyClientConfig) {
this.settings = normalizeSettings(config);
- this.axios = axios.create({
- baseURL: this.settings.baseUrl,
- timeout: this.settings.timeout * 1000,
- });
}
updateApiKey(apiKey: string): void {
@@ -238,118 +464,123 @@ export class HttpClient {
return { ...this.settings };
}
- async request(options: RequestOptions): Promise> {
+ async request<
+ T,
+ TResponseType extends HttpResponseType = "json",
+ >(options: RequestOptions): Promise