Merge remote-tracking branch 'origin/main' into feat/dify-agent

commit 468d9bca09
.github/workflows/api-tests.yml (vendored, 6 changes)
@@ -16,7 +16,7 @@ concurrency:
jobs:
  api-unit:
    name: API Unit Tests
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    env:
      COVERAGE_FILE: coverage-unit
    defaults:
@@ -62,7 +62,7 @@ jobs:

  api-integration:
    name: API Integration Tests
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    env:
      COVERAGE_FILE: coverage-integration
      STORAGE_TYPE: opendal
@@ -137,7 +137,7 @@ jobs:

  api-coverage:
    name: API Coverage
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    needs:
      - api-unit
      - api-integration
.github/workflows/autofix.yml (vendored, 2 changes)

@@ -13,7 +13,7 @@ permissions:
jobs:
  autofix:
    if: github.repository == 'langgenius/dify'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Complete merge group check
        if: github.event_name == 'merge_group'
.github/workflows/build-push.yml (vendored, 46 changes)

@@ -26,6 +26,9 @@ jobs:
  build:
    runs-on: ${{ matrix.runs_on }}
    if: github.repository == 'langgenius/dify'
+   permissions:
+     contents: read
+     id-token: write
    strategy:
      matrix:
        include:
@@ -35,28 +38,28 @@ jobs:
          build_context: "{{defaultContext}}:api"
          file: "Dockerfile"
          platform: linux/amd64
-         runs_on: ubuntu-latest
+         runs_on: depot-ubuntu-24.04-4
        - service_name: "build-api-arm64"
          image_name_env: "DIFY_API_IMAGE_NAME"
          artifact_context: "api"
          build_context: "{{defaultContext}}:api"
          file: "Dockerfile"
          platform: linux/arm64
-         runs_on: ubuntu-24.04-arm
+         runs_on: depot-ubuntu-24.04-4
        - service_name: "build-web-amd64"
          image_name_env: "DIFY_WEB_IMAGE_NAME"
          artifact_context: "web"
          build_context: "{{defaultContext}}"
          file: "web/Dockerfile"
          platform: linux/amd64
-         runs_on: ubuntu-latest
+         runs_on: depot-ubuntu-24.04-4
        - service_name: "build-web-arm64"
          image_name_env: "DIFY_WEB_IMAGE_NAME"
          artifact_context: "web"
          build_context: "{{defaultContext}}"
          file: "web/Dockerfile"
          platform: linux/arm64
-         runs_on: ubuntu-24.04-arm
+         runs_on: depot-ubuntu-24.04-4

    steps:
      - name: Prepare
@@ -70,8 +73,8 @@ jobs:
          username: ${{ env.DOCKERHUB_USER }}
          password: ${{ env.DOCKERHUB_TOKEN }}

-     - name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
+     - name: Set up Depot CLI
+       uses: depot/setup-action@v1

      - name: Extract metadata for Docker
        id: meta
@@ -81,16 +84,15 @@ jobs:

      - name: Build Docker image
        id: build
-       uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
+       uses: depot/build-push-action@v1
        with:
+         project: ${{ vars.DEPOT_PROJECT_ID }}
          context: ${{ matrix.build_context }}
          file: ${{ matrix.file }}
          platforms: ${{ matrix.platform }}
          build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true
-         cache-from: type=gha,scope=${{ matrix.service_name }}
-         cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}

      - name: Export digest
        env:
@@ -108,9 +110,33 @@ jobs:
          if-no-files-found: error
          retention-days: 1

+ fork-build-validate:
+   if: github.repository != 'langgenius/dify'
+   runs-on: ubuntu-24.04
+   strategy:
+     matrix:
+       include:
+         - service_name: "validate-api-amd64"
+           build_context: "{{defaultContext}}:api"
+           file: "Dockerfile"
+         - service_name: "validate-web-amd64"
+           build_context: "{{defaultContext}}"
+           file: "web/Dockerfile"
+   steps:
+     - name: Set up Docker Buildx
+       uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
+
+     - name: Validate Docker image
+       uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
+       with:
+         push: false
+         context: ${{ matrix.build_context }}
+         file: ${{ matrix.file }}
+         platforms: linux/amd64
+
  create-manifest:
    needs: build
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    if: github.repository == 'langgenius/dify'
    strategy:
      matrix:
.github/workflows/db-migration-test.yml (vendored, 26 changes)

@@ -9,7 +9,7 @@ concurrency:

jobs:
  db-migration-test-postgres:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04

    steps:
      - name: Checkout code
@@ -59,7 +59,7 @@ jobs:
        run: uv run --directory api flask upgrade-db

  db-migration-test-mysql:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04

    steps:
      - name: Checkout code
@@ -110,6 +110,28 @@ jobs:
          sed -i 's/DB_PORT=5432/DB_PORT=3306/' .env
          sed -i 's/DB_USERNAME=postgres/DB_USERNAME=root/' .env

+     # hoverkraft-tech/compose-action@v2.6.0 only waits for `docker compose up -d`
+     # to return (container processes started); it does not wait on healthcheck
+     # status. mysql:8.0's first-time init takes 15-30s, so without an explicit
+     # wait the migration runs while InnoDB is still initialising and gets
+     # killed with "Lost connection during query". Poll a real SELECT until it
+     # succeeds.
+     - name: Wait for MySQL to accept queries
+       run: |
+         set +e
+         for i in $(seq 1 60); do
+           if docker run --rm --network host mysql:8.0 \
+               mysql -h 127.0.0.1 -P 3306 -uroot -pdifyai123456 \
+               -e 'SELECT 1' >/dev/null 2>&1; then
+             echo "MySQL ready after ${i}s"
+             exit 0
+           fi
+           sleep 1
+         done
+         echo "MySQL not ready after 60s; dumping container logs:"
+         docker compose -f docker/docker-compose.middleware.yaml --profile mysql logs --tail=200 db_mysql
+         exit 1
+
      - name: Run DB Migration
        env:
          DEBUG: true
.github/workflows/deploy-agent-dev.yml (vendored, 2 changes)

@@ -13,7 +13,7 @@ on:

jobs:
  deploy:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'deploy/agent-dev'
.github/workflows/deploy-dev.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ on:

jobs:
  deploy:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'deploy/dev'
.github/workflows/deploy-enterprise.yml (vendored, 2 changes)

@@ -13,7 +13,7 @@ on:

jobs:
  deploy:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'deploy/enterprise'
.github/workflows/deploy-hitl.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ on:

jobs:
  deploy:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'build/feat/hitl'
.github/workflows/docker-build.yml (vendored, 47 changes)

@@ -14,40 +14,69 @@ concurrency:

jobs:
  build-docker:
+   if: github.event.pull_request.head.repo.full_name == github.repository
    runs-on: ${{ matrix.runs_on }}
+   permissions:
+     contents: read
+     id-token: write
    strategy:
      matrix:
        include:
          - service_name: "api-amd64"
            platform: linux/amd64
-           runs_on: ubuntu-latest
+           runs_on: depot-ubuntu-24.04-4
            context: "{{defaultContext}}:api"
            file: "Dockerfile"
          - service_name: "api-arm64"
            platform: linux/arm64
-           runs_on: ubuntu-24.04-arm
+           runs_on: depot-ubuntu-24.04-4
            context: "{{defaultContext}}:api"
            file: "Dockerfile"
          - service_name: "web-amd64"
            platform: linux/amd64
-           runs_on: ubuntu-latest
+           runs_on: depot-ubuntu-24.04-4
            context: "{{defaultContext}}"
            file: "web/Dockerfile"
          - service_name: "web-arm64"
            platform: linux/arm64
-           runs_on: ubuntu-24.04-arm
+           runs_on: depot-ubuntu-24.04-4
            context: "{{defaultContext}}"
            file: "web/Dockerfile"
    steps:
-     - name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
+     - name: Set up Depot CLI
+       uses: depot/setup-action@v1

      - name: Build Docker Image
-       uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
+       uses: depot/build-push-action@v1
        with:
+         project: ${{ vars.DEPOT_PROJECT_ID }}
          push: false
          context: ${{ matrix.context }}
          file: ${{ matrix.file }}
          platforms: ${{ matrix.platform }}
-         cache-from: type=gha
-         cache-to: type=gha,mode=max
+
+ build-docker-fork:
+   if: github.event.pull_request.head.repo.full_name != github.repository
+   runs-on: ubuntu-24.04
+   permissions:
+     contents: read
+   strategy:
+     matrix:
+       include:
+         - service_name: "api-amd64"
+           context: "{{defaultContext}}:api"
+           file: "Dockerfile"
+         - service_name: "web-amd64"
+           context: "{{defaultContext}}"
+           file: "web/Dockerfile"
+   steps:
+     - name: Set up Docker Buildx
+       uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
+
+     - name: Build Docker Image
+       uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
+       with:
+         push: false
+         context: ${{ matrix.context }}
+         file: ${{ matrix.file }}
+         platforms: linux/amd64
.github/workflows/labeler.yml (vendored, 2 changes)

@@ -7,7 +7,7 @@ jobs:
    permissions:
      contents: read
      pull-requests: write
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
        with:
.github/workflows/main-ci.yml (vendored, 24 changes)

@@ -23,7 +23,7 @@ concurrency:
jobs:
  pre_job:
    name: Skip Duplicate Checks
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    outputs:
      should_skip: ${{ steps.skip_check.outputs.should_skip || 'false' }}
    steps:
@@ -39,7 +39,7 @@ jobs:
    name: Check Changed Files
    needs: pre_job
    if: needs.pre_job.outputs.should_skip != 'true'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    outputs:
      api-changed: ${{ steps.changes.outputs.api }}
      e2e-changed: ${{ steps.changes.outputs.e2e }}
@@ -141,7 +141,7 @@ jobs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.api-changed != 'true'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Report skipped API tests
        run: echo "No API-related changes detected; skipping API tests."
@@ -154,7 +154,7 @@ jobs:
      - check-changes
      - api-tests-run
      - api-tests-skip
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Finalize API Tests status
        env:
@@ -201,7 +201,7 @@ jobs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.web-changed != 'true'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Report skipped web tests
        run: echo "No web-related changes detected; skipping web tests."
@@ -214,7 +214,7 @@ jobs:
      - check-changes
      - web-tests-run
      - web-tests-skip
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Finalize Web Tests status
        env:
@@ -260,7 +260,7 @@ jobs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.e2e-changed != 'true'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Report skipped web full-stack e2e
        run: echo "No E2E-related changes detected; skipping web full-stack E2E."
@@ -273,7 +273,7 @@ jobs:
      - check-changes
      - web-e2e-run
      - web-e2e-skip
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Finalize Web Full-Stack E2E status
        env:
@@ -325,7 +325,7 @@ jobs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.vdb-changed != 'true'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Report skipped VDB tests
        run: echo "No VDB-related changes detected; skipping VDB tests."
@@ -338,7 +338,7 @@ jobs:
      - check-changes
      - vdb-tests-run
      - vdb-tests-skip
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Finalize VDB Tests status
        env:
@@ -384,7 +384,7 @@ jobs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.migration-changed != 'true'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Report skipped DB migration tests
        run: echo "No migration-related changes detected; skipping DB migration tests."
@@ -397,7 +397,7 @@ jobs:
      - check-changes
      - db-migration-test-run
      - db-migration-test-skip
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Finalize DB Migration Test status
        env:
.github/workflows/pyrefly-diff-comment.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ permissions: {}
jobs:
  comment:
    name: Comment PR with pyrefly diff
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    permissions:
      actions: read
      contents: read
.github/workflows/pyrefly-diff.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ permissions:

jobs:
  pyrefly-diff:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    permissions:
      contents: read
      issues: write

@@ -12,7 +12,7 @@ permissions: {}
jobs:
  comment:
    name: Comment PR with type coverage
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    permissions:
      actions: read
      contents: read
.github/workflows/pyrefly-type-coverage.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ permissions:

jobs:
  pyrefly-type-coverage:
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    permissions:
      contents: read
      issues: write
.github/workflows/semantic-pull-request.yml (vendored, 2 changes)

@@ -16,7 +16,7 @@ jobs:
    name: Validate PR title
    permissions:
      pull-requests: read
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    steps:
      - name: Complete merge group check
        if: github.event_name == 'merge_group'
.github/workflows/stale.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ on:
jobs:
  stale:

-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    permissions:
      issues: write
      pull-requests: write
.github/workflows/style.yml (vendored, 8 changes)

@@ -15,7 +15,7 @@ permissions:
jobs:
  python-style:
    name: Python Style
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04

    steps:
      - name: Checkout code
@@ -57,7 +57,7 @@ jobs:

  web-style:
    name: Web Style
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    defaults:
      run:
        working-directory: ./web
@@ -110,6 +110,8 @@ jobs:
      - name: Web tsslint
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./web
+       env:
+         NODE_OPTIONS: --max-old-space-size=4096
        run: vp run lint:tss

      - name: Web type check
@@ -131,7 +133,7 @@ jobs:

  superlinter:
    name: SuperLinter
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04

    steps:
      - name: Checkout code
.github/workflows/tool-test-sdks.yaml (vendored, 2 changes)

@@ -18,7 +18,7 @@ concurrency:
jobs:
  build:
    name: unit test for Node.js SDK
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04

    defaults:
      run:
.github/workflows/translate-i18n-claude.yml (vendored, 4 changes)

@@ -35,7 +35,7 @@ concurrency:
jobs:
  translate:
    if: github.repository == 'langgenius/dify'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    timeout-minutes: 120

    steps:
@@ -158,7 +158,7 @@ jobs:

      - name: Run Claude Code for Translation Sync
        if: steps.context.outputs.CHANGED_FILES != ''
-       uses: anthropics/claude-code-action@38ec876110f9fbf8b950c79f534430740c3ac009 # v1.0.101
+       uses: anthropics/claude-code-action@567fe954a4527e81f132d87d1bdbcc94f7737434 # v1.0.107
        with:
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/trigger-i18n-sync.yml (vendored, 2 changes)

@@ -16,7 +16,7 @@ concurrency:
jobs:
  trigger:
    if: github.repository == 'langgenius/dify'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    timeout-minutes: 5

    steps:
.github/workflows/vdb-tests-full.yml (vendored, 2 changes)

@@ -16,7 +16,7 @@ jobs:
  test:
    name: Full VDB Tests
    if: github.repository == 'langgenius/dify'
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    strategy:
      matrix:
        python-version:
.github/workflows/vdb-tests.yml (vendored, 2 changes)

@@ -13,7 +13,7 @@ concurrency:
jobs:
  test:
    name: VDB Smoke Tests
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04
    strategy:
      matrix:
        python-version:
.github/workflows/web-e2e.yml (vendored, 2 changes)

@@ -13,7 +13,7 @@ concurrency:
jobs:
  test:
    name: Web Full-Stack E2E
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04-4
    defaults:
      run:
        shell: bash
.github/workflows/web-tests.yml (vendored, 6 changes)

@@ -16,7 +16,7 @@ concurrency:
jobs:
  test:
    name: Web Tests (${{ matrix.shardIndex }}/${{ matrix.shardTotal }})
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04-4
    env:
      VITEST_COVERAGE_SCOPE: app-components
    strategy:
@@ -54,7 +54,7 @@ jobs:
    name: Merge Test Reports
    if: ${{ !cancelled() }}
    needs: [test]
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04-4
    env:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
    defaults:
@@ -92,7 +92,7 @@ jobs:

  dify-ui-test:
    name: dify-ui Tests
-   runs-on: ubuntu-latest
+   runs-on: depot-ubuntu-24.04-4
    env:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
    defaults:
@@ -38,6 +38,48 @@ class HitTestingPayload(BaseModel):


class DatasetsHitTestingBase:
+   @staticmethod
+   def _normalize_hit_testing_query(query: Any) -> str:
+       """Return the user-visible query string from legacy and current response shapes."""
+       if isinstance(query, str):
+           return query
+
+       if isinstance(query, dict):
+           content = query.get("content")
+           if isinstance(content, str):
+               return content
+
+       raise ValueError("Invalid hit testing query response")
+
+   @staticmethod
+   def _normalize_hit_testing_records(records: Any) -> list[dict[str, Any]]:
+       """Coerce nullable collection fields into lists before response validation."""
+       if not isinstance(records, list):
+           return []
+
+       normalized_records: list[dict[str, Any]] = []
+       for record in records:
+           if not isinstance(record, dict):
+               continue
+
+           normalized_record = dict(record)
+           segment = normalized_record.get("segment")
+           if isinstance(segment, dict):
+               normalized_segment = dict(segment)
+               if normalized_segment.get("keywords") is None:
+                   normalized_segment["keywords"] = []
+               normalized_record["segment"] = normalized_segment
+
+           if normalized_record.get("child_chunks") is None:
+               normalized_record["child_chunks"] = []
+
+           if normalized_record.get("files") is None:
+               normalized_record["files"] = []
+
+           normalized_records.append(normalized_record)
+
+       return normalized_records
+
    @staticmethod
    def get_and_validate_dataset(dataset_id: str):
        assert isinstance(current_user, Account)
@@ -75,7 +117,12 @@ class DatasetsHitTestingBase:
                attachment_ids=args.get("attachment_ids"),
                limit=10,
            )
-           return {"query": response["query"], "records": marshal(response["records"], hit_testing_record_fields)}
+           return {
+               "query": DatasetsHitTestingBase._normalize_hit_testing_query(response.get("query")),
+               "records": DatasetsHitTestingBase._normalize_hit_testing_records(
+                   marshal(response.get("records", []), hit_testing_record_fields)
+               ),
+           }
        except services.errors.index.IndexNotInitializedError:
            raise DatasetNotInitializedError()
        except ProviderTokenNotInitError as ex:
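The coercion these helpers perform is easiest to see on a concrete payload. A minimal sketch, constructed for illustration rather than taken from the diff:

# Illustration only: legacy hit-testing records may carry None where the
# response schema expects lists; the helper copies each record and coerces
# those fields, dropping anything that is not a dict.
legacy = [
    {"segment": {"content": "chunk", "keywords": None}, "child_chunks": None, "files": None},
    "not-a-record",  # silently skipped
]
cleaned = DatasetsHitTestingBase._normalize_hit_testing_records(legacy)
assert cleaned == [
    {"segment": {"content": "chunk", "keywords": []}, "child_chunks": [], "files": []}
]
assert DatasetsHitTestingBase._normalize_hit_testing_query({"content": "hello"}) == "hello"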
@@ -468,15 +468,98 @@ class DocumentAddByFileApi(DatasetApiResource):
        return documents_and_batch_fields, 200


+ def _update_document_by_file(tenant_id: str, dataset_id: UUID, document_id: UUID) -> tuple[Mapping[str, object], int]:
+     """Update a document from an uploaded file for canonical and deprecated routes."""
+     dataset_id_str = str(dataset_id)
+     tenant_id_str = str(tenant_id)
+     dataset = db.session.scalar(
+         select(Dataset).where(Dataset.tenant_id == tenant_id_str, Dataset.id == dataset_id_str).limit(1)
+     )
+
+     if not dataset:
+         raise ValueError("Dataset does not exist.")
+
+     if dataset.provider == "external":
+         raise ValueError("External datasets are not supported.")
+
+     args: dict[str, object] = {}
+     if "data" in request.form:
+         args = json.loads(request.form["data"])
+     if "doc_form" not in args:
+         args["doc_form"] = dataset.chunk_structure or "text_model"
+     if "doc_language" not in args:
+         args["doc_language"] = "English"
+
+     # indexing_technique is already set in dataset since this is an update
+     args["indexing_technique"] = dataset.indexing_technique
+
+     if "file" in request.files:
+         # save file info
+         file = request.files["file"]
+
+         if len(request.files) > 1:
+             raise TooManyFilesError()
+
+         if not file.filename:
+             raise FilenameNotExistsError
+
+         if not current_user:
+             raise ValueError("current_user is required")
+
+         try:
+             upload_file = FileService(db.engine).upload_file(
+                 filename=file.filename,
+                 content=file.read(),
+                 mimetype=file.mimetype,
+                 user=current_user,
+                 source="datasets",
+             )
+         except services.errors.file.FileTooLargeError as file_too_large_error:
+             raise FileTooLargeError(file_too_large_error.description)
+         except services.errors.file.UnsupportedFileTypeError:
+             raise UnsupportedFileTypeError()
+         data_source = {
+             "type": "upload_file",
+             "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},
+         }
+         args["data_source"] = data_source
+
+     # validate args
+     args["original_document_id"] = str(document_id)
+
+     knowledge_config = KnowledgeConfig.model_validate(args)
+     DocumentService.document_create_args_validate(knowledge_config)
+
+     try:
+         documents, _ = DocumentService.save_document_with_dataset_id(
+             dataset=dataset,
+             knowledge_config=knowledge_config,
+             account=dataset.created_by_account,
+             dataset_process_rule=dataset.latest_process_rule if "process_rule" not in args else None,
+             created_from="api",
+         )
+     except ProviderTokenNotInitError as ex:
+         raise ProviderNotInitializeError(ex.description)
+     document = documents[0]
+     documents_and_batch_fields = {"document": marshal(document, document_fields), "batch": document.batch}
+     return documents_and_batch_fields, 200
+
+
@service_api_ns.route(
    "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update_by_file",
+   "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update-by-file",
)
- class DocumentUpdateByFileApi(DatasetApiResource):
-     """Resource for update documents."""
+ class DeprecatedDocumentUpdateByFileApi(DatasetApiResource):
+     """Deprecated resource aliases for file document updates."""

-     @service_api_ns.doc("update_document_by_file")
-     @service_api_ns.doc(description="Update an existing document by uploading a file")
+     @service_api_ns.doc("update_document_by_file_deprecated")
+     @service_api_ns.doc(deprecated=True)
+     @service_api_ns.doc(
+         description=(
+             "Deprecated legacy alias for updating an existing document by uploading a file. "
+             "Use PATCH /datasets/{dataset_id}/documents/{document_id} instead."
+         )
+     )
    @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
    @service_api_ns.doc(
        responses={
@@ -487,82 +570,9 @@ class DocumentUpdateByFileApi(DatasetApiResource):
    )
    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
-     def post(self, tenant_id, dataset_id, document_id):
-         """Update document by upload file."""
-         dataset = db.session.scalar(
-             select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
-         )
-
-         if not dataset:
-             raise ValueError("Dataset does not exist.")
-
-         if dataset.provider == "external":
-             raise ValueError("External datasets are not supported.")
-
-         args = {}
-         if "data" in request.form:
-             args = json.loads(request.form["data"])
-         if "doc_form" not in args:
-             args["doc_form"] = dataset.chunk_structure or "text_model"
-         if "doc_language" not in args:
-             args["doc_language"] = "English"
-
-         # get dataset info
-         dataset_id = str(dataset_id)
-         tenant_id = str(tenant_id)
-
-         # indexing_technique is already set in dataset since this is an update
-         args["indexing_technique"] = dataset.indexing_technique
-
-         if "file" in request.files:
-             # save file info
-             file = request.files["file"]
-
-             if len(request.files) > 1:
-                 raise TooManyFilesError()
-
-             if not file.filename:
-                 raise FilenameNotExistsError
-
-             if not current_user:
-                 raise ValueError("current_user is required")
-
-             try:
-                 upload_file = FileService(db.engine).upload_file(
-                     filename=file.filename,
-                     content=file.read(),
-                     mimetype=file.mimetype,
-                     user=current_user,
-                     source="datasets",
-                 )
-             except services.errors.file.FileTooLargeError as file_too_large_error:
-                 raise FileTooLargeError(file_too_large_error.description)
-             except services.errors.file.UnsupportedFileTypeError:
-                 raise UnsupportedFileTypeError()
-             data_source = {
-                 "type": "upload_file",
-                 "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},
-             }
-             args["data_source"] = data_source
-         # validate args
-         args["original_document_id"] = str(document_id)
-
-         knowledge_config = KnowledgeConfig.model_validate(args)
-         DocumentService.document_create_args_validate(knowledge_config)
-
-         try:
-             documents, _ = DocumentService.save_document_with_dataset_id(
-                 dataset=dataset,
-                 knowledge_config=knowledge_config,
-                 account=dataset.created_by_account,
-                 dataset_process_rule=dataset.latest_process_rule if "process_rule" not in args else None,
-                 created_from="api",
-             )
-         except ProviderTokenNotInitError as ex:
-             raise ProviderNotInitializeError(ex.description)
-         document = documents[0]
-         documents_and_batch_fields = {"document": marshal(document, document_fields), "batch": document.batch}
-         return documents_and_batch_fields, 200
+     def post(self, tenant_id: str, dataset_id: UUID, document_id: UUID):
+         """Update document by file through the deprecated file-update aliases."""
+         return _update_document_by_file(tenant_id=tenant_id, dataset_id=dataset_id, document_id=document_id)


@service_api_ns.route("/datasets/<uuid:dataset_id>/documents")
@@ -876,6 +886,22 @@ class DocumentApi(DatasetApiResource):

        return response

+     @service_api_ns.doc("update_document_by_file")
+     @service_api_ns.doc(description="Update an existing document by uploading a file")
+     @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
+     @service_api_ns.doc(
+         responses={
+             200: "Document updated successfully",
+             401: "Unauthorized - invalid API token",
+             404: "Document not found",
+         }
+     )
+     @cloud_edition_billing_resource_check("vector_space", "dataset")
+     @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
+     def patch(self, tenant_id: str, dataset_id: UUID, document_id: UUID):
+         """Update document by file on the canonical document resource."""
+         return _update_document_by_file(tenant_id=tenant_id, dataset_id=dataset_id, document_id=document_id)
+
    @service_api_ns.doc("delete_document")
    @service_api_ns.doc(description="Delete a document")
    @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
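For API consumers, the practical effect is that the update_by_file aliases still work but are flagged deprecated, while the same multipart request can target the document resource directly. A hypothetical client call (base URL, token, and file name are placeholders, not from the diff):

import json

import requests

# Hypothetical call against the canonical route that replaces the deprecated
# .../update_by_file aliases; "data" carries the same JSON the form field took.
resp = requests.patch(
    "https://api.example.com/v1/datasets/<dataset_id>/documents/<document_id>",
    headers={"Authorization": "Bearer <service-api-token>"},
    files={"file": ("notes.md", b"# updated content", "text/markdown")},
    data={"data": json.dumps({"doc_form": "text_model", "doc_language": "English"})},
)
resp.raise_for_status()
print(resp.json()["batch"])  # batch id, usable for polling indexing status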
@@ -551,6 +551,7 @@ class RetrievalService:
                child_index_nodes = session.execute(child_chunk_stmt).scalars().all()

                for i in child_index_nodes:
+                   assert i.index_node_id
                    segment_ids.append(i.segment_id)
                    if i.segment_id in child_chunk_map:
                        child_chunk_map[i.segment_id].append(i)
@@ -39,6 +39,58 @@ class AbstractVectorFactory(ABC):
        return index_struct_dict


+ class _LazyEmbeddings(Embeddings):
+     """Lazy proxy that defers materializing the real embedding model.
+
+     Constructing the real embeddings (via ``ModelManager.get_model_instance``)
+     transitively calls ``FeatureService.get_features`` → ``BillingService``
+     HTTP GETs (see ``provider_manager.py``). Cleanup paths
+     (``delete_by_ids`` / ``delete`` / ``text_exists``) do not need embeddings
+     at all, so deferring this until an ``embed_*`` method is actually invoked
+     keeps cleanup tasks resilient to transient billing-API failures and avoids
+     leaving stranded ``document_segments`` / ``child_chunks`` whenever billing
+     hiccups.
+
+     Existing callers that perform create / search operations are unaffected:
+     the first ``embed_*`` call materializes the underlying model and the
+     behavior is identical from that point on.
+     """
+
+     def __init__(self, dataset: Dataset):
+         self._dataset = dataset
+         self._real: Embeddings | None = None
+
+     def _ensure(self) -> Embeddings:
+         if self._real is None:
+             model_manager = ModelManager.for_tenant(tenant_id=self._dataset.tenant_id)
+             embedding_model = model_manager.get_model_instance(
+                 tenant_id=self._dataset.tenant_id,
+                 provider=self._dataset.embedding_model_provider,
+                 model_type=ModelType.TEXT_EMBEDDING,
+                 model=self._dataset.embedding_model,
+             )
+             self._real = CacheEmbedding(embedding_model)
+         return self._real
+
+     def embed_documents(self, texts: list[str]) -> list[list[float]]:
+         return self._ensure().embed_documents(texts)
+
+     def embed_multimodal_documents(self, multimodel_documents: list[dict[str, Any]]) -> list[list[float]]:
+         return self._ensure().embed_multimodal_documents(multimodel_documents)
+
+     def embed_query(self, text: str) -> list[float]:
+         return self._ensure().embed_query(text)
+
+     def embed_multimodal_query(self, multimodel_document: dict[str, Any]) -> list[float]:
+         return self._ensure().embed_multimodal_query(multimodel_document)
+
+     async def aembed_documents(self, texts: list[str]) -> list[list[float]]:
+         return await self._ensure().aembed_documents(texts)
+
+     async def aembed_query(self, text: str) -> list[float]:
+         return await self._ensure().aembed_query(text)
+
+
class Vector:
    def __init__(self, dataset: Dataset, attributes: list | None = None):
        if attributes is None:
@@ -60,7 +112,11 @@ class Vector:
            "original_chunk_id",
        ]
        self._dataset = dataset
-       self._embeddings = self._get_embeddings()
+       # Use a lazy proxy so cleanup paths (delete_by_ids / delete / text_exists)
+       # never transitively trigger billing API calls during ``Vector(dataset)``
+       # construction. The real embedding model is materialized only when an
+       # ``embed_*`` method is actually invoked (i.e. create / search paths).
+       self._embeddings: Embeddings = _LazyEmbeddings(dataset)
        self._attributes = attributes
        self._vector_processor = self._init_vector()
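The net effect, sketched with assumed call sites (the cleanup methods are the ones the docstring names; this is an illustration, not a test from the diff):

# Sketch of the deferral: construction and cleanup never build the embedding
# model, so no billing HTTP call can fail them.
vector = Vector(dataset)            # _LazyEmbeddings holds only the dataset
vector.delete_by_ids(["node-1"])    # cleanup path: proxy never materialized

# Create/search paths behave as before: the first embed_* call builds the
# real CacheEmbedding via ModelManager, then every call delegates to it.
query_vector = vector._embeddings.embed_query("what is dify?")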
@@ -11,6 +11,7 @@ from core.rag.models.document import AttachmentDocument, Document
from extensions.ext_database import db
from graphon.model_runtime.entities.model_entities import ModelType
from models.dataset import ChildChunk, Dataset, DocumentSegment, SegmentAttachmentBinding
+ from models.enums import SegmentType


class DatasetDocumentStore:
@@ -127,6 +128,7 @@ class DatasetDocumentStore:
            if save_child:
                if doc.children:
                    for position, child in enumerate(doc.children, start=1):
+                       assert self._document_id
                        child_segment = ChildChunk(
                            tenant_id=self._dataset.tenant_id,
                            dataset_id=self._dataset.id,
@@ -137,7 +139,7 @@ class DatasetDocumentStore:
                            index_node_hash=child.metadata.get("doc_hash"),
                            content=child.page_content,
                            word_count=len(child.page_content),
-                           type="automatic",
+                           type=SegmentType.AUTOMATIC,
                            created_by=self._user_id,
                        )
                        db.session.add(child_segment)
@@ -163,6 +165,7 @@ class DatasetDocumentStore:
                    )
                    # add new child chunks
                    for position, child in enumerate(doc.children, start=1):
+                       assert self._document_id
                        child_segment = ChildChunk(
                            tenant_id=self._dataset.tenant_id,
                            dataset_id=self._dataset.id,
@@ -173,7 +176,7 @@ class DatasetDocumentStore:
                            index_node_hash=child.metadata.get("doc_hash"),
                            content=child.page_content,
                            word_count=len(child.page_content),
-                           type="automatic",
+                           type=SegmentType.AUTOMATIC,
                            created_by=self._user_id,
                        )
                        db.session.add(child_segment)
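The literal-to-enum swap is behavior-preserving as long as SegmentType is a StrEnum whose AUTOMATIC member carries the value "automatic" (which the ChildChunk model's server_default of 'automatic' suggests). A quick self-contained check of that assumption:

from enum import StrEnum


class SegmentTypeDemo(StrEnum):  # stand-in for models.enums.SegmentType
    AUTOMATIC = "automatic"


# Equal as strings, so rows written either way store the same value; the enum
# just gives call sites a typed, greppable spelling.
assert SegmentTypeDemo.AUTOMATIC == "automatic"
assert f"{SegmentTypeDemo.AUTOMATIC}" == "automatic"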
@@ -1,56 +1,17 @@
import logging
- from dataclasses import dataclass
from enum import StrEnum, auto

logger = logging.getLogger(__name__)


- @dataclass
- class QuotaCharge:
-     """
-     Result of a quota consumption operation.
-
-     Attributes:
-         success: Whether the quota charge succeeded
-         charge_id: UUID for refund, or None if failed/disabled
-     """
-
-     success: bool
-     charge_id: str | None
-     _quota_type: "QuotaType"
-
-     def refund(self) -> None:
-         """
-         Refund this quota charge.
-
-         Safe to call even if charge failed or was disabled.
-         This method guarantees no exceptions will be raised.
-         """
-         if self.charge_id:
-             self._quota_type.refund(self.charge_id)
-             logger.info("Refunded quota for %s with charge_id: %s", self._quota_type.value, self.charge_id)
-
-
class QuotaType(StrEnum):
    """
    Supported quota types for tenant feature usage.

    Add additional types here whenever new billable features become available.
    """

    # Trigger execution quota
    TRIGGER = auto()

    # Workflow execution quota
    WORKFLOW = auto()

-   UNLIMITED = auto()
-
    @property
    def billing_key(self) -> str:
        """
        Get the billing key for the feature.
        """
        match self:
            case QuotaType.TRIGGER:
                return "trigger_event"
@@ -58,152 +19,3 @@ class QuotaType(StrEnum):
                return "api_rate_limit"
            case _:
                raise ValueError(f"Invalid quota type: {self}")
-
-   def consume(self, tenant_id: str, amount: int = 1) -> QuotaCharge:
-       """
-       Consume quota for the feature.
-
-       Args:
-           tenant_id: The tenant identifier
-           amount: Amount to consume (default: 1)
-
-       Returns:
-           QuotaCharge with success status and charge_id for refund
-
-       Raises:
-           QuotaExceededError: When quota is insufficient
-       """
-       from configs import dify_config
-       from services.billing_service import BillingService
-       from services.errors.app import QuotaExceededError
-
-       if not dify_config.BILLING_ENABLED:
-           logger.debug("Billing disabled, allowing request for %s", tenant_id)
-           return QuotaCharge(success=True, charge_id=None, _quota_type=self)
-
-       logger.info("Consuming %d %s quota for tenant %s", amount, self.value, tenant_id)
-
-       if amount <= 0:
-           raise ValueError("Amount to consume must be greater than 0")
-
-       try:
-           response = BillingService.update_tenant_feature_plan_usage(tenant_id, self.billing_key, delta=amount)
-
-           if response.get("result") != "success":
-               logger.warning(
-                   "Failed to consume quota for %s, feature %s details: %s",
-                   tenant_id,
-                   self.value,
-                   response.get("detail"),
-               )
-               raise QuotaExceededError(feature=self.value, tenant_id=tenant_id, required=amount)
-
-           charge_id = response.get("history_id")
-           logger.debug(
-               "Successfully consumed %d %s quota for tenant %s, charge_id: %s",
-               amount,
-               self.value,
-               tenant_id,
-               charge_id,
-           )
-           return QuotaCharge(success=True, charge_id=charge_id, _quota_type=self)
-
-       except QuotaExceededError:
-           raise
-       except Exception:
-           # fail-safe: allow request on billing errors
-           logger.exception("Failed to consume quota for %s, feature %s", tenant_id, self.value)
-           return unlimited()
-
-   def check(self, tenant_id: str, amount: int = 1) -> bool:
-       """
-       Check if tenant has sufficient quota without consuming.
-
-       Args:
-           tenant_id: The tenant identifier
-           amount: Amount to check (default: 1)
-
-       Returns:
-           True if quota is sufficient, False otherwise
-       """
-       from configs import dify_config
-
-       if not dify_config.BILLING_ENABLED:
-           return True
-
-       if amount <= 0:
-           raise ValueError("Amount to check must be greater than 0")
-
-       try:
-           remaining = self.get_remaining(tenant_id)
-           return remaining >= amount if remaining != -1 else True
-       except Exception:
-           logger.exception("Failed to check quota for %s, feature %s", tenant_id, self.value)
-           # fail-safe: allow request on billing errors
-           return True
-
-   def refund(self, charge_id: str) -> None:
-       """
-       Refund quota using charge_id from consume().
-
-       This method guarantees no exceptions will be raised.
-       All errors are logged but silently handled.
-
-       Args:
-           charge_id: The UUID returned from consume()
-       """
-       try:
-           from configs import dify_config
-           from services.billing_service import BillingService
-
-           if not dify_config.BILLING_ENABLED:
-               return
-
-           if not charge_id:
-               logger.warning("Cannot refund: charge_id is empty")
-               return
-
-           logger.info("Refunding %s quota with charge_id: %s", self.value, charge_id)
-
-           response = BillingService.refund_tenant_feature_plan_usage(charge_id)
-           if response.get("result") == "success":
-               logger.debug("Successfully refunded %s quota, charge_id: %s", self.value, charge_id)
-           else:
-               logger.warning("Refund failed for charge_id: %s", charge_id)
-
-       except Exception:
-           # Catch ALL exceptions - refund must never fail
-           logger.exception("Failed to refund quota for charge_id: %s", charge_id)
-           # Don't raise - refund is best-effort and must be silent
-
-   def get_remaining(self, tenant_id: str) -> int:
-       """
-       Get remaining quota for the tenant.
-
-       Args:
-           tenant_id: The tenant identifier
-
-       Returns:
-           Remaining quota amount
-       """
-       from services.billing_service import BillingService
-
-       try:
-           usage_info = BillingService.get_tenant_feature_plan_usage(tenant_id, self.billing_key)
-           # Assuming the API returns a dict with 'remaining' or 'limit' and 'used'
-           if isinstance(usage_info, dict):
-               return usage_info.get("remaining", 0)
-           # If it returns a simple number, treat it as remaining
-           return int(usage_info) if usage_info else 0
-       except Exception:
-           logger.exception("Failed to get remaining quota for %s, feature %s", tenant_id, self.value)
-           return -1
-
-
- def unlimited() -> QuotaCharge:
-     """
-     Return a quota charge for unlimited quota.
-
-     This is useful for features that are not subject to quota limits, such as the UNLIMITED quota type.
-     """
-     return QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.UNLIMITED)
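The consume/refund contract the removed code defined pairs every successful charge with a refundable handle, and the import change at the end of this diff (from services.quota_service import QuotaService, unlimited) suggests that implementation now lives in services.quota_service. A sketch of the old call shape, with names quoted from the deleted code; run_workflow is a hypothetical billable operation:

# Sketch of the contract the removed helpers defined, for orientation only.
charge = QuotaType.WORKFLOW.consume(tenant_id="tenant-123")
try:
    run_workflow()  # hypothetical billable work
except Exception:
    charge.refund()  # best-effort, guaranteed not to raise; no-op without a charge_id
    raise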
@@ -1036,7 +1036,7 @@ class DocumentSegment(Base):
        return attachment_list


- class ChildChunk(Base):
+ class ChildChunk(TypeBase):
    __tablename__ = "child_chunks"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="child_chunk_pkey"),
@@ -1046,29 +1046,42 @@
    )

    # initial fields
-   id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4()))
-   tenant_id = mapped_column(StringUUID, nullable=False)
-   dataset_id = mapped_column(StringUUID, nullable=False)
-   document_id = mapped_column(StringUUID, nullable=False)
-   segment_id = mapped_column(StringUUID, nullable=False)
+   id: Mapped[str] = mapped_column(StringUUID, nullable=False, default_factory=lambda: str(uuid4()), init=False)
+   tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+   dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+   document_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+   segment_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    position: Mapped[int] = mapped_column(sa.Integer, nullable=False)
-   content = mapped_column(LongText, nullable=False)
+   content: Mapped[str] = mapped_column(LongText, nullable=False)
    word_count: Mapped[int] = mapped_column(sa.Integer, nullable=False)
    # indexing fields
-   index_node_id = mapped_column(String(255), nullable=True)
-   index_node_hash = mapped_column(String(255), nullable=True)
-   type: Mapped[SegmentType] = mapped_column(
-       EnumText(SegmentType, length=255), nullable=False, server_default=sa.text("'automatic'")
-   )
-   created_by = mapped_column(StringUUID, nullable=False)
-   created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())
-   updated_by = mapped_column(StringUUID, nullable=True)
+   created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
+   created_at: Mapped[datetime] = mapped_column(
+       DateTime, nullable=False, server_default=sa.func.current_timestamp(), init=False
+   )
+   updated_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, init=False)
    updated_at: Mapped[datetime] = mapped_column(
-       DateTime, nullable=False, server_default=sa.func.current_timestamp(), onupdate=func.current_timestamp()
+       DateTime,
+       nullable=False,
+       server_default=sa.func.current_timestamp(),
+       onupdate=func.current_timestamp(),
+       init=False,
    )
-   indexing_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-   completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-   error = mapped_column(LongText, nullable=True)
+   indexing_at: Mapped[datetime | None] = mapped_column(
+       DateTime, nullable=True, insert_default=None, server_default=None, init=False
+   )
+   completed_at: Mapped[datetime | None] = mapped_column(
+       DateTime, nullable=True, insert_default=None, server_default=None, init=False
+   )
+   index_node_id: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
+   index_node_hash: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
+   type: Mapped[SegmentType] = mapped_column(
+       EnumText(SegmentType, length=255),
+       nullable=False,
+       server_default=sa.text("'automatic'"),
+       default=SegmentType.AUTOMATIC,
+   )
+   error: Mapped[str | None] = mapped_column(LongText, nullable=True, init=False)

    @property
    def dataset(self):

@@ -1867,15 +1867,18 @@ class MessageAnnotation(TypeBase):
    )

    id: Mapped[str] = mapped_column(
-       StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
+       StringUUID,
+       insert_default=lambda: str(uuid4()),
+       default_factory=lambda: str(uuid4()),
+       init=False,
    )
    app_id: Mapped[str] = mapped_column(StringUUID)
    question: Mapped[str] = mapped_column(LongText, nullable=False)
    content: Mapped[str] = mapped_column(LongText, nullable=False)
-   hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"), init=False)
    account_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    conversation_id: Mapped[str | None] = mapped_column(StringUUID, sa.ForeignKey("conversations.id"), default=None)
    message_id: Mapped[str | None] = mapped_column(StringUUID, default=None)
+   hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"), default=0)
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
@@ -2179,7 +2182,7 @@ class ApiToken(Base):  # bug: this uses setattr so idk the field.
    return result


- class UploadFile(Base):
+ class UploadFile(TypeBase):
    __tablename__ = "upload_files"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="upload_file_pkey"),
@@ -2187,9 +2190,12 @@
    )

    # NOTE: The `id` field is generated within the application to minimize extra roundtrips
-   # (especially when generating `source_url`).
-   # The `server_default` serves as a fallback mechanism.
-   id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()))
+   # (especially when generating `source_url`) and keep model metadata portable across databases.
+   id: Mapped[str] = mapped_column(
+       StringUUID,
+       init=False,
+       default_factory=lambda: str(uuid4()),
+   )
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    storage_type: Mapped[StorageType] = mapped_column(EnumText(StorageType, length=255), nullable=False)
    key: Mapped[str] = mapped_column(String(255), nullable=False)
@@ -2197,16 +2203,6 @@
    size: Mapped[int] = mapped_column(sa.Integer, nullable=False)
    extension: Mapped[str] = mapped_column(String(255), nullable=False)
    mime_type: Mapped[str] = mapped_column(String(255), nullable=True)
-
-   # The `created_by_role` field indicates whether the file was created by an `Account` or an `EndUser`.
-   # Its value is derived from the `CreatorUserRole` enumeration.
-   created_by_role: Mapped[CreatorUserRole] = mapped_column(
-       EnumText(CreatorUserRole, length=255),
-       nullable=False,
-       server_default=sa.text("'account'"),
-       default=CreatorUserRole.ACCOUNT,
-   )

    # The `created_by` field stores the ID of the entity that created this upload file.
    #
    # If `created_by_role` is `ACCOUNT`, it corresponds to `Account.id`.
@@ -2225,10 +2221,18 @@
    # `used` may indicate whether the file has been utilized by another service.
    used: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"))

+   # The `created_by_role` field indicates whether the file was created by an `Account` or an `EndUser`.
+   # Its value is derived from the `CreatorUserRole` enumeration.
+   created_by_role: Mapped[CreatorUserRole] = mapped_column(
+       EnumText(CreatorUserRole, length=255),
+       nullable=False,
+       server_default=sa.text("'account'"),
+       default=CreatorUserRole.ACCOUNT,
+   )
    # `used_by` may indicate the ID of the user who utilized this file.
-   used_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
-   used_at: Mapped[datetime | None] = mapped_column(sa.DateTime, nullable=True)
-   hash: Mapped[str | None] = mapped_column(String(255), nullable=True)
+   used_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
+   used_at: Mapped[datetime | None] = mapped_column(sa.DateTime, nullable=True, default=None)
+   hash: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
    source_url: Mapped[str] = mapped_column(LongText, default="")

    def __init__(
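The column rewrites above follow SQLAlchemy 2.0's MappedAsDataclass conventions; a minimal standalone sketch of the semantics ChildChunk and UploadFile now rely on, assuming TypeBase mixes in MappedAsDataclass (the demo names are not from the diff):

from uuid import uuid4

from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column


class DemoBase(MappedAsDataclass, DeclarativeBase):  # stand-in for TypeBase
    pass


class Demo(DemoBase):
    __tablename__ = "demo"

    # init=False keeps the field out of __init__; default_factory runs per row.
    id: Mapped[str] = mapped_column(primary_key=True, init=False, default_factory=lambda: str(uuid4()))
    name: Mapped[str] = mapped_column()                      # required constructor argument
    note: Mapped[str | None] = mapped_column(default=None)   # optional constructor argument


row = Demo(name="x")  # id is generated automatically, note defaults to None
# Dataclass rules require fields with defaults to follow fields without them,
# which is why the rewritten models reorder columns and add explicit
# default= / init=False flags.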
@@ -50,7 +50,7 @@ from libs.uuid_utils import uuidv7
from ._workflow_exc import NodeNotFoundError, WorkflowDataError

if TYPE_CHECKING:
-   from .model import AppMode, UploadFile
+   from .model import AppMode


from constants import DEFAULT_FILE_NUMBER_LIMITS, HIDDEN_VALUE
@@ -63,6 +63,10 @@ from .account import Account
from .base import Base, DefaultFieldsDCMixin, TypeBase
from .engine import db
from .enums import CreatorUserRole, DraftVariableType, ExecutionOffLoadType, WorkflowRunTriggeredFrom
+
+ # UploadFile uses TypeBase while workflow execution offload models use Base, so relationships
+ # must target the class object directly instead of relying on string lookup across registries.
+ from .model import UploadFile
from .types import EnumText, LongText, StringUUID
from .utils.file_input_compat import (
    build_file_from_mapping_without_lookup,
@@ -1096,8 +1100,6 @@ class WorkflowNodeExecutionModel(Base):  # This model is expected to have `offlo

    @staticmethod
    def _load_full_content(session: orm.Session, file_id: str, storage: Storage):
-       from .model import UploadFile
-
        stmt = sa.select(UploadFile).where(UploadFile.id == file_id)
        file = session.scalars(stmt).first()
        assert file is not None, f"UploadFile with id {file_id} should exist but not"
@@ -1191,10 +1193,11 @@ class WorkflowNodeExecutionOffload(Base):
    )

    file: Mapped[Optional["UploadFile"]] = orm.relationship(
+       UploadFile,
        foreign_keys=[file_id],
        lazy="raise",
        uselist=False,
-       primaryjoin="WorkflowNodeExecutionOffload.file_id == UploadFile.id",
+       primaryjoin=lambda: orm.foreign(WorkflowNodeExecutionOffload.file_id) == UploadFile.id,
    )

@@ -1968,10 +1971,11 @@ class WorkflowDraftVariableFile(Base):

    # Relationship to UploadFile
    upload_file: Mapped["UploadFile"] = orm.relationship(
+       UploadFile,
        foreign_keys=[upload_file_id],
        lazy="raise",
        uselist=False,
-       primaryjoin="WorkflowDraftVariableFile.upload_file_id == UploadFile.id",
+       primaryjoin=lambda: orm.foreign(WorkflowDraftVariableFile.upload_file_id) == UploadFile.id,
    )
@@ -6,9 +6,10 @@ requires-python = "~=3.12.0"
dependencies = [
    # Legacy: mature and widely deployed
    "bleach>=6.3.0",
-   "boto3>=1.42.91",
+   "boto3>=1.42.96",
    "celery>=5.6.3",
+   "croniter>=6.2.2",
    "flask>=3.1.3,<4.0.0",
    "flask-cors>=6.0.2",
    "gevent>=26.4.0",
    "gevent-websocket>=0.10.1",
@@ -16,7 +17,7 @@ dependencies = [
    "google-api-python-client>=2.194.0",
    "gunicorn>=25.3.0",
    "psycogreen>=1.0.2",
-   "psycopg2-binary>=2.9.11",
+   "psycopg2-binary>=2.9.12",
    "python-socketio>=5.13.0",
    "redis[hiredis]>=7.4.0",
    "sendgrid>=6.12.5",
@@ -32,13 +33,13 @@ dependencies = [
    "flask-restx>=1.3.2,<2.0.0",
    "google-cloud-aiplatform>=1.148.1,<2.0.0",
    "httpx[socks]>=0.28.1,<1.0.0",
-   "opentelemetry-distro>=0.62b0,<1.0.0",
+   "opentelemetry-distro>=0.62b1,<1.0.0",
    "opentelemetry-instrumentation-celery>=0.62b0,<1.0.0",
    "opentelemetry-instrumentation-flask>=0.62b0,<1.0.0",
    "opentelemetry-instrumentation-httpx>=0.62b0,<1.0.0",
    "opentelemetry-instrumentation-redis>=0.62b0,<1.0.0",
    "opentelemetry-instrumentation-sqlalchemy>=0.62b0,<1.0.0",
-   "opentelemetry-propagator-b3>=1.41.0,<2.0.0",
+   "opentelemetry-propagator-b3>=1.41.1,<2.0.0",
    "readabilipy>=0.3.0,<1.0.0",
    "resend>=2.27.0,<3.0.0",

@@ -117,7 +118,7 @@ dev = [
    "faker>=40.15.0",
    "lxml-stubs>=0.5.1",
    "basedpyright>=1.39.3",
-   "ruff>=0.15.11",
+   "ruff>=0.15.12",
    "pytest>=9.0.3",
    "pytest-benchmark>=5.2.3",
    "pytest-cov>=7.1.0",
@@ -144,7 +145,7 @@ dev = [
    "types-pexpect>=4.9.0",
    "types-protobuf>=7.34.1",
    "types-psutil>=7.2.2",
-   "types-psycopg2>=2.9.21",
+   "types-psycopg2>=2.9.21.20260422",
    "types-pygments>=2.20.0",
    "types-pymysql>=1.1.0",
    "types-python-dateutil>=2.9.0",
@@ -157,9 +158,9 @@ dev = [
    "types-tensorflow>=2.18.0.20260408",
    "types-tqdm>=4.67.3.20260408",
    "types-ujson>=5.10.0",
-   "boto3-stubs>=1.42.92",
+   "boto3-stubs>=1.42.96",
    "types-jmespath>=1.1.0.20260408",
-   "hypothesis>=6.152.1",
+   "hypothesis>=6.152.3",
    "types_pyOpenSSL>=24.1.0",
    "types_cffi>=2.0.0.20260408",
    "types_setuptools>=82.0.0.20260408",
@@ -169,12 +170,12 @@ dev = [
    "import-linter>=2.3",
    "types-redis>=4.6.0.20241004",
    "celery-types>=0.23.0",
-   "mypy>=1.20.1",
+   "mypy>=1.20.2",
    # "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved.
    "pytest-timeout>=2.4.0",
    "pytest-xdist>=3.8.0",
    "pyrefly>=0.62.0",
-   "xinference-client>=2.5.0",
+   "xinference-client>=2.7.0",
]

############################################################
@@ -184,12 +185,12 @@ dev = [
storage = [
    "azure-storage-blob>=12.28.0",
    "bce-python-sdk>=0.9.70",
-   "cos-python-sdk-v5>=1.9.41",
+   "cos-python-sdk-v5>=1.9.42",
    "esdk-obs-python>=3.22.2",
    "google-cloud-storage>=3.10.1",
    "opendal>=0.46.0",
    "oss2>=2.19.1",
-   "supabase>=2.28.3",
+   "supabase>=2.29.0",
    "tos>=2.9.0",
]

@@ -266,7 +267,7 @@ vdb-vastbase = ["dify-vdb-vastbase"]
vdb-vikingdb = ["dify-vdb-vikingdb"]
vdb-weaviate = ["dify-vdb-weaviate"]
# Optional client used by some tests / integrations (not a vector backend plugin)
- vdb-xinference = ["xinference-client>=2.5.0"]
+ vdb-xinference = ["xinference-client>=2.7.0"]

trace-all = [
    "dify-trace-aliyun",
@ -133,7 +133,14 @@ class AppAnnotationService:
raise ValueError("'question' is required when 'message_id' is not provided")
question = maybe_question
annotation = MessageAnnotation(app_id=app.id, content=answer, question=question, account_id=current_user.id)
annotation = MessageAnnotation(
app_id=app.id,
conversation_id=None,
message_id=None,
content=answer,
question=question,
account_id=current_user.id,
)
db.session.add(annotation)
db.session.commit()
@ -18,12 +18,13 @@ from core.app.features.rate_limiting import RateLimit
from core.app.features.rate_limiting.rate_limit import rate_limit_context
from core.app.layers.pause_state_persist_layer import PauseStateLayerConfig
from core.db import session_factory
from enums.quota_type import QuotaType, unlimited
from enums.quota_type import QuotaType
from extensions.otel import AppGenerateHandler, trace_span
from models.model import Account, App, AppMode, EndUser
from models.workflow import Workflow, WorkflowRun
from services.errors.app import QuotaExceededError, WorkflowIdFormatError, WorkflowNotFoundError
from services.errors.llm import InvokeRateLimitError
from services.quota_service import QuotaService, unlimited
from services.workflow_service import WorkflowService
from tasks.app_generate.workflow_execute_task import AppExecutionParams, workflow_based_app_execution_task
@ -106,7 +107,7 @@ class AppGenerateService:
quota_charge = unlimited()
if dify_config.BILLING_ENABLED:
try:
quota_charge = QuotaType.WORKFLOW.consume(app_model.tenant_id)
quota_charge = QuotaService.reserve(QuotaType.WORKFLOW, app_model.tenant_id)
except QuotaExceededError:
raise InvokeRateLimitError(f"Workflow execution quota limit reached for tenant {app_model.tenant_id}")
@ -116,6 +117,7 @@ class AppGenerateService:
request_id = RateLimit.gen_request_key()
try:
request_id = rate_limit.enter(request_id)
quota_charge.commit()
effective_mode = (
AppMode.AGENT_CHAT if app_model.is_agent and app_model.mode != AppMode.AGENT_CHAT else app_model.mode
)
@ -22,6 +22,7 @@ from models.trigger import WorkflowTriggerLog, WorkflowTriggerLogDict
from models.workflow import Workflow
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
from services.errors.app import QuotaExceededError, WorkflowNotFoundError, WorkflowQuotaLimitError
from services.quota_service import QuotaService, unlimited
from services.workflow.entities import AsyncTriggerResponse, TriggerData, WorkflowTaskData
from services.workflow.queue_dispatcher import QueueDispatcherManager, QueuePriority
from services.workflow_service import WorkflowService
@ -88,7 +89,10 @@ class AsyncWorkflowService:
raise WorkflowNotFoundError(f"App not found: {trigger_data.app_id}")
# 2. Get workflow
workflow = cls._get_workflow(workflow_service, app_model, trigger_data.workflow_id)
workflow = cls._get_workflow(workflow_service, app_model, trigger_data.workflow_id, session=session)
# commit the read-only session before starting the billing RPC call
session.commit()
# 3. Get dispatcher based on tenant subscription
dispatcher = dispatcher_manager.get_dispatcher(trigger_data.tenant_id)
@ -131,9 +135,10 @@ class AsyncWorkflowService:
trigger_log = trigger_log_repo.create(trigger_log)
session.commit()
# 7. Check and consume quota
# 7. Reserve quota (commit after successful dispatch)
quota_charge = unlimited()
try:
QuotaType.WORKFLOW.consume(trigger_data.tenant_id)
quota_charge = QuotaService.reserve(QuotaType.WORKFLOW, trigger_data.tenant_id)
except QuotaExceededError as e:
# Update trigger log status
trigger_log.status = WorkflowTriggerStatus.RATE_LIMITED
@ -153,13 +158,18 @@ class AsyncWorkflowService:
# 9. Dispatch to appropriate queue
task_data_dict = task_data.model_dump(mode="json")
task: AsyncResult[Any] | None = None
if queue_name == QueuePriority.PROFESSIONAL:
task = execute_workflow_professional.delay(task_data_dict)
elif queue_name == QueuePriority.TEAM:
task = execute_workflow_team.delay(task_data_dict)
else: # SANDBOX
task = execute_workflow_sandbox.delay(task_data_dict)
try:
task: AsyncResult[Any] | None = None
if queue_name == QueuePriority.PROFESSIONAL:
task = execute_workflow_professional.delay(task_data_dict)
elif queue_name == QueuePriority.TEAM:
task = execute_workflow_team.delay(task_data_dict)
else: # SANDBOX
task = execute_workflow_sandbox.delay(task_data_dict)
quota_charge.commit()
except Exception:
quota_charge.refund()
raise
# 10. Update trigger log with task info
trigger_log.status = WorkflowTriggerStatus.QUEUED
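The reserve, dispatch, then commit-or-refund sequence above is repeated at several call sites in this change. A minimal sketch of the same idea factored into a context manager; the helper name reserved_quota is hypothetical and not part of this commit:

from contextlib import contextmanager

from enums.quota_type import QuotaType
from services.quota_service import QuotaService


@contextmanager
def reserved_quota(quota_type: QuotaType, tenant_id: str, amount: int = 1):
    # Reserve first; QuotaExceededError propagates to the caller unchanged.
    charge = QuotaService.reserve(quota_type, tenant_id, amount)
    try:
        yield charge
    except Exception:
        charge.refund()  # return the frozen quota on any failure
        raise
    else:
        charge.commit()  # confirm consumption once the work succeeded


# Usage sketch:
#     with reserved_quota(QuotaType.WORKFLOW, trigger_data.tenant_id):
#         execute_workflow_sandbox.delay(task_data_dict)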
@ -295,13 +305,21 @@ class AsyncWorkflowService:
return [log.to_dict() for log in logs]
@staticmethod
def _get_workflow(workflow_service: WorkflowService, app_model: App, workflow_id: str | None = None) -> Workflow:
def _get_workflow(
workflow_service: WorkflowService,
app_model: App,
workflow_id: str | None = None,
session: Session | None = None,
) -> Workflow:
"""
Get workflow for the app
Args:
app_model: App model instance
workflow_id: Optional specific workflow ID
session: Reuse this SQLAlchemy session for the lookup when provided,
so the caller's explicit session bears the connection cost
instead of Flask's request-scoped ``db.session``.
Returns:
Workflow instance
@ -311,12 +329,12 @@ class AsyncWorkflowService:
"""
if workflow_id:
# Get specific published workflow
workflow = workflow_service.get_published_workflow_by_id(app_model, workflow_id)
workflow = workflow_service.get_published_workflow_by_id(app_model, workflow_id, session=session)
if not workflow:
raise WorkflowNotFoundError(f"Published workflow not found: {workflow_id}")
else:
# Get default published workflow
workflow = workflow_service.get_published_workflow(app_model)
workflow = workflow_service.get_published_workflow(app_model, session=session)
if not workflow:
raise WorkflowNotFoundError(f"No published workflow found for app: {app_model.id}")
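A minimal sketch of the intended call shape for the new session parameter, mirroring how trigger_workflow_async threads its own session through the lookup; the surrounding task code is illustrative, not part of this commit:

with session_factory.create_session() as session:
    workflow = AsyncWorkflowService._get_workflow(
        workflow_service,
        app_model,
        workflow_id=None,  # fall back to the app's default published workflow
        session=session,  # reuse the caller's connection instead of db.session
    )
    session.commit()  # release the read-only transaction before any slow RPC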
@ -32,6 +32,50 @@ class SubscriptionPlan(TypedDict):
expiration_date: int
class QuotaReserveResult(TypedDict):
reservation_id: str
available: int
reserved: int
class QuotaCommitResult(TypedDict):
available: int
reserved: int
refunded: int
class QuotaReleaseResult(TypedDict):
available: int
reserved: int
released: int
_quota_reserve_adapter = TypeAdapter(QuotaReserveResult)
_quota_commit_adapter = TypeAdapter(QuotaCommitResult)
_quota_release_adapter = TypeAdapter(QuotaReleaseResult)
class _TenantFeatureQuota(TypedDict):
usage: int
limit: int
reset_date: NotRequired[int]
class TenantFeatureQuotaInfo(TypedDict):
"""Response of /quota/info.
NOTE (hj24):
- Same convention as BillingInfo: billing may return int fields as str,
always keep non-strict mode to auto-coerce.
"""
trigger_event: _TenantFeatureQuota
api_rate_limit: _TenantFeatureQuota
_tenant_feature_quota_info_adapter = TypeAdapter(TenantFeatureQuotaInfo)
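The non-strict coercion the NOTE relies on can be seen in isolation. A self-contained sketch using plain pydantic, independent of this module:

from typing import TypedDict

from pydantic import TypeAdapter


class _Quota(TypedDict):
    usage: int
    limit: int


adapter = TypeAdapter(_Quota)

# Billing may serialize ints as strings; pydantic's default (lax) mode coerces them.
print(adapter.validate_python({"usage": "3", "limit": "100"}))  # {'usage': 3, 'limit': 100}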
class _BillingQuota(TypedDict):
size: int
limit: int
@ -149,11 +193,63 @@ class BillingService:
@classmethod
def get_tenant_feature_plan_usage_info(cls, tenant_id: str):
"""Deprecated: Use get_quota_info instead."""
params = {"tenant_id": tenant_id}
usage_info = cls._send_request("GET", "/tenant-feature-usage/info", params=params)
return usage_info
@classmethod
def get_quota_info(cls, tenant_id: str) -> TenantFeatureQuotaInfo:
params = {"tenant_id": tenant_id}
return _tenant_feature_quota_info_adapter.validate_python(
cls._send_request("GET", "/quota/info", params=params)
)
@classmethod
def quota_reserve(
cls, tenant_id: str, feature_key: str, request_id: str, amount: int = 1, meta: dict | None = None
) -> QuotaReserveResult:
"""Reserve quota before task execution."""
payload: dict = {
"tenant_id": tenant_id,
"feature_key": feature_key,
"request_id": request_id,
"amount": amount,
}
if meta:
payload["meta"] = meta
return _quota_reserve_adapter.validate_python(cls._send_request("POST", "/quota/reserve", json=payload))
@classmethod
def quota_commit(
cls, tenant_id: str, feature_key: str, reservation_id: str, actual_amount: int, meta: dict | None = None
) -> QuotaCommitResult:
"""Commit a reservation with actual consumption."""
payload: dict = {
"tenant_id": tenant_id,
"feature_key": feature_key,
"reservation_id": reservation_id,
"actual_amount": actual_amount,
}
if meta:
payload["meta"] = meta
return _quota_commit_adapter.validate_python(cls._send_request("POST", "/quota/commit", json=payload))
@classmethod
def quota_release(cls, tenant_id: str, feature_key: str, reservation_id: str) -> QuotaReleaseResult:
"""Release a reservation (cancel, return frozen quota)."""
return _quota_release_adapter.validate_python(
cls._send_request(
"POST",
"/quota/release",
json={
"tenant_id": tenant_id,
"feature_key": feature_key,
"reservation_id": reservation_id,
},
)
)
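Taken together, the three endpoints implement a reserve/commit/release protocol. A rough sketch of one client-side round trip; tenant_id, feature_key, and run_task are placeholder values, not real identifiers from this codebase:

import uuid

reservation = BillingService.quota_reserve(
    tenant_id="tenant-123",  # placeholder tenant
    feature_key="workflow",  # placeholder feature key
    request_id=str(uuid.uuid4()),  # idempotency key for the reserve call
    amount=1,
)
try:
    run_task()  # stand-in for the protected work
    BillingService.quota_commit(
        tenant_id="tenant-123",
        feature_key="workflow",
        reservation_id=reservation["reservation_id"],
        actual_amount=1,
    )
except Exception:
    # On failure, cancel the reservation so the frozen quota is returned.
    BillingService.quota_release(
        tenant_id="tenant-123",
        feature_key="workflow",
        reservation_id=reservation["reservation_id"],
    )
    raise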
@classmethod
def get_knowledge_rate_limit(cls, tenant_id: str) -> KnowledgeRateLimitDict:
params = {"tenant_id": tenant_id}
@ -3748,6 +3748,7 @@ class SegmentService:
ChildChunk.segment_id == segment.id,
)
)
assert current_user.current_tenant_id
child_chunk = ChildChunk(
tenant_id=current_user.current_tenant_id,
dataset_id=dataset.id,
@ -3758,7 +3759,7 @@ class SegmentService:
index_node_hash=index_node_hash,
content=content,
word_count=len(content),
type="customized",
type=SegmentType.CUSTOMIZED,
created_by=current_user.id,
)
db.session.add(child_chunk)
@ -3818,6 +3819,7 @@ class SegmentService:
if new_child_chunks_args:
child_chunk_count = len(child_chunks)
for position, args in enumerate(new_child_chunks_args, start=child_chunk_count + 1):
assert current_user.current_tenant_id
index_node_id = str(uuid.uuid4())
index_node_hash = helper.generate_text_hash(args.content)
child_chunk = ChildChunk(
@ -3830,7 +3832,7 @@ class SegmentService:
index_node_hash=index_node_hash,
content=args.content,
word_count=len(args.content),
type="customized",
type=SegmentType.CUSTOMIZED,
created_by=current_user.id,
)
@ -290,7 +290,7 @@ class FeatureService:
def _fulfill_params_from_billing_api(cls, features: FeatureModel, tenant_id: str):
billing_info = BillingService.get_info(tenant_id)
features_usage_info = BillingService.get_tenant_feature_plan_usage_info(tenant_id)
features_usage_info = BillingService.get_quota_info(tenant_id)
features.billing.enabled = billing_info["enabled"]
features.billing.subscription.plan = billing_info["subscription"]["plan"]
233
api/services/quota_service.py
Normal file
@ -0,0 +1,233 @@
from __future__ import annotations
import logging
import uuid
from dataclasses import dataclass, field
from typing import TYPE_CHECKING
from configs import dify_config
if TYPE_CHECKING:
from enums.quota_type import QuotaType
logger = logging.getLogger(__name__)
@dataclass
class QuotaCharge:
"""
Result of a quota reservation (Reserve phase).
Lifecycle:
charge = QuotaService.consume(QuotaType.TRIGGER, tenant_id)
try:
do_work()
charge.commit() # Confirm consumption
except:
charge.refund() # Release frozen quota
If neither commit() nor refund() is called, the billing system's
cleanup CronJob will auto-release the reservation within ~75 seconds.
"""
success: bool
charge_id: str | None # reservation_id
_quota_type: QuotaType
_tenant_id: str | None = None
_feature_key: str | None = None
_amount: int = 0
_committed: bool = field(default=False, repr=False)
def commit(self, actual_amount: int | None = None) -> None:
"""
Confirm the consumption with actual amount.
Args:
actual_amount: Actual amount consumed. Defaults to the reserved amount.
If less than reserved, the difference is refunded automatically.
"""
if self._committed or not self.charge_id or not self._tenant_id or not self._feature_key:
return
try:
from services.billing_service import BillingService
amount = actual_amount if actual_amount is not None else self._amount
BillingService.quota_commit(
tenant_id=self._tenant_id,
feature_key=self._feature_key,
reservation_id=self.charge_id,
actual_amount=amount,
)
self._committed = True
logger.debug(
"Committed %s quota for tenant %s, reservation_id: %s, amount: %d",
self._quota_type,
self._tenant_id,
self.charge_id,
amount,
)
except Exception:
logger.exception("Failed to commit quota, reservation_id: %s", self.charge_id)
def refund(self) -> None:
"""
Release the reserved quota (cancel the charge).
Safe to call even if:
- charge failed or was disabled (charge_id is None)
- already committed (Release after Commit is a no-op)
- already refunded (idempotent)
This method guarantees no exceptions will be raised.
"""
if not self.charge_id or not self._tenant_id or not self._feature_key:
return
QuotaService.release(self._quota_type, self.charge_id, self._tenant_id, self._feature_key)
def unlimited() -> QuotaCharge:
from enums.quota_type import QuotaType
return QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.UNLIMITED)
class QuotaService:
"""Orchestrates quota reserve / commit / release lifecycle via BillingService."""
@staticmethod
def consume(quota_type: QuotaType, tenant_id: str, amount: int = 1) -> QuotaCharge:
"""
Reserve + immediate Commit (one-shot mode).
The returned QuotaCharge supports .refund() which calls Release.
For two-phase usage (e.g. streaming), use reserve() directly.
"""
charge = QuotaService.reserve(quota_type, tenant_id, amount)
if charge.success and charge.charge_id:
charge.commit()
return charge
@staticmethod
def reserve(quota_type: QuotaType, tenant_id: str, amount: int = 1) -> QuotaCharge:
"""
Reserve quota before task execution (Reserve phase only).
The caller MUST call charge.commit() after the task succeeds,
or charge.refund() if the task fails.
Raises:
QuotaExceededError: When quota is insufficient
"""
from services.billing_service import BillingService
from services.errors.app import QuotaExceededError
if not dify_config.BILLING_ENABLED:
logger.debug("Billing disabled, allowing request for %s", tenant_id)
return QuotaCharge(success=True, charge_id=None, _quota_type=quota_type)
logger.info("Reserving %d %s quota for tenant %s", amount, quota_type.value, tenant_id)
if amount <= 0:
raise ValueError("Amount to reserve must be greater than 0")
request_id = str(uuid.uuid4())
feature_key = quota_type.billing_key
try:
reserve_resp = BillingService.quota_reserve(
tenant_id=tenant_id,
feature_key=feature_key,
request_id=request_id,
amount=amount,
)
reservation_id = reserve_resp.get("reservation_id")
if not reservation_id:
logger.warning(
"Reserve returned no reservation_id for %s, feature %s, response: %s",
tenant_id,
quota_type.value,
reserve_resp,
)
raise QuotaExceededError(feature=quota_type.value, tenant_id=tenant_id, required=amount)
logger.debug(
"Reserved %d %s quota for tenant %s, reservation_id: %s",
amount,
quota_type.value,
tenant_id,
reservation_id,
)
return QuotaCharge(
success=True,
charge_id=reservation_id,
_quota_type=quota_type,
_tenant_id=tenant_id,
_feature_key=feature_key,
_amount=amount,
)
except QuotaExceededError:
raise
except ValueError:
raise
except Exception:
logger.exception("Failed to reserve quota for %s, feature %s", tenant_id, quota_type.value)
return unlimited()
@staticmethod
def check(quota_type: QuotaType, tenant_id: str, amount: int = 1) -> bool:
if not dify_config.BILLING_ENABLED:
return True
if amount <= 0:
raise ValueError("Amount to check must be greater than 0")
try:
remaining = QuotaService.get_remaining(quota_type, tenant_id)
return remaining >= amount if remaining != -1 else True
except Exception:
logger.exception("Failed to check quota for %s, feature %s", tenant_id, quota_type.value)
return True
@staticmethod
def release(quota_type: QuotaType, reservation_id: str, tenant_id: str, feature_key: str) -> None:
"""Release a reservation. Guarantees no exceptions."""
try:
from services.billing_service import BillingService
if not dify_config.BILLING_ENABLED:
return
if not reservation_id:
return
logger.info("Releasing %s quota, reservation_id: %s", quota_type.value, reservation_id)
BillingService.quota_release(
tenant_id=tenant_id,
feature_key=feature_key,
reservation_id=reservation_id,
)
except Exception:
logger.exception("Failed to release quota, reservation_id: %s", reservation_id)
@staticmethod
def get_remaining(quota_type: QuotaType, tenant_id: str) -> int:
from services.billing_service import BillingService
try:
usage_info = BillingService.get_quota_info(tenant_id)
if isinstance(usage_info, dict):
feature_info = usage_info.get(quota_type.billing_key, {})
if isinstance(feature_info, dict):
limit = feature_info.get("limit", 0)
usage = feature_info.get("usage", 0)
if limit == -1:
return -1
return max(0, limit - usage)
return 0
except Exception:
logger.exception("Failed to get remaining quota for %s, feature %s", tenant_id, quota_type.value)
return -1
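A usage sketch of the new service in both modes, using the names defined above; dispatch_workflow is a stand-in for the protected work:

from enums.quota_type import QuotaType
from services.errors.app import QuotaExceededError
from services.quota_service import QuotaService

# One-shot mode: reserve and immediately commit.
charge = QuotaService.consume(QuotaType.TRIGGER, tenant_id="tenant-123")

# Two-phase mode: reserve now, settle after the work finishes.
try:
    charge = QuotaService.reserve(QuotaType.WORKFLOW, tenant_id="tenant-123")
except QuotaExceededError:
    raise  # surface a rate-limit error to the caller
try:
    dispatch_workflow()  # stand-in for the protected work
    charge.commit()  # confirm the reserved amount
except Exception:
    charge.refund()  # safe and idempotent; never raises
    raise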
@ -38,6 +38,7 @@ from models.workflow import Workflow
from services.async_workflow_service import AsyncWorkflowService
from services.end_user_service import EndUserService
from services.errors.app import QuotaExceededError
from services.quota_service import QuotaService
from services.trigger.app_trigger_service import AppTriggerService
from services.workflow.entities import WebhookTriggerData
@ -798,45 +799,47 @@ class WebhookService:
Exception: If workflow execution fails
"""
try:
with Session(db.engine) as session:
# Prepare inputs for the webhook node
# The webhook node expects webhook_data in the inputs
workflow_inputs = cls.build_workflow_inputs(webhook_data)
workflow_inputs = cls.build_workflow_inputs(webhook_data)
# Create trigger data
trigger_data = WebhookTriggerData(
app_id=webhook_trigger.app_id,
workflow_id=workflow.id,
root_node_id=webhook_trigger.node_id, # Start from the webhook node
inputs=workflow_inputs,
tenant_id=webhook_trigger.tenant_id,
trigger_data = WebhookTriggerData(
app_id=webhook_trigger.app_id,
workflow_id=workflow.id,
root_node_id=webhook_trigger.node_id,
inputs=workflow_inputs,
tenant_id=webhook_trigger.tenant_id,
)
end_user = EndUserService.get_or_create_end_user_by_type(
type=InvokeFrom.TRIGGER,
tenant_id=webhook_trigger.tenant_id,
app_id=webhook_trigger.app_id,
user_id=None,
)
try:
quota_charge = QuotaService.reserve(QuotaType.TRIGGER, webhook_trigger.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(webhook_trigger.tenant_id)
logger.info(
"Tenant %s rate limited, skipping webhook trigger %s",
webhook_trigger.tenant_id,
webhook_trigger.webhook_id,
)
raise
end_user = EndUserService.get_or_create_end_user_by_type(
type=InvokeFrom.TRIGGER,
tenant_id=webhook_trigger.tenant_id,
app_id=webhook_trigger.app_id,
user_id=None,
)
# consume quota before triggering workflow execution
try:
QuotaType.TRIGGER.consume(webhook_trigger.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(webhook_trigger.tenant_id)
logger.info(
"Tenant %s rate limited, skipping webhook trigger %s",
webhook_trigger.tenant_id,
webhook_trigger.webhook_id,
try:
# NOTE: do not use `with sessionmaker(bind=db.engine, expire_on_commit=False).begin()`;
# trigger_workflow_async needs to handle multiple session commits internally
with Session(db.engine, expire_on_commit=False) as session:
AsyncWorkflowService.trigger_workflow_async(
session,
end_user,
trigger_data,
)
raise
# Trigger workflow execution asynchronously
AsyncWorkflowService.trigger_workflow_async(
session,
end_user,
trigger_data,
)
quota_charge.commit()
except Exception:
quota_charge.refund()
raise
except Exception:
logger.exception("Failed to trigger workflow for webhook %s", webhook_trigger.webhook_id)
@ -16,6 +16,7 @@ from graphon.model_runtime.entities.model_entities import ModelType
from models import UploadFile
from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegment, SegmentAttachmentBinding
from models.dataset import Document as DatasetDocument
from models.enums import SegmentType
logger = logging.getLogger(__name__)
@ -178,7 +179,7 @@ class VectorService:
index_node_hash=child_chunk.metadata["doc_hash"],
content=child_chunk.page_content,
word_count=len(child_chunk.page_content),
type="automatic",
type=SegmentType.AUTOMATIC,
created_by=dataset_document.created_by,
)
db.session.add(child_segment)
@ -222,6 +223,7 @@ class VectorService:
)
documents.append(new_child_document)
for update_child_chunk in update_child_chunks:
assert update_child_chunk.index_node_id
child_document = Document(
page_content=update_child_chunk.content,
metadata={
@ -234,6 +236,7 @@ class VectorService:
documents.append(child_document)
delete_node_ids.append(update_child_chunk.index_node_id)
for delete_child_chunk in delete_child_chunks:
assert delete_child_chunk.index_node_id
delete_node_ids.append(delete_child_chunk.index_node_id)
if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
# update vector index
@ -246,6 +249,7 @@ class VectorService:
@classmethod
def delete_child_chunk_vector(cls, child_chunk: ChildChunk, dataset: Dataset):
vector = Vector(dataset=dataset)
assert child_chunk.index_node_id
vector.delete_by_ids([child_chunk.index_node_id])
@classmethod
@ -156,11 +156,18 @@ class WorkflowService:
# return draft workflow
return workflow
def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Workflow | None:
def get_published_workflow_by_id(
self, app_model: App, workflow_id: str, session: Session | None = None
) -> Workflow | None:
"""
fetch published workflow by workflow_id
When ``session`` is provided, reuse it so callers that already hold a
Session avoid checking out an extra request-scoped ``db.session``
connection. Falls back to ``db.session`` for backward compatibility.
"""
workflow = db.session.scalar(
bind = session if session is not None else db.session
workflow = bind.scalar(
select(Workflow)
.where(
Workflow.tenant_id == app_model.tenant_id,
@ -178,16 +185,20 @@ class WorkflowService:
)
return workflow
def get_published_workflow(self, app_model: App) -> Workflow | None:
def get_published_workflow(self, app_model: App, session: Session | None = None) -> Workflow | None:
"""
Get published workflow
When ``session`` is provided, reuse it so callers that already hold a
Session avoid checking out an extra request-scoped ``db.session``
connection. Falls back to ``db.session`` for backward compatibility.
"""
if not app_model.workflow_id:
return None
# fetch published workflow by workflow_id
workflow = db.session.scalar(
bind = session if session is not None else db.session
workflow = bind.scalar(
select(Workflow)
.where(
Workflow.tenant_id == app_model.tenant_id,
@ -61,13 +61,31 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
# check whether segments exist
if index_node_ids:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
with session_factory.create_session() as session:
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if dataset:
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
# Wrap vector / keyword index cleanup in try/except so that a transient
# failure here (e.g. billing API hiccup propagated via FeatureService when
# ModelManager is initialized inside ``Vector(dataset)``) does not abort
# the entire task and leave document_segments / child_chunks / image_files
# / metadata bindings stranded in PG. Mirrors the pattern already used in
# ``clean_dataset_task`` so the document row's hard delete (already
# committed by the caller) does not produce orphan PG rows just because
# the vector backend or one of its transitive dependencies was unhappy.
try:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
with session_factory.create_session() as session:
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if dataset:
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception(
"Failed to clean vector / keyword index in clean_document_task, "
"document_id=%s, dataset_id=%s, index_node_ids_count=%d. "
"Continuing with PG / storage cleanup; vector orphans can be reaped later.",
document_id,
dataset_id,
len(index_node_ids),
)
total_image_files = []
with session_factory.create_session() as session, session.begin():
@ -40,12 +40,29 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
total_index_node_ids.extend([segment.index_node_id for segment in segments])
with session_factory.create_session() as session:
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if dataset:
index_processor.clean(
dataset, total_index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
# Wrap vector / keyword index cleanup in try/except so that a transient
# failure here (e.g. billing API hiccup propagated via FeatureService when
# ``ModelManager`` is initialized inside ``Vector(dataset)``) does not abort
# the task and leave the already-deleted documents' segments stranded in PG.
# The Document rows are hard-deleted in the previous session block, so any
# exception escaping this task would produce orphans that no later request
# can reference back. Mirrors the pattern in ``clean_dataset_task``.
try:
with session_factory.create_session() as session:
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if dataset:
index_processor.clean(
dataset, total_index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception(
"Failed to clean vector / keyword index in clean_notion_document_task, "
"dataset_id=%s, document_ids=%s, index_node_ids_count=%d. "
"Continuing with segment deletion; vector orphans can be reaped later.",
dataset_id,
document_ids,
len(total_index_node_ids),
)
with session_factory.create_session() as session, session.begin():
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
@ -11,6 +11,7 @@ from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
from models.account import TenantPluginAutoUpgradeStrategy
from services.plugin.plugin_service import PluginService
logger = logging.getLogger(__name__)
@ -171,14 +172,13 @@ def process_tenant_plugin_autoupgrade_check_task(
fg="green",
)
)
_ = manager.upgrade_plugin(
# Use the service that downloads and uploads the package to the daemon
# first; calling manager.upgrade_plugin directly skips that step and the
# daemon fails because the package never reaches its local bucket.
_ = PluginService.upgrade_plugin_with_marketplace(
tenant_id,
original_unique_identifier,
new_unique_identifier,
PluginInstallationSource.Marketplace,
{
"plugin_unique_identifier": new_unique_identifier,
},
)
except Exception as e:
click.echo(click.style(f"Error when upgrading plugin: {e}", fg="red"))
@ -27,7 +27,7 @@ from core.trigger.entities.entities import TriggerProviderEntity
from core.trigger.provider import PluginTriggerProviderController
from core.trigger.trigger_manager import TriggerManager
from core.workflow.nodes.trigger_plugin.entities import TriggerEventNodeData
from enums.quota_type import QuotaType, unlimited
from enums.quota_type import QuotaType
from graphon.enums import WorkflowExecutionStatus
from models.enums import (
AppTriggerType,
@ -42,6 +42,7 @@ from models.workflow import Workflow, WorkflowAppLog, WorkflowAppLogCreatedFrom,
from services.async_workflow_service import AsyncWorkflowService
from services.end_user_service import EndUserService
from services.errors.app import QuotaExceededError
from services.quota_service import QuotaService, unlimited
from services.trigger.app_trigger_service import AppTriggerService
from services.trigger.trigger_provider_service import TriggerProviderService
from services.trigger.trigger_request_service import TriggerHttpRequestCachingService
@ -258,59 +259,58 @@ def dispatch_triggered_workflow(
tenant_id=subscription.tenant_id, provider_id=TriggerProviderID(subscription.provider_id)
)
trigger_entity: TriggerProviderEntity = provider_controller.entity
# Ensure expire_on_commit is set to False to keep workflows available
with session_factory.create_session() as session:
workflows: Mapping[str, Workflow] = _get_latest_workflows_by_app_ids(session, subscribers)
end_users: Mapping[str, EndUser] = EndUserService.create_end_user_batch(
type=InvokeFrom.TRIGGER,
tenant_id=subscription.tenant_id,
app_ids=[plugin_trigger.app_id for plugin_trigger in subscribers],
user_id=user_id,
)
for plugin_trigger in subscribers:
# Get workflow from mapping
workflow: Workflow | None = workflows.get(plugin_trigger.app_id)
if not workflow:
logger.error(
"Workflow not found for app %s",
plugin_trigger.app_id,
)
continue
end_users: Mapping[str, EndUser] = EndUserService.create_end_user_batch(
type=InvokeFrom.TRIGGER,
tenant_id=subscription.tenant_id,
app_ids=[plugin_trigger.app_id for plugin_trigger in subscribers],
user_id=user_id,
)
# Find the trigger node in the workflow
event_node = None
for node_id, node_config in workflow.walk_nodes(TRIGGER_PLUGIN_NODE_TYPE):
if node_id == plugin_trigger.node_id:
event_node = node_config
break
if not event_node:
logger.error("Trigger event node not found for app %s", plugin_trigger.app_id)
continue
# invoke trigger
trigger_metadata = PluginTriggerMetadata(
plugin_unique_identifier=provider_controller.plugin_unique_identifier or "",
endpoint_id=subscription.endpoint_id,
provider_id=subscription.provider_id,
event_name=event_name,
icon_filename=trigger_entity.identity.icon or "",
icon_dark_filename=trigger_entity.identity.icon_dark or "",
for plugin_trigger in subscribers:
workflow: Workflow | None = workflows.get(plugin_trigger.app_id)
if not workflow:
logger.error(
"Workflow not found for app %s",
plugin_trigger.app_id,
)
continue
# consume quota before invoking trigger
quota_charge = unlimited()
try:
quota_charge = QuotaType.TRIGGER.consume(subscription.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(subscription.tenant_id)
logger.info(
"Tenant %s rate limited, skipping plugin trigger %s", subscription.tenant_id, plugin_trigger.id
)
return 0
event_node = None
for node_id, node_config in workflow.walk_nodes(TRIGGER_PLUGIN_NODE_TYPE):
if node_id == plugin_trigger.node_id:
event_node = node_config
break
node_data: TriggerEventNodeData = TriggerEventNodeData.model_validate(event_node)
invoke_response: TriggerInvokeEventResponse | None = None
if not event_node:
logger.error("Trigger event node not found for app %s", plugin_trigger.app_id)
continue
trigger_metadata = PluginTriggerMetadata(
plugin_unique_identifier=provider_controller.plugin_unique_identifier or "",
endpoint_id=subscription.endpoint_id,
provider_id=subscription.provider_id,
event_name=event_name,
icon_filename=trigger_entity.identity.icon or "",
icon_dark_filename=trigger_entity.identity.icon_dark or "",
)
quota_charge = unlimited()
try:
quota_charge = QuotaService.reserve(QuotaType.TRIGGER, subscription.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(subscription.tenant_id)
logger.info("Tenant %s rate limited, skipping plugin trigger %s", subscription.tenant_id, plugin_trigger.id)
return dispatched_count
node_data: TriggerEventNodeData = TriggerEventNodeData.model_validate(event_node)
invoke_response: TriggerInvokeEventResponse | None = None
with session_factory.create_session() as session:
try:
invoke_response = TriggerManager.invoke_trigger_event(
tenant_id=subscription.tenant_id,
@ -387,6 +387,7 @@ def dispatch_triggered_workflow(
raise ValueError(f"End user not found for app {plugin_trigger.app_id}")
AsyncWorkflowService.trigger_workflow_async(session=session, user=end_user, trigger_data=trigger_data)
quota_charge.commit()
dispatched_count += 1
logger.info(
"Triggered workflow for app %s with trigger event %s",
@ -401,7 +402,7 @@ def dispatch_triggered_workflow(
plugin_trigger.app_id,
)
return dispatched_count
return dispatched_count
def dispatch_triggered_workflows(
@ -8,10 +8,11 @@ from core.workflow.nodes.trigger_schedule.exc import (
ScheduleNotFoundError,
TenantOwnerNotFoundError,
)
from enums.quota_type import QuotaType, unlimited
from enums.quota_type import QuotaType
from models.trigger import WorkflowSchedulePlan
from services.async_workflow_service import AsyncWorkflowService
from services.errors.app import QuotaExceededError
from services.quota_service import QuotaService, unlimited
from services.trigger.app_trigger_service import AppTriggerService
from services.trigger.schedule_service import ScheduleService
from services.workflow.entities import ScheduleTriggerData
@ -32,6 +33,7 @@ def run_schedule_trigger(schedule_id: str) -> None:
TenantOwnerNotFoundError: If no owner/admin for tenant
ScheduleExecutionError: If workflow trigger fails
"""
# Ensure expire_on_commit is set to False to keep schedule/tenant_owner available
with session_factory.create_session() as session:
schedule = session.get(WorkflowSchedulePlan, schedule_id)
if not schedule:
@ -41,16 +43,16 @@ def run_schedule_trigger(schedule_id: str) -> None:
if not tenant_owner:
raise TenantOwnerNotFoundError(f"No owner or admin found for tenant {schedule.tenant_id}")
quota_charge = unlimited()
try:
quota_charge = QuotaType.TRIGGER.consume(schedule.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(schedule.tenant_id)
logger.info("Tenant %s rate limited, skipping schedule trigger %s", schedule.tenant_id, schedule_id)
return
quota_charge = unlimited()
try:
quota_charge = QuotaService.reserve(QuotaType.TRIGGER, schedule.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(schedule.tenant_id)
logger.info("Tenant %s rate limited, skipping schedule trigger %s", schedule.tenant_id, schedule_id)
return
try:
# Production dispatch: Trigger the workflow normally
try:
with session_factory.create_session() as session:
response = AsyncWorkflowService.trigger_workflow_async(
session=session,
user=tenant_owner,
@ -61,9 +63,10 @@ def run_schedule_trigger(schedule_id: str) -> None:
tenant_id=schedule.tenant_id,
),
)
logger.info("Schedule %s triggered workflow: %s", schedule_id, response.workflow_trigger_log_id)
except Exception as e:
quota_charge.refund()
raise ScheduleExecutionError(
f"Failed to trigger workflow for schedule {schedule_id}, app {schedule.app_id}"
) from e
quota_charge.commit()
logger.info("Schedule %s triggered workflow: %s", schedule_id, response.workflow_trigger_log_id)
except Exception as e:
quota_charge.refund()
raise ScheduleExecutionError(
f"Failed to trigger workflow for schedule {schedule_id}, app {schedule.app_id}"
) from e
@ -36,12 +36,19 @@ class TestAppGenerateService:
) as mock_message_based_generator,
patch("services.account_service.FeatureService", autospec=True) as mock_account_feature_service,
patch("services.app_generate_service.dify_config", autospec=True) as mock_dify_config,
patch("services.quota_service.dify_config", autospec=True) as mock_quota_dify_config,
patch("configs.dify_config", autospec=True) as mock_global_dify_config,
):
# Setup default mock returns for billing service
mock_billing_service.update_tenant_feature_plan_usage.return_value = {
"result": "success",
"history_id": "test_history_id",
mock_billing_service.quota_reserve.return_value = {
"reservation_id": "test-reservation-id",
"available": 100,
"reserved": 1,
}
mock_billing_service.quota_commit.return_value = {
"available": 99,
"reserved": 0,
"refunded": 0,
}
# Setup default mock returns for workflow service
@ -101,6 +108,8 @@ class TestAppGenerateService:
mock_dify_config.APP_DEFAULT_ACTIVE_REQUESTS = 100
mock_dify_config.APP_DAILY_RATE_LIMIT = 1000
mock_quota_dify_config.BILLING_ENABLED = False
mock_global_dify_config.BILLING_ENABLED = False
mock_global_dify_config.APP_MAX_ACTIVE_REQUESTS = 100
mock_global_dify_config.APP_DAILY_RATE_LIMIT = 1000
@ -118,6 +127,7 @@ class TestAppGenerateService:
"message_based_generator": mock_message_based_generator,
"account_feature_service": mock_account_feature_service,
"dify_config": mock_dify_config,
"quota_dify_config": mock_quota_dify_config,
"global_dify_config": mock_global_dify_config,
}
@ -465,6 +475,7 @@ class TestAppGenerateService:
# Set BILLING_ENABLED to True for this test
mock_external_service_dependencies["dify_config"].BILLING_ENABLED = True
mock_external_service_dependencies["quota_dify_config"].BILLING_ENABLED = True
mock_external_service_dependencies["global_dify_config"].BILLING_ENABLED = True
# Setup test arguments
@ -478,8 +489,10 @@ class TestAppGenerateService:
# Verify the result
assert result == ["test_response"]
# Verify billing service was called to consume quota
mock_external_service_dependencies["billing_service"].update_tenant_feature_plan_usage.assert_called_once()
# Verify billing two-phase quota (reserve + commit)
billing = mock_external_service_dependencies["billing_service"]
billing.quota_reserve.assert_called_once()
billing.quota_commit.assert_called_once()
def test_generate_with_invalid_app_mode(
self, db_session_with_containers: Session, mock_external_service_dependencies
@ -10,6 +10,7 @@ from sqlalchemy import select
from sqlalchemy.orm import Session
from core.trigger.constants import TRIGGER_WEBHOOK_NODE_TYPE
from enums.quota_type import QuotaType
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.enums import AppTriggerStatus, AppTriggerType
from models.model import App
@ -290,17 +291,26 @@ class TestWebhookServiceTriggerExecutionWithContainers:
end_user = SimpleNamespace(id=str(uuid4()))
webhook_data = {"body": {"value": 1}, "headers": {}, "query_params": {}, "files": {}, "method": "POST"}
quota_charge = MagicMock()
with (
patch(
"services.trigger.webhook_service.EndUserService.get_or_create_end_user_by_type",
return_value=end_user,
),
patch("services.trigger.webhook_service.QuotaType.TRIGGER.consume") as mock_consume,
patch(
"services.trigger.webhook_service.QuotaService.reserve",
return_value=quota_charge,
) as mock_reserve,
patch("services.trigger.webhook_service.AsyncWorkflowService.trigger_workflow_async") as mock_trigger,
):
WebhookService.trigger_workflow_execution(webhook_trigger, webhook_data, workflow)
mock_consume.assert_called_once_with(webhook_trigger.tenant_id)
mock_reserve.assert_called_once()
reserve_args = mock_reserve.call_args.args
assert reserve_args[0] == QuotaType.TRIGGER
assert reserve_args[1] == webhook_trigger.tenant_id
quota_charge.commit.assert_called_once()
mock_trigger.assert_called_once()
trigger_args = mock_trigger.call_args.args
assert trigger_args[1] is end_user
@ -327,7 +337,7 @@ class TestWebhookServiceTriggerExecutionWithContainers:
return_value=SimpleNamespace(id=str(uuid4())),
),
patch(
"services.trigger.webhook_service.QuotaType.TRIGGER.consume",
"services.trigger.webhook_service.QuotaService.reserve",
side_effect=QuotaExceededError(feature="trigger", tenant_id=tenant.id, required=1),
),
patch(
@ -602,14 +602,25 @@ class TestCleanNotionDocumentTask:
# Note: This test successfully verifies database operations.
# IndexProcessor verification would require more sophisticated mocking.
def test_clean_notion_document_task_database_transaction_rollback(
def test_clean_notion_document_task_continues_when_index_processor_fails(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
):
"""
Test cleanup task behavior when database operations fail.
Index processor failure (e.g. transient billing API error propagated via
``FeatureService`` when ``Vector(dataset)`` lazily resolves the embedding
model) must NOT abort the cleanup task. The Document rows have already
been hard-deleted in the first session block before vector cleanup runs,
so any uncaught exception escaping the task would strand
``DocumentSegment`` rows in PG with no parent ``Document``.
This test verifies that the task properly handles database errors
and maintains data consistency.
Contract: the task swallows the index_processor exception, logs it, and
proceeds to delete the segments — leaving PG consistent. (Vector orphans,
if any, can be reaped later by an offline scanner.)
Regression guard for the production incident where ``clean_document_task``
/ ``clean_notion_document_task`` failed with
``ValueError("Unable to retrieve billing information...")`` and left
tens of thousands of orphan segments per affected tenant.
"""
fake = Faker()
@ -672,17 +683,28 @@ class TestCleanNotionDocumentTask:
db_session_with_containers.add(segment)
db_session_with_containers.commit()
# Mock index processor to raise an exception
# Simulate the production failure mode: index_processor.clean() raises a
# ValueError mirroring ``BillingService._send_request`` returning non-200.
mock_index_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_index_processor.clean.side_effect = Exception("Index processor error")
mock_index_processor.clean.side_effect = ValueError(
"Unable to retrieve billing information. Please try again later or contact support."
)
# Execute cleanup task - current implementation propagates the exception
with pytest.raises(Exception, match="Index processor error"):
clean_notion_document_task([document.id], dataset.id)
# Execute cleanup task — must NOT raise even though clean() raises.
# Before the safety-net wrapper this would have re-raised the ValueError,
# aborting the task and leaving DocumentSegment stranded in PG.
clean_notion_document_task([document.id], dataset.id)
# Note: This test demonstrates the task's error handling capability.
# Even with external service errors, the database operations complete successfully.
# In a production environment, proper error handling would determine transaction rollback behavior.
# Vector cleanup was attempted exactly once.
mock_index_processor.clean.assert_called_once()
# The crucial assertion: despite the index processor failure, the
# final session block (line 51-52, ``DELETE FROM document_segments``)
# still ran and committed. This is what the wrapper buys us — without
# it the production incident left tens of thousands of orphan segments
# per affected tenant. Aligns with the assertion shape used by the
# happy-path test (``test_clean_notion_document_task_success``).
assert _count_segments(db_session_with_containers, DocumentSegment.document_id == document.id) == 0
def test_clean_notion_document_task_with_large_number_of_documents(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@ -605,9 +605,9 @@ def test_schedule_trigger_creates_trigger_log(
)
# Mock quota to avoid rate limiting
from enums import quota_type
from services import quota_service
monkeypatch.setattr(quota_type.QuotaType.TRIGGER, "consume", lambda _tenant_id: quota_type.unlimited())
monkeypatch.setattr(quota_service.QuotaService, "reserve", lambda *_args, **_kwargs: quota_service.unlimited())
# Execute schedule trigger
workflow_schedule_tasks.run_schedule_trigger(plan.id)
@ -134,6 +134,42 @@ class TestPerformHitTesting:
assert result["query"] == "hello"
assert result["records"] == []
def test_success_normalizes_legacy_query_and_nullable_list_fields(self, dataset):
response = {
"query": {"content": "hello"},
"records": [
{
"segment": {"id": "segment-1", "keywords": None},
"child_chunks": None,
"files": None,
"score": 0.8,
}
],
}
with (
patch.object(
HitTestingService,
"retrieve",
return_value=response,
),
patch(
"controllers.console.datasets.hit_testing_base.marshal",
return_value=response["records"],
),
):
result = DatasetsHitTestingBase.perform_hit_testing(dataset, {"query": "hello"})
assert result["query"] == "hello"
assert result["records"] == [
{
"segment": {"id": "segment-1", "keywords": []},
"child_chunks": [],
"files": [],
"score": 0.8,
}
]
def test_index_not_initialized(self, dataset):
with patch.object(
HitTestingService,
@ -23,6 +23,7 @@ from werkzeug.exceptions import Forbidden, NotFound
|
||||
|
||||
from controllers.service_api.dataset.document import (
|
||||
DeprecatedDocumentAddByTextApi,
|
||||
DeprecatedDocumentUpdateByFileApi,
|
||||
DeprecatedDocumentUpdateByTextApi,
|
||||
DocumentAddByFileApi,
|
||||
DocumentAddByTextApi,
|
||||
@ -32,7 +33,6 @@ from controllers.service_api.dataset.document import (
|
||||
DocumentListQuery,
|
||||
DocumentTextCreatePayload,
|
||||
DocumentTextUpdate,
|
||||
DocumentUpdateByFileApi,
|
||||
DocumentUpdateByTextApi,
|
||||
InvalidMetadataError,
|
||||
)
|
||||
@ -1095,8 +1095,8 @@ class TestArchivedDocumentImmutableError:
|
||||
assert error.code == 403
|
||||
|
||||
|
||||
class TestDocumentTextRouteDeprecation:
|
||||
"""Test that legacy underscore text routes stay marked deprecated."""
|
||||
class TestDocumentRouteDeprecation:
|
||||
"""Test that legacy document routes stay marked deprecated."""
|
||||
|
||||
def test_create_by_text_legacy_alias_is_deprecated(self):
|
||||
"""Ensure only the legacy create-by-text alias is marked deprecated."""
|
||||
@ -1108,10 +1108,15 @@ class TestDocumentTextRouteDeprecation:
|
||||
assert DeprecatedDocumentUpdateByTextApi.post.__apidoc__["deprecated"] is True
|
||||
assert DocumentUpdateByTextApi.post.__apidoc__.get("deprecated") is not True
|
||||
|
||||
def test_update_by_file_legacy_aliases_are_deprecated(self):
|
||||
"""Ensure only the legacy file-update aliases are marked deprecated."""
|
||||
assert DeprecatedDocumentUpdateByFileApi.post.__apidoc__["deprecated"] is True
|
||||
assert DocumentApi.patch.__apidoc__.get("deprecated") is not True
|
||||
|
||||
|
||||
# =============================================================================
# Endpoint tests for DocumentUpdateByTextApi, DocumentAddByFileApi,
# DocumentUpdateByFileApi.
# and the canonical/deprecated document file update routes.
#
# These controllers use ``@cloud_edition_billing_resource_check`` (does NOT
# preserve ``__wrapped__``) and ``@cloud_edition_billing_rate_limit_check``
@ -1359,13 +1364,52 @@ class TestDocumentAddByFileApiPost:
            api.post(tenant_id=mock_tenant.id, dataset_id=mock_dataset.id)


class TestDocumentUpdateByFileApiPost:
    """Test suite for DocumentUpdateByFileApi.post() endpoint.
class TestDocumentUpdateByFileApiPatch:
    """Test suite for the canonical document file update endpoint.

    ``post`` is wrapped by ``@cloud_edition_billing_resource_check`` and
    ``patch`` is wrapped by ``@cloud_edition_billing_resource_check`` and
    ``@cloud_edition_billing_rate_limit_check``.
    """

    @pytest.mark.parametrize("route_name", ["update_by_file", "update-by-file"])
    @patch("controllers.service_api.dataset.document._update_document_by_file")
    @patch("controllers.service_api.wraps.FeatureService")
    @patch("controllers.service_api.wraps.validate_and_get_api_token")
    def test_update_by_file_deprecated_aliases_delegate_to_shared_handler(
        self,
        mock_validate_token,
        mock_feature_svc,
        mock_update_document_by_file,
        route_name,
        app,
        mock_tenant,
        mock_dataset,
    ):
        """Test legacy POST aliases still dispatch while marked deprecated."""
        _setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
        mock_update_document_by_file.return_value = ({"document": {"id": "doc-1"}, "batch": "batch-1"}, 200)

        doc_id = str(uuid.uuid4())
        with app.test_request_context(
            f"/datasets/{mock_dataset.id}/documents/{doc_id}/{route_name}",
            method="POST",
            headers={"Authorization": "Bearer test_token"},
        ):
            api = DeprecatedDocumentUpdateByFileApi()
            response, status = api.post(
                tenant_id=mock_tenant.id,
                dataset_id=mock_dataset.id,
                document_id=doc_id,
            )

        assert status == 200
        assert response["batch"] == "batch-1"
        mock_update_document_by_file.assert_called_once_with(
            tenant_id=mock_tenant.id,
            dataset_id=mock_dataset.id,
            document_id=doc_id,
        )

    @patch("controllers.service_api.dataset.document.db")
    @patch("controllers.service_api.wraps.FeatureService")
    @patch("controllers.service_api.wraps.validate_and_get_api_token")
@ -1387,15 +1431,15 @@ class TestDocumentUpdateByFileApiPost:
        doc_id = str(uuid.uuid4())
        data = {"file": (BytesIO(b"content"), "test.pdf", "application/pdf")}
        with app.test_request_context(
            f"/datasets/{mock_dataset.id}/documents/{doc_id}/update_by_file",
            method="POST",
            f"/datasets/{mock_dataset.id}/documents/{doc_id}",
            method="PATCH",
            content_type="multipart/form-data",
            data=data,
            headers={"Authorization": "Bearer test_token"},
        ):
            api = DocumentUpdateByFileApi()
            api = DocumentApi()
            with pytest.raises(ValueError, match="Dataset does not exist"):
                api.post(
                api.patch(
                    tenant_id=mock_tenant.id,
                    dataset_id=mock_dataset.id,
                    document_id=doc_id,
@ -1423,15 +1467,15 @@ class TestDocumentUpdateByFileApiPost:
        doc_id = str(uuid.uuid4())
        data = {"file": (BytesIO(b"content"), "test.pdf", "application/pdf")}
        with app.test_request_context(
            f"/datasets/{mock_dataset.id}/documents/{doc_id}/update_by_file",
            method="POST",
            f"/datasets/{mock_dataset.id}/documents/{doc_id}",
            method="PATCH",
            content_type="multipart/form-data",
            data=data,
            headers={"Authorization": "Bearer test_token"},
        ):
            api = DocumentUpdateByFileApi()
            api = DocumentApi()
            with pytest.raises(ValueError, match="External datasets"):
                api.post(
                api.patch(
                    tenant_id=mock_tenant.id,
                    dataset_id=mock_dataset.id,
                    document_id=doc_id,
@ -1482,14 +1526,14 @@ class TestDocumentUpdateByFileApiPost:
        doc_id = str(uuid.uuid4())
        data = {"file": (BytesIO(b"file content"), "test.pdf", "application/pdf")}
        with app.test_request_context(
            f"/datasets/{mock_dataset.id}/documents/{doc_id}/update_by_file",
            method="POST",
            f"/datasets/{mock_dataset.id}/documents/{doc_id}",
            method="PATCH",
            content_type="multipart/form-data",
            data=data,
            headers={"Authorization": "Bearer test_token"},
        ):
            api = DocumentUpdateByFileApi()
            response, status = api.post(
            api = DocumentApi()
            response, status = api.patch(
                tenant_id=mock_tenant.id,
                dataset_id=mock_dataset.id,
                document_id=doc_id,

@ -171,6 +171,57 @@ class TestHitTestingApiPost:
        assert passed_retrieval_model["search_method"] == "semantic_search"
        assert passed_retrieval_model["top_k"] == 10

    @patch("controllers.service_api.dataset.hit_testing.service_api_ns")
    @patch("controllers.console.datasets.hit_testing_base.marshal")
    @patch("controllers.console.datasets.hit_testing_base.HitTestingService")
    @patch("controllers.console.datasets.hit_testing_base.DatasetService")
    @patch("controllers.console.datasets.hit_testing_base.current_user", new_callable=lambda: Mock(spec=Account))
    def test_post_normalizes_legacy_query_and_nullable_list_fields(
        self,
        mock_current_user,
        mock_dataset_svc,
        mock_hit_svc,
        mock_marshal,
        mock_ns,
        app,
    ):
        """Test service API normalizes legacy query shape and nullable list fields."""
        dataset_id = str(uuid.uuid4())
        tenant_id = str(uuid.uuid4())

        mock_dataset = Mock()
        mock_dataset.id = dataset_id

        mock_dataset_svc.get_dataset.return_value = mock_dataset
        mock_dataset_svc.check_dataset_permission.return_value = None

        mock_hit_svc.retrieve.return_value = {"query": {"content": "legacy query"}, "records": ["placeholder"]}
        mock_hit_svc.hit_testing_args_check.return_value = None
        mock_marshal.return_value = [
            {
                "segment": {"id": "segment-1", "keywords": None},
                "child_chunks": None,
                "files": None,
                "score": 0.9,
            }
        ]

        mock_ns.payload = {"query": "legacy query"}

        with app.test_request_context():
            api = HitTestingApi()
            response = HitTestingApi.post.__wrapped__(api, tenant_id, dataset_id)

        assert response["query"] == "legacy query"
        assert response["records"] == [
            {
                "segment": {"id": "segment-1", "keywords": []},
                "child_chunks": [],
                "files": [],
                "score": 0.9,
            }
        ]

    @patch("controllers.service_api.dataset.hit_testing.service_api_ns")
    @patch("controllers.console.datasets.hit_testing_base.DatasetService")
    @patch("controllers.console.datasets.hit_testing_base.current_user", new_callable=lambda: Mock(spec=Account))

@ -146,10 +146,7 @@ def test_get_vector_factory_entry_point_overrides_builtin(vector_factory_module,
def test_vector_init_uses_default_and_custom_attributes(vector_factory_module):
    dataset = SimpleNamespace(id="dataset-1")

    with (
        patch.object(vector_factory_module.Vector, "_get_embeddings", return_value="embeddings"),
        patch.object(vector_factory_module.Vector, "_init_vector", return_value="processor"),
    ):
    with patch.object(vector_factory_module.Vector, "_init_vector", return_value="processor"):
        default_vector = vector_factory_module.Vector(dataset)
        custom_vector = vector_factory_module.Vector(dataset, attributes=["doc_id"])

@ -166,10 +163,57 @@ def test_vector_init_uses_default_and_custom_attributes(vector_factory_module):
        "original_chunk_id",
    ]
    assert custom_vector._attributes == ["doc_id"]
    assert default_vector._embeddings == "embeddings"
    # ``_embeddings`` is now a lazy proxy that defers materializing the real
    # embedding model until ``embed_*`` is invoked, so cleanup paths never
    # trigger billing/feature-service calls during ``Vector(dataset)``
    # construction. See ``_LazyEmbeddings``.
    assert isinstance(default_vector._embeddings, vector_factory_module._LazyEmbeddings)
    assert default_vector._vector_processor == "processor"


def test_lazy_embeddings_defer_real_load_until_first_embed_call(vector_factory_module, monkeypatch):
    """``Vector(dataset)`` must not transitively call ``ModelManager`` during
    construction. The real embedding model should only be materialized on the
    first ``embed_*`` call (i.e. create / search paths) so cleanup paths
    (``delete_by_ids`` / ``delete``) remain resilient to billing-API failures.
    """
    for_tenant_mock = MagicMock(side_effect=AssertionError("ModelManager.for_tenant must not be called eagerly"))
    monkeypatch.setattr(vector_factory_module.ModelManager, "for_tenant", for_tenant_mock)

    dataset = SimpleNamespace(
        tenant_id="tenant-1",
        embedding_model_provider="openai",
        embedding_model="text-embedding-3-small",
    )

    proxy = vector_factory_module._LazyEmbeddings(dataset)

    # Construction alone does not trigger ModelManager / FeatureService / BillingService.
    for_tenant_mock.assert_not_called()

    # Exercising an embed_* method materializes the real model exactly once.
    inner_model = MagicMock()
    inner_model.embed_documents.return_value = [[0.1, 0.2]]
    cached_embedding_mock = MagicMock(return_value=inner_model)
    real_for_tenant = MagicMock()
    real_for_tenant.get_model_instance.return_value = "embedding-model-instance"
    monkeypatch.setattr(vector_factory_module.ModelManager, "for_tenant", MagicMock(return_value=real_for_tenant))
    monkeypatch.setattr(vector_factory_module, "CacheEmbedding", cached_embedding_mock)

    result = proxy.embed_documents(["hello"])

    assert result == [[0.1, 0.2]]
    cached_embedding_mock.assert_called_once_with("embedding-model-instance")
    inner_model.embed_documents.assert_called_once_with(["hello"])

    # Subsequent calls reuse the materialized model (no re-resolution).
    inner_model.embed_documents.reset_mock()
    cached_embedding_mock.reset_mock()
    proxy.embed_documents(["world"])
    cached_embedding_mock.assert_not_called()
    inner_model.embed_documents.assert_called_once_with(["world"])


def test_init_vector_prefers_dataset_index_struct(vector_factory_module, monkeypatch):
    calls = {"vector_type": None, "init_args": None}

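For context, the behavior pinned down above follows the standard lazy-proxy shape. A minimal sketch, assuming only that the wrapped object exposes embed_* methods (the factory and class name here are illustrative, not the module's actual _LazyEmbeddings implementation):

class LazyEmbeddingsSketch:
    """Defer building an expensive embeddings object until first real use."""

    def __init__(self, factory):
        self._factory = factory  # e.g. would resolve ModelManager + CacheEmbedding
        self._inner = None

    def _materialize(self):
        if self._inner is None:
            self._inner = self._factory()  # resolved exactly once, then cached
        return self._inner

    def embed_documents(self, texts):
        return self._materialize().embed_documents(texts)

    def embed_query(self, text):
        return self._materialize().embed_query(text)

Construction does no work, so deletion/cleanup paths that never embed anything never touch billing or feature services.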
0
api/tests/unit_tests/enums/__init__.py
Normal file
349
api/tests/unit_tests/enums/test_quota_type.py
Normal file
@ -0,0 +1,349 @@
"""Unit tests for QuotaType, QuotaService, and QuotaCharge."""

from unittest.mock import patch

import pytest

from enums.quota_type import QuotaType
from services.quota_service import QuotaCharge, QuotaService, unlimited


class TestQuotaType:
    def test_billing_key_trigger(self):
        assert QuotaType.TRIGGER.billing_key == "trigger_event"

    def test_billing_key_workflow(self):
        assert QuotaType.WORKFLOW.billing_key == "api_rate_limit"

    def test_billing_key_unlimited_raises(self):
        with pytest.raises(ValueError, match="Invalid quota type"):
            _ = QuotaType.UNLIMITED.billing_key


class TestQuotaService:
    def test_reserve_billing_disabled(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService"),
        ):
            mock_cfg.BILLING_ENABLED = False
            charge = QuotaService.reserve(QuotaType.TRIGGER, "t1")
            assert charge.success is True
            assert charge.charge_id is None

    def test_reserve_zero_amount_raises(self):
        with patch("services.quota_service.dify_config") as mock_cfg:
            mock_cfg.BILLING_ENABLED = True
            with pytest.raises(ValueError, match="greater than 0"):
                QuotaService.reserve(QuotaType.TRIGGER, "t1", amount=0)

    def test_reserve_success(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            mock_bs.quota_reserve.return_value = {"reservation_id": "rid-1", "available": 99}

            charge = QuotaService.reserve(QuotaType.TRIGGER, "t1", amount=1)

            assert charge.success is True
            assert charge.charge_id == "rid-1"
            assert charge._tenant_id == "t1"
            assert charge._feature_key == "trigger_event"
            assert charge._amount == 1
            mock_bs.quota_reserve.assert_called_once()

    def test_reserve_no_reservation_id_raises(self):
        from services.errors.app import QuotaExceededError

        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            mock_bs.quota_reserve.return_value = {}

            with pytest.raises(QuotaExceededError):
                QuotaService.reserve(QuotaType.TRIGGER, "t1")

    def test_reserve_quota_exceeded_propagates(self):
        from services.errors.app import QuotaExceededError

        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            mock_bs.quota_reserve.side_effect = QuotaExceededError(feature="trigger", tenant_id="t1", required=1)

            with pytest.raises(QuotaExceededError):
                QuotaService.reserve(QuotaType.TRIGGER, "t1")

    def test_reserve_api_exception_returns_unlimited(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            mock_bs.quota_reserve.side_effect = RuntimeError("network")

            charge = QuotaService.reserve(QuotaType.TRIGGER, "t1")
            assert charge.success is True
            assert charge.charge_id is None

    def test_consume_calls_reserve_and_commit(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            mock_bs.quota_reserve.return_value = {"reservation_id": "rid-c"}
            mock_bs.quota_commit.return_value = {}

            charge = QuotaService.consume(QuotaType.TRIGGER, "t1")
            assert charge.success is True
            mock_bs.quota_commit.assert_called_once()

    def test_check_billing_disabled(self):
        with patch("services.quota_service.dify_config") as mock_cfg:
            mock_cfg.BILLING_ENABLED = False
            assert QuotaService.check(QuotaType.TRIGGER, "t1") is True

    def test_check_zero_amount_raises(self):
        with patch("services.quota_service.dify_config") as mock_cfg:
            mock_cfg.BILLING_ENABLED = True
            with pytest.raises(ValueError, match="greater than 0"):
                QuotaService.check(QuotaType.TRIGGER, "t1", amount=0)

    def test_check_sufficient_quota(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch.object(QuotaService, "get_remaining", return_value=100),
        ):
            mock_cfg.BILLING_ENABLED = True
            assert QuotaService.check(QuotaType.TRIGGER, "t1", amount=50) is True

    def test_check_insufficient_quota(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch.object(QuotaService, "get_remaining", return_value=5),
        ):
            mock_cfg.BILLING_ENABLED = True
            assert QuotaService.check(QuotaType.TRIGGER, "t1", amount=10) is False

    def test_check_unlimited_quota(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch.object(QuotaService, "get_remaining", return_value=-1),
        ):
            mock_cfg.BILLING_ENABLED = True
            assert QuotaService.check(QuotaType.TRIGGER, "t1", amount=999) is True

    def test_check_exception_returns_true(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch.object(QuotaService, "get_remaining", side_effect=RuntimeError),
        ):
            mock_cfg.BILLING_ENABLED = True
            assert QuotaService.check(QuotaType.TRIGGER, "t1") is True

    def test_release_billing_disabled(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = False
            QuotaService.release(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")
            mock_bs.quota_release.assert_not_called()

    def test_release_empty_reservation(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            QuotaService.release(QuotaType.TRIGGER, "", "t1", "trigger_event")
            mock_bs.quota_release.assert_not_called()

    def test_release_success(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            mock_bs.quota_release.return_value = {}
            QuotaService.release(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")
            mock_bs.quota_release.assert_called_once_with(
                tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1"
            )

    def test_release_exception_swallowed(self):
        with (
            patch("services.quota_service.dify_config") as mock_cfg,
            patch("services.billing_service.BillingService") as mock_bs,
        ):
            mock_cfg.BILLING_ENABLED = True
            mock_bs.quota_release.side_effect = RuntimeError("fail")
            QuotaService.release(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")

    def test_get_remaining_normal(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.return_value = {"trigger_event": {"limit": 100, "usage": 30}}
            assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 70

    def test_get_remaining_unlimited(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.return_value = {"trigger_event": {"limit": -1, "usage": 0}}
            assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == -1

    def test_get_remaining_over_limit_returns_zero(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.return_value = {"trigger_event": {"limit": 10, "usage": 15}}
            assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0

    def test_get_remaining_exception_returns_neg1(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.side_effect = RuntimeError
            assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == -1

    def test_get_remaining_empty_response(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.return_value = {}
            assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0

    def test_get_remaining_non_dict_response(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.return_value = "invalid"
            assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0

    def test_get_remaining_feature_not_in_response(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.return_value = {"other_feature": {"limit": 100, "usage": 0}}
            remaining = QuotaService.get_remaining(QuotaType.TRIGGER, "t1")
            assert remaining == 0

    def test_get_remaining_non_dict_feature_info(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.get_quota_info.return_value = {"trigger_event": "not_a_dict"}
            assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0


class TestQuotaCharge:
    def test_commit_success(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.quota_commit.return_value = {}
            charge = QuotaCharge(
                success=True,
                charge_id="rid-1",
                _quota_type=QuotaType.TRIGGER,
                _tenant_id="t1",
                _feature_key="trigger_event",
                _amount=1,
            )
            charge.commit()
            mock_bs.quota_commit.assert_called_once_with(
                tenant_id="t1",
                feature_key="trigger_event",
                reservation_id="rid-1",
                actual_amount=1,
            )
            assert charge._committed is True

    def test_commit_with_actual_amount(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.quota_commit.return_value = {}
            charge = QuotaCharge(
                success=True,
                charge_id="rid-1",
                _quota_type=QuotaType.TRIGGER,
                _tenant_id="t1",
                _feature_key="trigger_event",
                _amount=10,
            )
            charge.commit(actual_amount=5)
            call_kwargs = mock_bs.quota_commit.call_args[1]
            assert call_kwargs["actual_amount"] == 5

    def test_commit_idempotent(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.quota_commit.return_value = {}
            charge = QuotaCharge(
                success=True,
                charge_id="rid-1",
                _quota_type=QuotaType.TRIGGER,
                _tenant_id="t1",
                _feature_key="trigger_event",
                _amount=1,
            )
            charge.commit()
            charge.commit()
            assert mock_bs.quota_commit.call_count == 1

    def test_commit_no_charge_id_noop(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            charge = QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.TRIGGER)
            charge.commit()
            mock_bs.quota_commit.assert_not_called()

    def test_commit_no_tenant_id_noop(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            charge = QuotaCharge(
                success=True,
                charge_id="rid-1",
                _quota_type=QuotaType.TRIGGER,
                _tenant_id=None,
                _feature_key="trigger_event",
            )
            charge.commit()
            mock_bs.quota_commit.assert_not_called()

    def test_commit_exception_swallowed(self):
        with patch("services.billing_service.BillingService") as mock_bs:
            mock_bs.quota_commit.side_effect = RuntimeError("fail")
            charge = QuotaCharge(
                success=True,
                charge_id="rid-1",
                _quota_type=QuotaType.TRIGGER,
                _tenant_id="t1",
                _feature_key="trigger_event",
                _amount=1,
            )
            charge.commit()

    def test_refund_success(self):
        with patch.object(QuotaService, "release") as mock_rel:
            charge = QuotaCharge(
                success=True,
                charge_id="rid-1",
                _quota_type=QuotaType.TRIGGER,
                _tenant_id="t1",
                _feature_key="trigger_event",
            )
            charge.refund()
            mock_rel.assert_called_once_with(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")

    def test_refund_no_charge_id_noop(self):
        with patch.object(QuotaService, "release") as mock_rel:
            charge = QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.TRIGGER)
            charge.refund()
            mock_rel.assert_not_called()

    def test_refund_no_tenant_id_noop(self):
        with patch.object(QuotaService, "release") as mock_rel:
            charge = QuotaCharge(
                success=True,
                charge_id="rid-1",
                _quota_type=QuotaType.TRIGGER,
                _tenant_id=None,
            )
            charge.refund()
            mock_rel.assert_not_called()


class TestUnlimited:
    def test_unlimited_returns_success_with_no_charge_id(self):
        charge = unlimited()
        assert charge.success is True
        assert charge.charge_id is None
        assert charge._quota_type == QuotaType.UNLIMITED
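Taken together, these tests describe a reserve/commit/refund protocol. A caller-side sketch of how the pieces are meant to compose (run_with_quota and the work callable are hypothetical; QuotaService and QuotaType are the modules under test):

from enums.quota_type import QuotaType
from services.quota_service import QuotaService


def run_with_quota(tenant_id: str, work):
    # Reserve first; raises QuotaExceededError when the tenant is out of quota.
    charge = QuotaService.reserve(QuotaType.TRIGGER, tenant_id, amount=1)
    try:
        result = work()
    except Exception:
        charge.refund()  # release the reservation if the work fails
        raise
    charge.commit()  # settle the reservation once the work succeeds
    return result

Reserving before the work and committing after is what lets failed runs return quota to the tenant instead of silently consuming it.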
@ -711,6 +711,8 @@ class TestMessageAnnotation:
        annotation = MessageAnnotation(
            app_id=app_id,
            question="What is AI?",
            conversation_id=None,
            message_id=None,
            content="AI stands for Artificial Intelligence.",
            account_id=account_id,
        )
@ -728,6 +730,8 @@ class TestMessageAnnotation:
        annotation = MessageAnnotation(
            app_id=str(uuid4()),
            question="Test question",
            conversation_id=None,
            message_id=None,
            content="Test content",
            account_id=str(uuid4()),
        )
@ -1068,6 +1072,8 @@ class TestModelIntegration:
            app_id=app_id,
            question="What is AI?",
            content="AI stands for Artificial Intelligence.",
            conversation_id=None,
            message_id=message_id,
            account_id=account_id,
        )
        annotation.id = annotation_id

@ -45,7 +45,7 @@ class TestWorkflowModelValidation:
        workflow = Workflow.new(
            tenant_id=tenant_id,
            app_id=app_id,
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=graph,
            features=features,
@ -58,7 +58,7 @@ class TestWorkflowModelValidation:
        # Assert
        assert workflow.tenant_id == tenant_id
        assert workflow.app_id == app_id
        assert workflow.type == WorkflowType.WORKFLOW.value
        assert workflow.type == WorkflowType.WORKFLOW
        assert workflow.version == "draft"
        assert workflow.graph == graph
        assert workflow.created_by == created_by
@ -68,7 +68,7 @@ class TestWorkflowModelValidation:
    def test_workflow_type_enum_values(self):
        """Test WorkflowType enum values."""
        # Assert
        assert WorkflowType.WORKFLOW.value == "workflow"
        assert WorkflowType.WORKFLOW == "workflow"
        assert WorkflowType.CHAT.value == "chat"
        assert WorkflowType.RAG_PIPELINE.value == "rag-pipeline"
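The .value suffixes can be dropped throughout these hunks because the enum members themselves compare equal to plain strings. A minimal sketch of that property, assuming WorkflowType behaves like Python 3.11's StrEnum (the sketch class is illustrative, not the project's definition):

from enum import StrEnum  # requires Python 3.11+


class WorkflowTypeSketch(StrEnum):
    WORKFLOW = "workflow"
    CHAT = "chat"


assert WorkflowTypeSketch.WORKFLOW == "workflow"  # member == str holds
assert WorkflowTypeSketch.WORKFLOW.value == "workflow"  # .value still works
assert str(WorkflowTypeSketch.WORKFLOW) == "workflow"  # str() is the raw value

This is why the model columns can be assigned enum members directly while older string-based assertions keep passing.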
@ -89,7 +89,7 @@ class TestWorkflowModelValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=json.dumps(graph_data),
            features="{}",
@ -114,7 +114,7 @@ class TestWorkflowModelValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph="{}",
            features=json.dumps(features_data),
@ -138,7 +138,7 @@ class TestWorkflowModelValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="v1.0",
            graph="{}",
            features="{}",
@ -176,11 +176,11 @@ class TestWorkflowRunStateTransitions:
            tenant_id=tenant_id,
            app_id=app_id,
            workflow_id=workflow_id,
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            version="draft",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=created_by,
        )

@ -188,9 +188,9 @@ class TestWorkflowRunStateTransitions:
        assert workflow_run.tenant_id == tenant_id
        assert workflow_run.app_id == app_id
        assert workflow_run.workflow_id == workflow_id
        assert workflow_run.type == WorkflowType.WORKFLOW.value
        assert workflow_run.triggered_from == WorkflowRunTriggeredFrom.DEBUGGING.value
        assert workflow_run.status == WorkflowExecutionStatus.RUNNING.value
        assert workflow_run.type == WorkflowType.WORKFLOW
        assert workflow_run.triggered_from == WorkflowRunTriggeredFrom.DEBUGGING
        assert workflow_run.status == WorkflowExecutionStatus.RUNNING
        assert workflow_run.created_by == created_by

    def test_workflow_run_state_transition_running_to_succeeded(self):
@ -200,21 +200,21 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.END_USER.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.END_USER,
            created_by=str(uuid4()),
        )

        # Act
        workflow_run.status = WorkflowExecutionStatus.SUCCEEDED.value
        workflow_run.status = WorkflowExecutionStatus.SUCCEEDED
        workflow_run.finished_at = datetime.now(UTC)
        workflow_run.elapsed_time = 2.5

        # Assert
        assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED.value
        assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED
        assert workflow_run.finished_at is not None
        assert workflow_run.elapsed_time == 2.5

@ -225,21 +225,21 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

        # Act
        workflow_run.status = WorkflowExecutionStatus.FAILED.value
        workflow_run.status = WorkflowExecutionStatus.FAILED
        workflow_run.error = "Node execution failed: Invalid input"
        workflow_run.finished_at = datetime.now(UTC)

        # Assert
        assert workflow_run.status == WorkflowExecutionStatus.FAILED.value
        assert workflow_run.status == WorkflowExecutionStatus.FAILED
        assert workflow_run.error == "Node execution failed: Invalid input"
        assert workflow_run.finished_at is not None

@ -250,20 +250,20 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            version="draft",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

        # Act
        workflow_run.status = WorkflowExecutionStatus.STOPPED.value
        workflow_run.status = WorkflowExecutionStatus.STOPPED
        workflow_run.finished_at = datetime.now(UTC)

        # Assert
        assert workflow_run.status == WorkflowExecutionStatus.STOPPED.value
        assert workflow_run.status == WorkflowExecutionStatus.STOPPED
        assert workflow_run.finished_at is not None

    def test_workflow_run_state_transition_running_to_paused(self):
@ -273,19 +273,19 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.END_USER.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.END_USER,
            created_by=str(uuid4()),
        )

        # Act
        workflow_run.status = WorkflowExecutionStatus.PAUSED.value
        workflow_run.status = WorkflowExecutionStatus.PAUSED

        # Assert
        assert workflow_run.status == WorkflowExecutionStatus.PAUSED.value
        assert workflow_run.status == WorkflowExecutionStatus.PAUSED
        assert workflow_run.finished_at is None  # Not finished when paused

    def test_workflow_run_state_transition_paused_to_running(self):
@ -295,19 +295,19 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.PAUSED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.PAUSED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

        # Act
        workflow_run.status = WorkflowExecutionStatus.RUNNING.value
        workflow_run.status = WorkflowExecutionStatus.RUNNING

        # Assert
        assert workflow_run.status == WorkflowExecutionStatus.RUNNING.value
        assert workflow_run.status == WorkflowExecutionStatus.RUNNING

    def test_workflow_run_with_partial_succeeded_status(self):
        """Test workflow run with partial-succeeded status."""
@ -316,17 +316,17 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            exceptions_count=2,
        )

        # Assert
        assert workflow_run.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value
        assert workflow_run.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED
        assert workflow_run.exceptions_count == 2

    def test_workflow_run_with_inputs_and_outputs(self):
@ -340,11 +340,11 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.END_USER.value,
            status=WorkflowExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.END_USER,
            created_by=str(uuid4()),
            inputs=json.dumps(inputs),
            outputs=json.dumps(outputs),
@ -362,11 +362,11 @@ class TestWorkflowRunStateTransitions:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            version="draft",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            graph=json.dumps(graph),
        )
@ -391,11 +391,11 @@ class TestWorkflowRunStateTransitions:
            tenant_id=tenant_id,
            app_id=app_id,
            workflow_id=workflow_id,
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=created_by,
            total_tokens=1500,
            total_steps=5,
@ -410,7 +410,7 @@ class TestWorkflowRunStateTransitions:
        assert result["tenant_id"] == tenant_id
        assert result["app_id"] == app_id
        assert result["workflow_id"] == workflow_id
        assert result["status"] == WorkflowExecutionStatus.SUCCEEDED.value
        assert result["status"] == WorkflowExecutionStatus.SUCCEEDED
        assert result["total_tokens"] == 1500
        assert result["total_steps"] == 5

@ -422,18 +422,18 @@ class TestWorkflowRunStateTransitions:
            "tenant_id": str(uuid4()),
            "app_id": str(uuid4()),
            "workflow_id": str(uuid4()),
            "type": WorkflowType.WORKFLOW.value,
            "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
            "type": WorkflowType.WORKFLOW,
            "triggered_from": WorkflowRunTriggeredFrom.APP_RUN,
            "version": "v1.0",
            "graph": {"nodes": [], "edges": []},
            "inputs": {"query": "test"},
            "status": WorkflowExecutionStatus.SUCCEEDED.value,
            "status": WorkflowExecutionStatus.SUCCEEDED,
            "outputs": {"result": "success"},
            "error": None,
            "elapsed_time": 3.5,
            "total_tokens": 2000,
            "total_steps": 10,
            "created_by_role": CreatorUserRole.ACCOUNT.value,
            "created_by_role": CreatorUserRole.ACCOUNT,
            "created_by": str(uuid4()),
            "created_at": datetime.now(UTC),
            "finished_at": datetime.now(UTC),
@ -446,7 +446,7 @@ class TestWorkflowRunStateTransitions:
        # Assert
        assert workflow_run.id == data["id"]
        assert workflow_run.workflow_id == data["workflow_id"]
        assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED.value
        assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED
        assert workflow_run.total_tokens == 2000


@ -467,14 +467,14 @@ class TestNodeExecutionRelationships:
            tenant_id=tenant_id,
            app_id=app_id,
            workflow_id=workflow_id,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=workflow_run_id,
            index=1,
            node_id="start",
            node_type=BuiltinNodeTypes.START,
            title="Start Node",
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=created_by,
        )

@ -498,15 +498,15 @@ class TestNodeExecutionRelationships:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=2,
            predecessor_node_id=predecessor_node_id,
            node_id=current_node_id,
            node_type=BuiltinNodeTypes.LLM,
            title="LLM Node",
            status=WorkflowNodeExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

@ -528,8 +528,8 @@ class TestNodeExecutionRelationships:
            node_id="llm_test",
            node_type=BuiltinNodeTypes.LLM,
            title="Test LLM",
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

@ -549,14 +549,14 @@ class TestNodeExecutionRelationships:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=1,
            node_id="llm_1",
            node_type=BuiltinNodeTypes.LLM,
            title="LLM Node",
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            inputs=json.dumps(inputs),
            outputs=json.dumps(outputs),
@ -575,24 +575,24 @@ class TestNodeExecutionRelationships:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=1,
            node_id="code_1",
            node_type=BuiltinNodeTypes.CODE,
            title="Code Node",
            status=WorkflowNodeExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

        # Act - transition to succeeded
        node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
        node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED
        node_execution.elapsed_time = 1.2
        node_execution.finished_at = datetime.now(UTC)

        # Assert
        assert node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED.value
        assert node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED
        assert node_execution.elapsed_time == 1.2
        assert node_execution.finished_at is not None

@ -606,20 +606,20 @@ class TestNodeExecutionRelationships:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=3,
            node_id="code_1",
            node_type=BuiltinNodeTypes.CODE,
            title="Code Node",
            status=WorkflowNodeExecutionStatus.FAILED.value,
            status=WorkflowNodeExecutionStatus.FAILED,
            error=error_message,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

        # Assert
        assert node_execution.status == WorkflowNodeExecutionStatus.FAILED.value
        assert node_execution.status == WorkflowNodeExecutionStatus.FAILED
        assert node_execution.error == error_message

    def test_node_execution_with_metadata(self):
@ -637,14 +637,14 @@ class TestNodeExecutionRelationships:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=1,
            node_id="llm_1",
            node_type=BuiltinNodeTypes.LLM,
            title="LLM Node",
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            execution_metadata=json.dumps(metadata),
        )
@ -660,14 +660,14 @@ class TestNodeExecutionRelationships:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=1,
            node_id="start",
            node_type=BuiltinNodeTypes.START,
            title="Start",
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            execution_metadata=None,
        )
@ -696,14 +696,14 @@ class TestNodeExecutionRelationships:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=1,
            node_id=f"{node_type}_1",
            node_type=node_type,
            title=title,
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
        )

@ -734,7 +734,7 @@ class TestGraphConfigurationValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=json.dumps(graph_config),
            features="{}",
@ -761,7 +761,7 @@ class TestGraphConfigurationValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=json.dumps(graph_config),
            features="{}",
@ -802,7 +802,7 @@ class TestGraphConfigurationValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=json.dumps(graph_config),
            features="{}",
@ -835,11 +835,11 @@ class TestGraphConfigurationValidation:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            graph=json.dumps(original_graph),
        )
@ -872,7 +872,7 @@ class TestGraphConfigurationValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=json.dumps(graph_config),
            features="{}",
@ -912,7 +912,7 @@ class TestGraphConfigurationValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=json.dumps(graph_config),
            features="{}",
@ -933,7 +933,7 @@ class TestGraphConfigurationValidation:
        workflow = Workflow.new(
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            type=WorkflowType.WORKFLOW,
            version="draft",
            graph=None,
            features="{}",
@ -956,11 +956,11 @@ class TestGraphConfigurationValidation:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            inputs=None,
        )
@ -978,11 +978,11 @@ class TestGraphConfigurationValidation:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            type=WorkflowType.WORKFLOW.value,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
            type=WorkflowType.WORKFLOW,
            triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
            version="v1.0",
            status=WorkflowExecutionStatus.RUNNING.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowExecutionStatus.RUNNING,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            outputs=None,
        )
@ -1000,14 +1000,14 @@ class TestGraphConfigurationValidation:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=1,
            node_id="start",
            node_type=BuiltinNodeTypes.START,
            title="Start",
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            inputs=None,
        )
@ -1025,14 +1025,14 @@ class TestGraphConfigurationValidation:
            tenant_id=str(uuid4()),
            app_id=str(uuid4()),
            workflow_id=str(uuid4()),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
            workflow_run_id=str(uuid4()),
            index=1,
            node_id="start",
            node_type=BuiltinNodeTypes.START,
            title="Start",
            status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
            created_by_role=CreatorUserRole.ACCOUNT.value,
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            outputs=None,
        )

70
api/tests/unit_tests/oss/__mock/baidu_obs.py
Normal file
@ -0,0 +1,70 @@
import base64
import hashlib
import os
from io import BytesIO
from types import SimpleNamespace

import pytest
from _pytest.monkeypatch import MonkeyPatch
from baidubce.services.bos.bos_client import BosClient

from tests.unit_tests.oss.__mock.base import (
    get_example_bucket,
    get_example_data,
    get_example_filename,
    get_example_filepath,
)


class MockBaiduObsClass:
    def __init__(self, config=None):
        self.bucket_name = get_example_bucket()
        self.key = get_example_filename()
        self.content = get_example_data()
        self.filepath = get_example_filepath()

    def put_object(self, bucket_name, key, data, content_length=None, content_md5=None, **kwargs):
        assert bucket_name == self.bucket_name
        assert key == self.key
        assert data == self.content
        assert content_length == len(self.content)
        expected_md5 = base64.standard_b64encode(hashlib.md5(self.content).digest())
        assert content_md5 == expected_md5

    def get_object(self, bucket_name, key, **kwargs):
        assert bucket_name == self.bucket_name
        assert key == self.key
        return SimpleNamespace(data=BytesIO(self.content))

    def get_object_to_file(self, bucket_name, key, file_name, **kwargs):
        assert bucket_name == self.bucket_name
        assert key == self.key
        assert file_name == self.filepath

    def get_object_meta_data(self, bucket_name, key, **kwargs):
        assert bucket_name == self.bucket_name
        assert key == self.key
        return SimpleNamespace(status=200)

    def delete_object(self, bucket_name, key, **kwargs):
        assert bucket_name == self.bucket_name
        assert key == self.key


MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true"


@pytest.fixture
def setup_baidu_obs_mock(monkeypatch: MonkeyPatch):
    if MOCK:
        monkeypatch.setattr(BosClient, "__init__", MockBaiduObsClass.__init__)
        monkeypatch.setattr(BosClient, "put_object", MockBaiduObsClass.put_object)
        monkeypatch.setattr(BosClient, "get_object", MockBaiduObsClass.get_object)
        monkeypatch.setattr(BosClient, "get_object_to_file", MockBaiduObsClass.get_object_to_file)
        monkeypatch.setattr(BosClient, "get_object_meta_data", MockBaiduObsClass.get_object_meta_data)
        monkeypatch.setattr(BosClient, "delete_object", MockBaiduObsClass.delete_object)

    yield

    if MOCK:
        monkeypatch.undo()
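A hypothetical consumer of the fixture above (this test body is illustrative, not part of the diff): when MOCK_SWITCH=true is set in the environment, every BosClient call inside the test hits the in-memory mock instead of Baidu BOS.

pytest_plugins = ("tests.unit_tests.oss.__mock.baidu_obs",)


def test_round_trip(setup_baidu_obs_mock):
    from baidubce.services.bos.bos_client import BosClient

    client = BosClient()  # patched __init__ seeds the example bucket/key/content
    obj = client.get_object(client.bucket_name, client.key)
    assert obj.data.read() == client.content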
1
api/tests/unit_tests/oss/baidu_obs/__init__.py
Normal file
@ -0,0 +1 @@

60
api/tests/unit_tests/oss/baidu_obs/test_baidu_obs.py
Normal file
@ -0,0 +1,60 @@
|
||||
from unittest.mock import MagicMock, patch

import pytest
from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration

from extensions.storage.baidu_obs_storage import BaiduObsStorage
from tests.unit_tests.oss.__mock.base import (
    BaseStorageTest,
    get_example_bucket,
)

pytest_plugins = ("tests.unit_tests.oss.__mock.baidu_obs",)


class TestBaiduObs(BaseStorageTest):
    @pytest.fixture(autouse=True)
    def setup_method(self, setup_baidu_obs_mock):
        """Executed before each test method."""
        with (
            patch.object(BceCredentials, "__init__", return_value=None),
            patch.object(BceClientConfiguration, "__init__", return_value=None),
        ):
            self.storage = BaiduObsStorage()
            self.storage.bucket_name = get_example_bucket()


class TestBaiduObsConfiguration:
    def test_init_with_config(self):
        mock_dify_config = MagicMock()
        mock_dify_config.BAIDU_OBS_BUCKET_NAME = "test-bucket"
        mock_dify_config.BAIDU_OBS_ACCESS_KEY = "test-access-key"
        mock_dify_config.BAIDU_OBS_SECRET_KEY = "test-secret-key"
        mock_dify_config.BAIDU_OBS_ENDPOINT = "https://bj.bcebos.com"

        mock_credentials = MagicMock(name="credentials")
        mock_config = MagicMock(name="config")
        mock_client = MagicMock(name="client")

        with (
            patch("extensions.storage.baidu_obs_storage.dify_config", mock_dify_config),
            patch("extensions.storage.baidu_obs_storage.BceCredentials", return_value=mock_credentials) as credentials,
            patch(
                "extensions.storage.baidu_obs_storage.BceClientConfiguration", return_value=mock_config
            ) as configuration,
            patch("extensions.storage.baidu_obs_storage.BosClient", return_value=mock_client) as client_cls,
        ):
            storage = BaiduObsStorage()

        assert storage.bucket_name == "test-bucket"
        assert storage.client == mock_client
        credentials.assert_called_once_with(
            access_key_id="test-access-key",
            secret_access_key="test-secret-key",
        )
        configuration.assert_called_once_with(
            credentials=mock_credentials,
            endpoint="https://bj.bcebos.com",
        )
        client_cls.assert_called_once_with(config=mock_config)
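Read together, those assertions pin down the constructor wiring. A minimal sketch of what BaiduObsStorage.__init__ presumably does, assuming only what the test asserts (the base class name and exact layout here are assumptions, not the actual source):

    # extensions/storage/baidu_obs_storage.py (hedged sketch, not the real file)
    class BaiduObsStorage(BaseStorage):
        def __init__(self):
            # Bucket comes straight from config; the client is built from
            # credentials -> client configuration -> BosClient, matching the
            # assert_called_once_with chains above.
            self.bucket_name = dify_config.BAIDU_OBS_BUCKET_NAME
            credentials = BceCredentials(
                access_key_id=dify_config.BAIDU_OBS_ACCESS_KEY,
                secret_access_key=dify_config.BAIDU_OBS_SECRET_KEY,
            )
            config = BceClientConfiguration(
                credentials=credentials,
                endpoint=dify_config.BAIDU_OBS_ENDPOINT,
            )
            self.client = BosClient(config=config)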
@ -365,7 +365,6 @@ def _make_segment(

def _make_child_chunk() -> ChildChunk:
    return ChildChunk(
        id="child-a",
        tenant_id="tenant-1",
        dataset_id="dataset-1",
        document_id="doc-1",
@ -238,6 +238,8 @@ class TestAppAnnotationServiceUpInsert:
        assert result == annotation_instance
        mock_cls.assert_called_once_with(
            app_id=app.id,
            conversation_id=None,
            message_id=None,
            content="hello",
            question="q1",
            account_id=current_user.id,
@ -23,6 +23,7 @@ import pytest

import services.app_generate_service as ags_module
from core.app.entities.app_invoke_entities import InvokeFrom
from enums.quota_type import QuotaType
from models.model import AppMode
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowIdFormatError, WorkflowNotFoundError
@ -448,8 +449,8 @@ class TestGenerateBilling:
    def test_billing_enabled_consumes_quota(self, mocker, monkeypatch):
        monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True)
        quota_charge = MagicMock()
        consume_mock = mocker.patch(
            "services.app_generate_service.QuotaType.WORKFLOW.consume",
        reserve_mock = mocker.patch(
            "services.app_generate_service.QuotaService.reserve",
            return_value=quota_charge,
        )
        mocker.patch(
@ -468,7 +469,8 @@ class TestGenerateBilling:
            invoke_from=InvokeFrom.SERVICE_API,
            streaming=False,
        )
        consume_mock.assert_called_once_with("tenant-id")
        reserve_mock.assert_called_once_with(QuotaType.WORKFLOW, "tenant-id")
        quota_charge.commit.assert_called_once()

    def test_billing_quota_exceeded_raises_rate_limit_error(self, mocker, monkeypatch):
        from services.errors.app import QuotaExceededError
@ -476,7 +478,7 @@ class TestGenerateBilling:

        monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True)
        mocker.patch(
            "services.app_generate_service.QuotaType.WORKFLOW.consume",
            "services.app_generate_service.QuotaService.reserve",
            side_effect=QuotaExceededError(feature="workflow", tenant_id="t", required=1),
        )

@ -493,7 +495,7 @@ class TestGenerateBilling:
        monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True)
        quota_charge = MagicMock()
        mocker.patch(
            "services.app_generate_service.QuotaType.WORKFLOW.consume",
            "services.app_generate_service.QuotaService.reserve",
            return_value=quota_charge,
        )
        mocker.patch(

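The hunks above replace direct QuotaType.WORKFLOW.consume calls with a reserve-then-commit handshake. A minimal sketch of the pattern the new assertions imply, assuming reserve() returns a charge handle with commit()/refund() (the actual service code may differ in detail):

    # Reserve before doing any work; an exhausted quota aborts up front.
    quota_charge = QuotaService.reserve(QuotaType.WORKFLOW, tenant_id)  # may raise QuotaExceededError
    try:
        response = generate(...)  # run the generation
        quota_charge.commit()     # finalize billing only after success
    except Exception:
        quota_charge.refund()     # assumed failure hook; these tests only assert commit()
        raise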
@ -57,7 +57,7 @@ class TestAsyncWorkflowService:
        - repo: SQLAlchemyWorkflowTriggerLogRepository
        - dispatcher_manager_class: QueueDispatcherManager class
        - dispatcher: dispatcher instance
        - quota_workflow: QuotaType.WORKFLOW
        - quota_service: QuotaService mock
        - get_workflow: AsyncWorkflowService._get_workflow method
        - professional_task: execute_workflow_professional
        - team_task: execute_workflow_team
@ -72,7 +72,7 @@ class TestAsyncWorkflowService:
        mock_repo.create.side_effect = _create_side_effect

        mock_dispatcher = MagicMock()
        quota_workflow = MagicMock()
        mock_quota_service = MagicMock()

        with (
            patch.object(
@ -88,8 +88,8 @@ class TestAsyncWorkflowService:
            ) as mock_get_workflow,
            patch.object(
                async_workflow_service_module,
                "QuotaType",
                new=SimpleNamespace(WORKFLOW=quota_workflow),
                "QuotaService",
                new=mock_quota_service,
            ),
            patch.object(async_workflow_service_module, "execute_workflow_professional") as mock_professional_task,
            patch.object(async_workflow_service_module, "execute_workflow_team") as mock_team_task,
@ -102,7 +102,7 @@ class TestAsyncWorkflowService:
                "repo": mock_repo,
                "dispatcher_manager_class": mock_dispatcher_manager_class,
                "dispatcher": mock_dispatcher,
                "quota_workflow": quota_workflow,
                "quota_service": mock_quota_service,
                "get_workflow": mock_get_workflow,
                "professional_task": mock_professional_task,
                "team_task": mock_team_task,
@ -141,6 +141,9 @@ class TestAsyncWorkflowService:
        mocks["team_task"].delay.return_value = task_result
        mocks["sandbox_task"].delay.return_value = task_result

        quota_charge_mock = MagicMock()
        mocks["quota_service"].reserve.return_value = quota_charge_mock

        class DummyAccount:
            def __init__(self, user_id: str):
                self.id = user_id
@ -158,8 +161,9 @@ class TestAsyncWorkflowService:
        assert result.status == "queued"
        assert result.queue == queue_name

        mocks["quota_workflow"].consume.assert_called_once_with("tenant-123")
        assert session.commit.call_count == 2
        mocks["quota_service"].reserve.assert_called_once()
        quota_charge_mock.commit.assert_called_once()
        assert session.commit.call_count == 3

        created_log = mocks["repo"].create.call_args[0][0]
        assert created_log.status == WorkflowTriggerStatus.QUEUED
@ -245,7 +249,7 @@ class TestAsyncWorkflowService:
        mocks = async_workflow_trigger_mocks
        mocks["dispatcher"].get_queue_name.return_value = QueuePriority.TEAM
        mocks["get_workflow"].return_value = workflow
        mocks["quota_workflow"].consume.side_effect = QuotaExceededError(
        mocks["quota_service"].reserve.side_effect = QuotaExceededError(
            feature="workflow",
            tenant_id="tenant-123",
            required=1,
@ -262,7 +266,7 @@ class TestAsyncWorkflowService:
            trigger_data=trigger_data,
        )

        assert session.commit.call_count == 2
        assert session.commit.call_count == 3
        updated_log = mocks["repo"].update.call_args[0][0]
        assert updated_log.status == WorkflowTriggerStatus.RATE_LIMITED
        assert "Quota limit reached" in updated_log.error
@ -465,7 +469,7 @@ class TestAsyncWorkflowServiceGetWorkflow:

        # Assert
        assert result == workflow
        workflow_service.get_published_workflow_by_id.assert_called_once_with(app_model, "workflow-123")
        workflow_service.get_published_workflow_by_id.assert_called_once_with(app_model, "workflow-123", session=None)
        workflow_service.get_published_workflow.assert_not_called()

    def test_should_raise_when_specific_workflow_id_not_found(self):
@ -493,7 +497,7 @@ class TestAsyncWorkflowServiceGetWorkflow:

        # Assert
        assert result == workflow
        workflow_service.get_published_workflow.assert_called_once_with(app_model)
        workflow_service.get_published_workflow.assert_called_once_with(app_model, session=None)
        workflow_service.get_published_workflow_by_id.assert_not_called()

    def test_should_raise_when_default_published_workflow_not_found(self):

@ -425,7 +425,7 @@ class TestBillingServiceUsageCalculation:
        yield mock

    def test_get_tenant_feature_plan_usage_info(self, mock_send_request):
        """Test retrieval of tenant feature plan usage information."""
        """Test retrieval of tenant feature plan usage information (legacy endpoint)."""
        # Arrange
        tenant_id = "tenant-123"
        expected_response = {"features": {"trigger": {"used": 50, "limit": 100}, "workflow": {"used": 20, "limit": 50}}}
@ -438,6 +438,20 @@ class TestBillingServiceUsageCalculation:
        assert result == expected_response
        mock_send_request.assert_called_once_with("GET", "/tenant-feature-usage/info", params={"tenant_id": tenant_id})

    def test_get_quota_info(self, mock_send_request):
        """Test retrieval of quota info from new endpoint."""
        # Arrange
        tenant_id = "tenant-123"
        expected_response = {"trigger_event": {"limit": 100, "usage": 30}, "api_rate_limit": {"limit": -1, "usage": 0}}
        mock_send_request.return_value = expected_response

        # Act
        result = BillingService.get_quota_info(tenant_id)

        # Assert
        assert result == expected_response
        mock_send_request.assert_called_once_with("GET", "/quota/info", params={"tenant_id": tenant_id})

    def test_update_tenant_feature_plan_usage_positive_delta(self, mock_send_request):
        """Test updating tenant feature usage with positive delta (adding credits)."""
        # Arrange
@ -515,6 +529,150 @@ class TestBillingServiceUsageCalculation:
        )

class TestBillingServiceQuotaOperations:
    """Unit tests for quota reserve/commit/release operations."""

    @pytest.fixture
    def mock_send_request(self):
        with patch.object(BillingService, "_send_request") as mock:
            yield mock

    def test_quota_reserve_success(self, mock_send_request):
        expected = {"reservation_id": "rid-1", "available": 99, "reserved": 1}
        mock_send_request.return_value = expected

        result = BillingService.quota_reserve(tenant_id="t1", feature_key="trigger_event", request_id="req-1", amount=1)

        assert result == expected
        mock_send_request.assert_called_once_with(
            "POST",
            "/quota/reserve",
            json={"tenant_id": "t1", "feature_key": "trigger_event", "request_id": "req-1", "amount": 1},
        )

    def test_quota_reserve_coerces_string_to_int(self, mock_send_request):
        """Test that TypeAdapter coerces string values to int."""
        mock_send_request.return_value = {"reservation_id": "rid-str", "available": "99", "reserved": "1"}

        result = BillingService.quota_reserve(tenant_id="t1", feature_key="trigger_event", request_id="req-s", amount=1)

        assert result["available"] == 99
        assert isinstance(result["available"], int)
        assert result["reserved"] == 1
        assert isinstance(result["reserved"], int)

    def test_quota_reserve_with_meta(self, mock_send_request):
        mock_send_request.return_value = {"reservation_id": "rid-2", "available": 98, "reserved": 1}
        meta = {"source": "webhook"}

        BillingService.quota_reserve(
            tenant_id="t1", feature_key="trigger_event", request_id="req-2", amount=1, meta=meta
        )

        call_json = mock_send_request.call_args[1]["json"]
        assert call_json["meta"] == {"source": "webhook"}

    def test_quota_commit_success(self, mock_send_request):
        expected = {"available": 98, "reserved": 0, "refunded": 0}
        mock_send_request.return_value = expected

        result = BillingService.quota_commit(
            tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1", actual_amount=1
        )

        assert result == expected
        mock_send_request.assert_called_once_with(
            "POST",
            "/quota/commit",
            json={
                "tenant_id": "t1",
                "feature_key": "trigger_event",
                "reservation_id": "rid-1",
                "actual_amount": 1,
            },
        )

    def test_quota_commit_coerces_string_to_int(self, mock_send_request):
        """Test that TypeAdapter coerces string values to int."""
        mock_send_request.return_value = {"available": "97", "reserved": "0", "refunded": "1"}

        result = BillingService.quota_commit(
            tenant_id="t1", feature_key="trigger_event", reservation_id="rid-s", actual_amount=1
        )

        assert result["available"] == 97
        assert isinstance(result["available"], int)
        assert result["refunded"] == 1
        assert isinstance(result["refunded"], int)

    def test_quota_commit_with_meta(self, mock_send_request):
        mock_send_request.return_value = {"available": 97, "reserved": 0, "refunded": 0}
        meta = {"reason": "partial"}

        BillingService.quota_commit(
            tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1", actual_amount=1, meta=meta
        )

        call_json = mock_send_request.call_args[1]["json"]
        assert call_json["meta"] == {"reason": "partial"}

    def test_quota_release_success(self, mock_send_request):
        expected = {"available": 100, "reserved": 0, "released": 1}
        mock_send_request.return_value = expected

        result = BillingService.quota_release(tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1")

        assert result == expected
        mock_send_request.assert_called_once_with(
            "POST",
            "/quota/release",
            json={"tenant_id": "t1", "feature_key": "trigger_event", "reservation_id": "rid-1"},
        )

    def test_quota_release_coerces_string_to_int(self, mock_send_request):
        """Test that TypeAdapter coerces string values to int."""
        mock_send_request.return_value = {"available": "100", "reserved": "0", "released": "1"}

        result = BillingService.quota_release(tenant_id="t1", feature_key="trigger_event", reservation_id="rid-s")

        assert result["available"] == 100
        assert isinstance(result["available"], int)
        assert result["released"] == 1
        assert isinstance(result["released"], int)

    def test_get_quota_info_coerces_string_to_int(self, mock_send_request):
        """Test that TypeAdapter coerces string values to int for get_quota_info."""
        mock_send_request.return_value = {
            "trigger_event": {"usage": "42", "limit": "3000", "reset_date": "1700000000"},
            "api_rate_limit": {"usage": "10", "limit": "-1", "reset_date": "-1"},
        }

        result = BillingService.get_quota_info("t1")

        assert result["trigger_event"]["usage"] == 42
        assert isinstance(result["trigger_event"]["usage"], int)
        assert result["trigger_event"]["limit"] == 3000
        assert isinstance(result["trigger_event"]["limit"], int)
        assert result["trigger_event"]["reset_date"] == 1700000000
        assert isinstance(result["trigger_event"]["reset_date"], int)
        assert result["api_rate_limit"]["limit"] == -1
        assert isinstance(result["api_rate_limit"]["limit"], int)

    def test_get_quota_info_accepts_int_values(self, mock_send_request):
        """Test that get_quota_info works with native int values."""
        expected = {
            "trigger_event": {"usage": 42, "limit": 3000, "reset_date": 1700000000},
            "api_rate_limit": {"usage": 0, "limit": -1},
        }
        mock_send_request.return_value = expected

        result = BillingService.get_quota_info("t1")

        assert result["trigger_event"]["usage"] == 42
        assert result["trigger_event"]["limit"] == 3000
        assert result["api_rate_limit"]["limit"] == -1


class TestBillingServiceRateLimitEnforcement:
    """Unit tests for rate limit enforcement mechanisms.

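Several of the tests above assert that string payload values come back as ints. A hedged sketch of that coercion, assuming the service validates billing responses with pydantic's TypeAdapter in default (lax) mode; the TypedDict below is illustrative, not the real model:

    from typing import TypedDict

    from pydantic import TypeAdapter

    class ReserveResult(TypedDict):  # hypothetical response shape
        reservation_id: str
        available: int
        reserved: int

    adapter = TypeAdapter(ReserveResult)
    # Lax-mode validation coerces numeric strings into ints, which is exactly
    # what the *_coerces_string_to_int tests pin down.
    result = adapter.validate_python({"reservation_id": "rid-1", "available": "99", "reserved": "1"})
    assert result["available"] == 99 and isinstance(result["available"], int)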
@ -89,7 +89,6 @@ class TestSegmentServiceChildChunks:
        document = _make_document()
        segment = _make_segment()
        existing_a = ChildChunk(
            id="child-a",
            tenant_id="tenant-1",
            dataset_id="dataset-1",
            document_id="doc-1",
@ -100,7 +99,6 @@ class TestSegmentServiceChildChunks:
            created_by="user-1",
        )
        existing_b = ChildChunk(
            id="child-b",
            tenant_id="tenant-1",
            dataset_id="dataset-1",
            document_id="doc-1",
@ -110,7 +108,8 @@ class TestSegmentServiceChildChunks:
            word_count=9,
            created_by="user-1",
        )

        existing_a.id = "child-a"
        existing_b.id = "child-b"
        with (
            patch("services.dataset_service.db") as mock_db,
            patch("services.dataset_service.uuid.uuid4", return_value="node-new"),

291
api/tests/unit_tests/tasks/test_clean_document_task.py
Normal file
@ -0,0 +1,291 @@
"""
|
||||
Unit tests for clean_document_task.
|
||||
|
||||
Focuses on the resilience contract added by the billing-failure fix:
|
||||
``index_processor.clean()`` is wrapped in ``try/except`` so that a transient
|
||||
failure inside the vector / keyword cleanup (e.g. ``ValueError("Unable to
|
||||
retrieve billing information...")`` raised by ``BillingService._send_request``
|
||||
when ``Vector(dataset)`` transitively triggers ``FeatureService.get_features``)
|
||||
does not abort the entire task and leave PG with stranded ``DocumentSegment``
|
||||
/ ``ChildChunk`` / ``UploadFile`` / ``DatasetMetadataBinding`` rows.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from tasks.clean_document_task import clean_document_task
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def document_id():
|
||||
return str(uuid.uuid4())
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def dataset_id():
|
||||
return str(uuid.uuid4())
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def tenant_id():
|
||||
return str(uuid.uuid4())
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_session_factory():
|
||||
"""Patch ``session_factory.create_session`` to return per-call mock sessions.
|
||||
|
||||
Each call to ``create_session()`` yields a fresh ``MagicMock`` session so we
|
||||
can assert ``execute()`` calls across the multiple short-lived transactions
|
||||
used by ``clean_document_task``.
|
||||
"""
|
||||
with patch("tasks.clean_document_task.session_factory", autospec=True) as mock_sf:
|
||||
sessions: list[MagicMock] = []
|
||||
|
||||
def _create_session():
|
||||
session = MagicMock()
|
||||
session.scalars.return_value.all.return_value = []
|
||||
session.execute.return_value.all.return_value = []
|
||||
session.scalar.return_value = None
|
||||
cm = MagicMock()
|
||||
cm.__enter__.return_value = session
|
||||
cm.__exit__.return_value = None
|
||||
sessions.append(session)
|
||||
return cm
|
||||
|
||||
mock_sf.create_session.side_effect = _create_session
|
||||
yield mock_sf, sessions
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_storage():
|
||||
with patch("tasks.clean_document_task.storage", autospec=True) as mock:
|
||||
mock.delete.return_value = None
|
||||
yield mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_index_processor_factory():
|
||||
"""Mock ``IndexProcessorFactory`` so we can inject behavior into ``clean``."""
|
||||
with patch("tasks.clean_document_task.IndexProcessorFactory", autospec=True) as factory_cls:
|
||||
processor = MagicMock()
|
||||
processor.clean.return_value = None
|
||||
factory_instance = MagicMock()
|
||||
factory_instance.init_index_processor.return_value = processor
|
||||
factory_cls.return_value = factory_instance
|
||||
|
||||
yield {
|
||||
"factory_cls": factory_cls,
|
||||
"factory_instance": factory_instance,
|
||||
"processor": processor,
|
||||
}
|
||||
|
||||
|
||||
def _build_segment(segment_id: str, content: str = "segment content") -> MagicMock:
|
||||
seg = MagicMock()
|
||||
seg.id = segment_id
|
||||
seg.index_node_id = f"node-{segment_id}"
|
||||
seg.content = content
|
||||
return seg
|
||||
|
||||
|
||||
def _build_dataset(dataset_id: str, tenant_id: str) -> MagicMock:
|
||||
ds = MagicMock()
|
||||
ds.id = dataset_id
|
||||
ds.tenant_id = tenant_id
|
||||
return ds
|
||||
|
||||
|
||||
class TestVectorCleanupResilience:
|
||||
"""Vector / keyword cleanup must not abort the task on transient failure."""
|
||||
|
||||
def test_billing_failure_during_vector_cleanup_does_not_skip_pg_cleanup(
|
||||
self,
|
||||
document_id,
|
||||
dataset_id,
|
||||
tenant_id,
|
||||
mock_session_factory,
|
||||
mock_storage,
|
||||
mock_index_processor_factory,
|
||||
):
|
||||
"""Reproduces the production incident:
|
||||
|
||||
``Vector(dataset)`` transitively calls ``FeatureService.get_features``
|
||||
which calls ``BillingService._send_request("GET", ...)``. When billing
|
||||
returns non-200 it raises ``ValueError("Unable to retrieve billing
|
||||
information...")``. Before the fix this propagated out of
|
||||
``clean_document_task`` and left ``DocumentSegment`` / ``ChildChunk`` /
|
||||
``UploadFile`` / ``DatasetMetadataBinding`` rows orphaned because the
|
||||
already-deleted ``Document`` row had been hard-committed by the caller
|
||||
(``dataset_service.delete_document``) before ``.delay()`` was invoked.
|
||||
|
||||
Contract: a billing failure inside ``index_processor.clean()`` must be
|
||||
caught, logged, and the rest of the task must continue so PG ends up
|
||||
consistent with the deleted ``Document`` even if Qdrant retains
|
||||
orphan vectors that can be reaped later.
|
||||
"""
|
||||
mock_sf, sessions = mock_session_factory
|
||||
|
||||
# First create_session(): Step 1 (load segments + attachments).
|
||||
step1_session = MagicMock()
|
||||
step1_session.scalars.return_value.all.return_value = [
|
||||
_build_segment("seg-1"),
|
||||
_build_segment("seg-2"),
|
||||
]
|
||||
step1_session.execute.return_value.all.return_value = []
|
||||
step1_session.scalar.return_value = _build_dataset(dataset_id, tenant_id)
|
||||
# Second create_session(): Step 2 (vector cleanup). Returns dataset.
|
||||
step2_session = MagicMock()
|
||||
step2_session.scalar.return_value = _build_dataset(dataset_id, tenant_id)
|
||||
step2_session.scalars.return_value.all.return_value = []
|
||||
step2_session.execute.return_value.all.return_value = []
|
||||
# Subsequent sessions: Step 3+ (image / segment / file / metadata cleanup).
|
||||
# Default fixture returns empty results which is fine for these short txns.
|
||||
cm1, cm2 = MagicMock(), MagicMock()
|
||||
cm1.__enter__.return_value = step1_session
|
||||
cm1.__exit__.return_value = None
|
||||
cm2.__enter__.return_value = step2_session
|
||||
cm2.__exit__.return_value = None
|
||||
|
||||
def _default_cm():
|
||||
session = MagicMock()
|
||||
session.scalars.return_value.all.return_value = []
|
||||
session.execute.return_value.all.return_value = []
|
||||
session.scalar.return_value = None
|
||||
cm = MagicMock()
|
||||
cm.__enter__.return_value = session
|
||||
cm.__exit__.return_value = None
|
||||
sessions.append(session)
|
||||
return cm
|
||||
|
||||
mock_sf.create_session.side_effect = [cm1, cm2] + [_default_cm() for _ in range(10)]
|
||||
|
||||
# Simulate the production failure: index_processor.clean() raises ValueError
|
||||
# mirroring BillingService._send_request when billing returns non-200.
|
||||
mock_index_processor_factory["processor"].clean.side_effect = ValueError(
|
||||
"Unable to retrieve billing information. Please try again later or contact support."
|
||||
)
|
||||
|
||||
# Act — must not raise out of the task even though clean() raises.
|
||||
clean_document_task(
|
||||
document_id=document_id,
|
||||
dataset_id=dataset_id,
|
||||
doc_form="paragraph",
|
||||
file_id=None,
|
||||
)
|
||||
|
||||
# Assert
|
||||
# 1. Vector cleanup was attempted.
|
||||
mock_index_processor_factory["processor"].clean.assert_called_once()
|
||||
# 2. Despite the failure the task continued: at least one DocumentSegment
|
||||
# delete was issued. We use the count of session.execute calls across
|
||||
# later short transactions as a proxy for "Step 3+ executed".
|
||||
execute_calls = sum(s.execute.call_count for s in sessions)
|
||||
assert execute_calls > 0, (
|
||||
"Step 3+ DB cleanup did not run after vector cleanup failure; "
|
||||
"this regression would re-introduce the orphan-segment bug."
|
||||
)
|
||||
|
||||
def test_vector_cleanup_success_path_remains_unaffected(
|
||||
self,
|
||||
document_id,
|
||||
dataset_id,
|
||||
tenant_id,
|
||||
mock_session_factory,
|
||||
mock_storage,
|
||||
mock_index_processor_factory,
|
||||
):
|
||||
"""Backward-compat: the happy path must still call ``clean()`` exactly
|
||||
once with the expected arguments and complete without errors.
|
||||
"""
|
||||
mock_sf, sessions = mock_session_factory
|
||||
|
||||
step1_session = MagicMock()
|
||||
step1_session.scalars.return_value.all.return_value = [_build_segment("seg-1")]
|
||||
step1_session.execute.return_value.all.return_value = []
|
||||
step1_session.scalar.return_value = _build_dataset(dataset_id, tenant_id)
|
||||
step2_session = MagicMock()
|
||||
step2_session.scalar.return_value = _build_dataset(dataset_id, tenant_id)
|
||||
step2_session.scalars.return_value.all.return_value = []
|
||||
step2_session.execute.return_value.all.return_value = []
|
||||
cm1, cm2 = MagicMock(), MagicMock()
|
||||
cm1.__enter__.return_value = step1_session
|
||||
cm1.__exit__.return_value = None
|
||||
cm2.__enter__.return_value = step2_session
|
||||
cm2.__exit__.return_value = None
|
||||
|
||||
def _default_cm():
|
||||
session = MagicMock()
|
||||
session.scalars.return_value.all.return_value = []
|
||||
session.execute.return_value.all.return_value = []
|
||||
session.scalar.return_value = None
|
||||
cm = MagicMock()
|
||||
cm.__enter__.return_value = session
|
||||
cm.__exit__.return_value = None
|
||||
sessions.append(session)
|
||||
return cm
|
||||
|
||||
mock_sf.create_session.side_effect = [cm1, cm2] + [_default_cm() for _ in range(10)]
|
||||
|
||||
clean_document_task(
|
||||
document_id=document_id,
|
||||
dataset_id=dataset_id,
|
||||
doc_form="paragraph",
|
||||
file_id=None,
|
||||
)
|
||||
|
||||
assert mock_index_processor_factory["processor"].clean.call_count == 1
|
||||
# Index cleanup invoked with the expected delete_summaries / delete_child_chunks flags.
|
||||
_, kwargs = mock_index_processor_factory["processor"].clean.call_args
|
||||
assert kwargs.get("with_keywords") is True
|
||||
assert kwargs.get("delete_child_chunks") is True
|
||||
assert kwargs.get("delete_summaries") is True
|
||||
|
||||
def test_no_segments_skips_vector_cleanup(
|
||||
self,
|
||||
document_id,
|
||||
dataset_id,
|
||||
tenant_id,
|
||||
mock_session_factory,
|
||||
mock_storage,
|
||||
mock_index_processor_factory,
|
||||
):
|
||||
"""When the document has no segments (e.g. indexing failed before
|
||||
producing any), vector cleanup must not be attempted — and therefore
|
||||
the new try/except wrapper does not change behavior here.
|
||||
"""
|
||||
mock_sf, sessions = mock_session_factory
|
||||
|
||||
step1_session = MagicMock()
|
||||
step1_session.scalars.return_value.all.return_value = [] # no segments
|
||||
step1_session.execute.return_value.all.return_value = []
|
||||
step1_session.scalar.return_value = _build_dataset(dataset_id, tenant_id)
|
||||
cm1 = MagicMock()
|
||||
cm1.__enter__.return_value = step1_session
|
||||
cm1.__exit__.return_value = None
|
||||
|
||||
def _default_cm():
|
||||
session = MagicMock()
|
||||
session.scalars.return_value.all.return_value = []
|
||||
session.execute.return_value.all.return_value = []
|
||||
session.scalar.return_value = None
|
||||
cm = MagicMock()
|
||||
cm.__enter__.return_value = session
|
||||
cm.__exit__.return_value = None
|
||||
sessions.append(session)
|
||||
return cm
|
||||
|
||||
mock_sf.create_session.side_effect = [cm1] + [_default_cm() for _ in range(10)]
|
||||
|
||||
clean_document_task(
|
||||
document_id=document_id,
|
||||
dataset_id=dataset_id,
|
||||
doc_form="paragraph",
|
||||
file_id=None,
|
||||
)
|
||||
|
||||
# Vector cleanup is gated on ``index_node_ids``; when there are no
|
||||
# segments the IndexProcessorFactory path is never entered.
|
||||
mock_index_processor_factory["factory_cls"].assert_not_called()
|
||||
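A minimal sketch of the try/except contract those three tests lock in, using names from the test doubles above (the real task body is longer and may differ; the logger call is an assumption):

    index_node_ids = [seg.index_node_id for seg in segments]
    if index_node_ids:
        try:
            index_processor = IndexProcessorFactory(doc_form).init_index_processor()
            index_processor.clean(
                dataset,
                index_node_ids,
                with_keywords=True,
                delete_child_chunks=True,
                delete_summaries=True,
            )
        except Exception:
            # A transient billing/vector failure must not strand PG rows; log and
            # continue so segments, chunks, files and metadata bindings are removed.
            logger.exception("Vector cleanup failed for document %s; continuing", document_id)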
@ -0,0 +1,289 @@
from __future__ import annotations

from types import SimpleNamespace
from unittest.mock import MagicMock, patch

from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.plugin.entities.plugin import PluginInstallationSource
from models.account import TenantPluginAutoUpgradeStrategy

MODULE = "tasks.process_tenant_plugin_autoupgrade_check_task"


def _make_plugin(plugin_id: str, version: str, source=PluginInstallationSource.Marketplace):
    """Build a minimal stand-in for a PluginInstallation entry returned by manager.list_plugins."""
    return SimpleNamespace(
        plugin_id=plugin_id,
        version=version,
        plugin_unique_identifier=f"{plugin_id}:{version}@deadbeef",
        source=source,
    )


def _make_manifest(plugin_id: str, latest_version: str) -> MarketplacePluginSnapshot:
    org, name = plugin_id.split("/", 1)
    return MarketplacePluginSnapshot(
        org=org,
        name=name,
        latest_version=latest_version,
        latest_package_identifier=f"{plugin_id}:{latest_version}@cafe1234",
        latest_package_url=f"https://marketplace.example/{plugin_id}/{latest_version}.difypkg",
    )


def _run_task(
    *,
    plugins: list,
    manifests: list[MarketplacePluginSnapshot],
    strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
    upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
    exclude_plugins=None,
    include_plugins=None,
):
    """
    Execute the celery task synchronously with mocks for the plugin manager,
    the marketplace cache and PluginService.upgrade_plugin_with_marketplace.
    Returns the upgrade-call recorder so each test can assert on it.
    """
    fake_manager = MagicMock()
    fake_manager.list_plugins.return_value = plugins

    upgrade_calls: list[tuple[str, str, str]] = []

    def _record_upgrade(tenant_id, original, new):
        upgrade_calls.append((tenant_id, original, new))

    with (
        patch(f"{MODULE}.PluginInstaller", return_value=fake_manager),
        patch(f"{MODULE}.marketplace_batch_fetch_plugin_manifests", return_value=manifests),
        patch(
            f"{MODULE}.PluginService.upgrade_plugin_with_marketplace",
            side_effect=_record_upgrade,
        ) as upgrade_mock,
    ):
        from tasks.process_tenant_plugin_autoupgrade_check_task import (
            process_tenant_plugin_autoupgrade_check_task,
        )

        process_tenant_plugin_autoupgrade_check_task(
            "tenant-1",
            strategy_setting,
            0,
            upgrade_mode,
            exclude_plugins or [],
            include_plugins or [],
        )

    return upgrade_mock, upgrade_calls


class TestUpgradeCallsMarketplaceService:
    """
    Regression test for the bug where the auto-upgrade task called
    manager.upgrade_plugin directly, which skipped downloading the new package
    from marketplace and uploading it to the daemon. The daemon then failed with
    "package file not found" and the upgrade silently never completed.
    """

    def test_upgrade_routes_through_plugin_service(self):
        plugin = _make_plugin("acme/foo", "1.0.0")
        manifest = _make_manifest("acme/foo", "1.0.1")

        upgrade_mock, calls = _run_task(plugins=[plugin], manifests=[manifest])

        upgrade_mock.assert_called_once()
        assert calls == [("tenant-1", plugin.plugin_unique_identifier, manifest.latest_package_identifier)]

    def test_does_not_call_manager_upgrade_plugin_directly(self):
        """Locks in that we never go back to the broken path that bypassed download/upload."""
        plugin = _make_plugin("acme/foo", "1.0.0")
        manifest = _make_manifest("acme/foo", "1.0.1")

        fake_manager = MagicMock()
        fake_manager.list_plugins.return_value = [plugin]

        with (
            patch(f"{MODULE}.PluginInstaller", return_value=fake_manager),
            patch(f"{MODULE}.marketplace_batch_fetch_plugin_manifests", return_value=[manifest]),
            patch(f"{MODULE}.PluginService.upgrade_plugin_with_marketplace"),
        ):
            from tasks.process_tenant_plugin_autoupgrade_check_task import (
                process_tenant_plugin_autoupgrade_check_task,
            )

            process_tenant_plugin_autoupgrade_check_task(
                "tenant-1",
                TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
                0,
                TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
                [],
                [],
            )

        fake_manager.upgrade_plugin.assert_not_called()


class TestStrategySetting:
    def test_disabled_strategy_skips_everything(self):
        upgrade_mock, _ = _run_task(
            plugins=[_make_plugin("acme/foo", "1.0.0")],
            manifests=[_make_manifest("acme/foo", "1.0.1")],
            strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.DISABLED,
        )
        upgrade_mock.assert_not_called()

    def test_fix_only_upgrades_patch_version(self):
        upgrade_mock, calls = _run_task(
            plugins=[_make_plugin("acme/foo", "1.0.0")],
            manifests=[_make_manifest("acme/foo", "1.0.5")],
            strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
        )
        upgrade_mock.assert_called_once()
        assert calls[0][2].endswith(":1.0.5@cafe1234")

    def test_fix_only_skips_minor_bump(self):
        upgrade_mock, _ = _run_task(
            plugins=[_make_plugin("acme/foo", "1.0.0")],
            manifests=[_make_manifest("acme/foo", "1.1.0")],
            strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
        )
        upgrade_mock.assert_not_called()

    def test_fix_only_skips_major_bump(self):
        upgrade_mock, _ = _run_task(
            plugins=[_make_plugin("acme/foo", "1.0.0")],
            manifests=[_make_manifest("acme/foo", "2.0.0")],
            strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
        )
        upgrade_mock.assert_not_called()

    def test_latest_strategy_skips_when_versions_equal(self):
        upgrade_mock, _ = _run_task(
            plugins=[_make_plugin("acme/foo", "1.0.0")],
            manifests=[_make_manifest("acme/foo", "1.0.0")],
            strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
        )
        upgrade_mock.assert_not_called()


class TestUpgradeMode:
    def test_mode_all_upgrades_every_marketplace_plugin(self):
        plugins = [
            _make_plugin("acme/foo", "1.0.0"),
            _make_plugin("acme/bar", "2.0.0"),
        ]
        manifests = [
            _make_manifest("acme/foo", "1.0.1"),
            _make_manifest("acme/bar", "2.0.1"),
        ]

        upgrade_mock, calls = _run_task(
            plugins=plugins,
            manifests=manifests,
            upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
        )

        assert upgrade_mock.call_count == 2
        upgraded_ids = sorted(c[1] for c in calls)
        assert upgraded_ids == sorted(p.plugin_unique_identifier for p in plugins)

    def test_mode_all_skips_non_marketplace_sources(self):
        plugins = [
            _make_plugin("acme/foo", "1.0.0"),
            _make_plugin("acme/bar", "2.0.0", source=PluginInstallationSource.Github),
        ]
        manifests = [
            _make_manifest("acme/foo", "1.0.1"),
            _make_manifest("acme/bar", "2.0.1"),
        ]

        upgrade_mock, calls = _run_task(
            plugins=plugins,
            manifests=manifests,
            upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
        )

        assert upgrade_mock.call_count == 1
        assert calls[0][1] == plugins[0].plugin_unique_identifier

    def test_mode_partial_only_upgrades_included_plugins(self):
        plugins = [
            _make_plugin("acme/foo", "1.0.0"),
            _make_plugin("acme/bar", "2.0.0"),
        ]
        manifests = [
            _make_manifest("acme/foo", "1.0.1"),
            _make_manifest("acme/bar", "2.0.1"),
        ]

        upgrade_mock, calls = _run_task(
            plugins=plugins,
            manifests=manifests,
            upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL,
            include_plugins=["acme/foo"],
        )

        assert upgrade_mock.call_count == 1
        assert calls[0][1] == plugins[0].plugin_unique_identifier

    def test_mode_exclude_skips_excluded_plugins(self):
        plugins = [
            _make_plugin("acme/foo", "1.0.0"),
            _make_plugin("acme/bar", "2.0.0"),
        ]
        manifests = [
            _make_manifest("acme/foo", "1.0.1"),
            _make_manifest("acme/bar", "2.0.1"),
        ]

        upgrade_mock, calls = _run_task(
            plugins=plugins,
            manifests=manifests,
            upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE,
            exclude_plugins=["acme/bar"],
        )

        assert upgrade_mock.call_count == 1
        assert calls[0][1] == plugins[0].plugin_unique_identifier


class TestErrorIsolation:
    def test_one_plugin_failure_does_not_block_others(self):
        plugins = [
            _make_plugin("acme/foo", "1.0.0"),
            _make_plugin("acme/bar", "2.0.0"),
        ]
        manifests = [
            _make_manifest("acme/foo", "1.0.1"),
            _make_manifest("acme/bar", "2.0.1"),
        ]
        fake_manager = MagicMock()
        fake_manager.list_plugins.return_value = plugins

        seen: list[str] = []

        def _upgrade(tenant_id, original, new):
            seen.append(original)
            if "foo" in original:
                raise RuntimeError("boom")

        with (
            patch(f"{MODULE}.PluginInstaller", return_value=fake_manager),
            patch(f"{MODULE}.marketplace_batch_fetch_plugin_manifests", return_value=manifests),
            patch(f"{MODULE}.PluginService.upgrade_plugin_with_marketplace", side_effect=_upgrade),
        ):
            from tasks.process_tenant_plugin_autoupgrade_check_task import (
                process_tenant_plugin_autoupgrade_check_task,
            )

            process_tenant_plugin_autoupgrade_check_task(
                "tenant-1",
                TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST,
                0,
                TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL,
                [],
                [],
            )

        assert any("foo" in s for s in seen)
        assert any("bar" in s for s in seen)
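The FIX_ONLY cases above reduce to a small semver gate. A self-contained sketch of that rule, assuming standard major.minor.patch strings (the task's actual comparison logic may be more elaborate):

    def fix_only_allows(current: str, latest: str) -> bool:
        """Allow the upgrade only when major.minor match and the patch increased."""
        cur = tuple(int(p) for p in current.split("."))
        new = tuple(int(p) for p in latest.split("."))
        return cur[:2] == new[:2] and new[2] > cur[2]

    assert fix_only_allows("1.0.0", "1.0.5")        # patch bump: upgrade
    assert not fix_only_allows("1.0.0", "1.1.0")    # minor bump: skip
    assert not fix_only_allows("1.0.0", "2.0.0")    # major bump: skip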
204
api/tests/unit_tests/tasks/test_trigger_processing_tasks.py
Normal file
@ -0,0 +1,204 @@
from unittest.mock import MagicMock, patch

import pytest

import tasks.trigger_processing_tasks as trigger_processing_tasks_module
from services.errors.app import QuotaExceededError
from tasks.trigger_processing_tasks import dispatch_triggered_workflow


class TestDispatchTriggeredWorkflow:
    """Unit tests covering branch behaviours of ``dispatch_triggered_workflow``.

    The covered branches are:
    - workflow missing for ``plugin_trigger.app_id`` → log + ``continue``
    - ``QuotaService.reserve`` raising ``QuotaExceededError`` →
      ``mark_tenant_triggers_rate_limited`` + early ``return``
    - ``trigger_workflow_async`` succeeds →
      ``quota_charge.commit()`` + ``dispatched_count`` increments
    """

    @pytest.fixture
    def subscription(self):
        sub = MagicMock()
        sub.id = "subscription-123"
        sub.tenant_id = "tenant-123"
        sub.provider_id = "langgenius/test_plugin/test_plugin"
        sub.endpoint_id = "endpoint-123"
        sub.credentials = {}
        sub.credential_type = "api_key"
        return sub

    @pytest.fixture
    def plugin_trigger(self):
        trigger = MagicMock()
        trigger.id = "plugin-trigger-123"
        trigger.app_id = "app-123"
        trigger.node_id = "node-123"
        return trigger

    @pytest.fixture
    def provider_controller(self):
        controller = MagicMock()
        controller.plugin_unique_identifier = "langgenius/test_plugin:0.0.1"
        controller.entity.identity.name = "Test Plugin"
        controller.entity.identity.icon = "icon.svg"
        controller.entity.identity.icon_dark = "icon_dark.svg"
        return controller

    @pytest.fixture
    def dispatch_mocks(self, subscription, plugin_trigger, provider_controller):
        """Patch all external dependencies reached by ``dispatch_triggered_workflow``.

        Defaults are configured so the code flow can reach the final async
        trigger block (line ~385); each test overrides specific handles
        (``get_workflows``, ``reserve``, ``create_end_user_batch``, ...) to
        drive the path it targets.
        """
        session_cm = MagicMock()
        session_cm.__enter__.return_value = MagicMock()
        session_cm.__exit__.return_value = False

        invoke_response = MagicMock()
        invoke_response.cancelled = False
        invoke_response.variables = {}

        quota_charge = MagicMock()

        with (
            patch.object(
                trigger_processing_tasks_module.TriggerHttpRequestCachingService,
                "get_request",
                return_value=MagicMock(),
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerHttpRequestCachingService,
                "get_payload",
                return_value=MagicMock(),
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerSubscriptionOperatorService,
                "get_subscriber_triggers",
                return_value=[plugin_trigger],
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerManager,
                "get_trigger_provider",
                return_value=provider_controller,
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerManager,
                "invoke_trigger_event",
                return_value=invoke_response,
            ) as invoke_trigger_event,
            patch.object(
                trigger_processing_tasks_module.TriggerEventNodeData,
                "model_validate",
                return_value=MagicMock(),
            ),
            patch.object(
                trigger_processing_tasks_module,
                "_get_latest_workflows_by_app_ids",
            ) as get_workflows,
            patch.object(
                trigger_processing_tasks_module.EndUserService,
                "create_end_user_batch",
                return_value={},
            ) as create_end_user_batch,
            patch.object(
                trigger_processing_tasks_module.session_factory,
                "create_session",
                return_value=session_cm,
            ),
            patch.object(
                trigger_processing_tasks_module.QuotaService,
                "reserve",
                return_value=quota_charge,
            ) as reserve,
            patch.object(
                trigger_processing_tasks_module.AppTriggerService,
                "mark_tenant_triggers_rate_limited",
            ) as mark_rate_limited,
            patch.object(
                trigger_processing_tasks_module.AsyncWorkflowService,
                "trigger_workflow_async",
            ) as trigger_workflow_async,
        ):
            yield {
                "get_workflows": get_workflows,
                "reserve": reserve,
                "quota_charge": quota_charge,
                "mark_rate_limited": mark_rate_limited,
                "invoke_trigger_event": invoke_trigger_event,
                "invoke_response": invoke_response,
                "create_end_user_batch": create_end_user_batch,
                "trigger_workflow_async": trigger_workflow_async,
            }

    def test_dispatch_skips_when_workflow_missing(self, subscription, dispatch_mocks):
        """Covers missing workflow → log + ``continue``."""
        dispatch_mocks["get_workflows"].return_value = {}

        dispatched = dispatch_triggered_workflow(
            user_id="user-123",
            subscription=subscription,
            event_name="test_event",
            request_id="request-123",
        )

        assert dispatched == 0
        dispatch_mocks["reserve"].assert_not_called()
        dispatch_mocks["invoke_trigger_event"].assert_not_called()
        dispatch_mocks["mark_rate_limited"].assert_not_called()

    def test_dispatch_marks_rate_limited_when_quota_exceeded(self, subscription, plugin_trigger, dispatch_mocks):
        """Covers QuotaExceededError → mark rate-limited + early return."""
        workflow_mock = MagicMock()
        workflow_mock.walk_nodes.return_value = iter(
            [(plugin_trigger.node_id, {"type": trigger_processing_tasks_module.TRIGGER_PLUGIN_NODE_TYPE})]
        )
        dispatch_mocks["get_workflows"].return_value = {plugin_trigger.app_id: workflow_mock}
        dispatch_mocks["reserve"].side_effect = QuotaExceededError(
            feature="trigger", tenant_id=subscription.tenant_id, required=1
        )

        dispatched = dispatch_triggered_workflow(
            user_id="user-123",
            subscription=subscription,
            event_name="test_event",
            request_id="request-123",
        )

        assert dispatched == 0
        dispatch_mocks["reserve"].assert_called_once()
        dispatch_mocks["mark_rate_limited"].assert_called_once_with(subscription.tenant_id)
        dispatch_mocks["invoke_trigger_event"].assert_not_called()

    def test_dispatch_commits_quota_and_counts_when_workflow_triggered(
        self, subscription, plugin_trigger, dispatch_mocks
    ):
        """Happy path: end user exists and async trigger succeeds."""
        workflow_mock = MagicMock()
        workflow_mock.id = "workflow-123"
        workflow_mock.walk_nodes.return_value = iter(
            [(plugin_trigger.node_id, {"type": trigger_processing_tasks_module.TRIGGER_PLUGIN_NODE_TYPE})]
        )
        dispatch_mocks["get_workflows"].return_value = {plugin_trigger.app_id: workflow_mock}

        end_user_mock = MagicMock()
        dispatch_mocks["create_end_user_batch"].return_value = {plugin_trigger.app_id: end_user_mock}

        dispatched = dispatch_triggered_workflow(
            user_id="user-123",
            subscription=subscription,
            event_name="test_event",
            request_id="request-123",
        )

        assert dispatched == 1
        dispatch_mocks["trigger_workflow_async"].assert_called_once()
        _, kwargs = dispatch_mocks["trigger_workflow_async"].call_args
        assert kwargs["user"] is end_user_mock
        dispatch_mocks["quota_charge"].commit.assert_called_once()
        dispatch_mocks["quota_charge"].refund.assert_not_called()
        dispatch_mocks["mark_rate_limited"].assert_not_called()
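A hedged skeleton of the branch structure those three tests cover, with names taken from the patched handles above (QuotaType.TRIGGER and the exact control flow are assumptions; the real dispatch loop carries more state):

    dispatched_count = 0
    for plugin_trigger in triggers:
        workflow = workflows.get(plugin_trigger.app_id)
        if workflow is None:
            continue  # missing workflow: log and move on
        try:
            quota_charge = QuotaService.reserve(QuotaType.TRIGGER, subscription.tenant_id)
        except QuotaExceededError:
            AppTriggerService.mark_tenant_triggers_rate_limited(subscription.tenant_id)
            return dispatched_count  # rate-limited: stop dispatching for this tenant
        AsyncWorkflowService.trigger_workflow_async(..., user=end_user)
        quota_charge.commit()  # bill only once the async trigger is queued
        dispatched_count += 1
    return dispatched_count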
286
api/uv.lock
generated
@ -604,29 +604,29 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.42.91"
|
||||
version = "1.42.96"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "botocore" },
|
||||
{ name = "jmespath" },
|
||||
{ name = "s3transfer" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a7/c0/98b8cec7ca22dde776df48c58940ae1abc425593959b7226e270760d726f/boto3-1.42.91.tar.gz", hash = "sha256:03d70532b17f7f84df37ca7e8c21553280454dea53ae12b15d1cfef9b16fcb8a", size = 113181, upload-time = "2026-04-17T19:31:06.251Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a6/2d/69fb3acd50bab83fb295c167d33c4b653faeb5fb0f42bfca4d9b69d6fb68/boto3-1.42.96.tar.gz", hash = "sha256:b38a9e4a3fbbee9017252576f1379780d0a5814768676c08df2f539d31fcdd68", size = 113203, upload-time = "2026-04-24T19:47:18.677Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/02/29/faba6521257c34085cc9b439ef98235b581772580f417fa3629728007270/boto3-1.42.91-py3-none-any.whl", hash = "sha256:04e72071cde022951ce7f81bd9933c90095ab8923e8ced61c8dacfe9edac0f5c", size = 140553, upload-time = "2026-04-17T19:31:02.57Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/9d/b3f617d011c42eb804d993103b8fa9acdce153e181a3042f58bfe33d7cb4/boto3-1.42.96-py3-none-any.whl", hash = "sha256:2f4566da2c209a98bdbfc874d813ef231c84ad24e4f815e9bc91de5f63351a24", size = 140557, upload-time = "2026-04-24T19:47:15.824Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "boto3-stubs"
|
||||
version = "1.42.92"
|
||||
version = "1.42.96"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "botocore-stubs" },
|
||||
{ name = "types-s3transfer" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fa/b4/7f472d64a89f6aa6b8e8eeadc876667b7e4edfb526c6118efe2b2c98ba17/boto3_stubs-1.42.92.tar.gz", hash = "sha256:4bc934069c5e8c7b3cdd2442569dae14e8272fe207d445bd38aa578b8463638f", size = 102696, upload-time = "2026-04-20T19:55:19.858Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/77/86/65f45f84621cccc2471871088bab8fe515b4346ba9e48d9001484ec440d6/boto3_stubs-1.42.96.tar.gz", hash = "sha256:1e7819c34d1eae8e5e3cfaf9d144fdcad65aad184b380488871de1d0b2851879", size = 102691, upload-time = "2026-04-24T20:25:13.984Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/ce/2fe2c6456f8dc0b8bb8d80e05e154c7975ec058991bedf54f3aeed634b79/boto3_stubs-1.42.92-py3-none-any.whl", hash = "sha256:b3994e60f0133b2dd3d9a88ceaeef48fa6367d9a9429426e919575768a1ad9c6", size = 70666, upload-time = "2026-04-20T19:55:16.398Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/51/bdac1ff9fd4321091183776c5adffce5fc7b4d0fec7e38af9064e24a2497/boto3_stubs-1.42.96-py3-none-any.whl", hash = "sha256:2c112e257f40006147a53f6f62075804689154271973b2807f5656feaa804216", size = 70668, upload-time = "2026-04-24T20:25:09.736Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
@ -636,16 +636,16 @@ bedrock-runtime = [
|
||||
|
||||
[[package]]
|
||||
name = "botocore"
|
||||
version = "1.42.91"
|
||||
version = "1.42.96"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "jmespath" },
|
||||
{ name = "python-dateutil" },
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/21/bc/a4b7c46471c2e789ad8c4c7acfd7f302fdb481d93ff870f441249b924ae6/botocore-1.42.91.tar.gz", hash = "sha256:d252e27bc454afdbf5ed3dc617aa423f2c855c081e98b7963093399483ecc698", size = 15213010, upload-time = "2026-04-17T19:30:50.793Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/61/77/2c333622a1d47cf5bf73cdcab0cb6c92addafbef2ec05f81b9f75687d9e5/botocore-1.42.96.tar.gz", hash = "sha256:75b3b841ffacaa944f645196655a21ca777591dd8911e732bfb6614545af0250", size = 15263344, upload-time = "2026-04-24T19:47:05.283Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/fc/24cc0a47c824f13933e210e9ad034b4fba22f7185b8d904c0fbf5a3b2be8/botocore-1.42.91-py3-none-any.whl", hash = "sha256:7a28c3cc6bfab5724ad18899d52402b776a0de7d87fa20c3c5270bcaaf199ce8", size = 14897344, upload-time = "2026-04-17T19:30:44.245Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/56/152c3a859ca1b9d77ed16deac3cf81682013677c68cf5715698781fc81bd/botocore-1.42.96-py3-none-any.whl", hash = "sha256:db2c3e2006628be6fde81a24124a6563c363d6982fb92728837cf174bad9d98a", size = 14945920, upload-time = "2026-04-24T19:47:00.323Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1058,7 +1058,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "cos-python-sdk-v5"
|
||||
version = "1.9.41"
|
||||
version = "1.9.42"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "crcmod" },
|
||||
@ -1067,9 +1067,9 @@ dependencies = [
|
||||
{ name = "six" },
|
||||
{ name = "xmltodict" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0e/38/c0029f413f51238aa2319715f45d74bcae931768e36c7e4604b02f407c6c/cos_python_sdk_v5-1.9.41.tar.gz", hash = "sha256:68f4be7d8fe27a1d186b3159b93c622816e398effdc236eddd442b86db592b82", size = 102625, upload-time = "2026-01-06T07:00:11.692Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/40/e3/b903b4acde334510f481d126a686bc4013710c00e2af34bff369511329ac/cos_python_sdk_v5-1.9.42.tar.gz", hash = "sha256:2a01d1868f50c5a70771f2b67da868f1dc6c6f3890f8009715313834404decc4", size = 102670, upload-time = "2026-04-23T11:08:27.949Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/2f/ead3fb551509fdc94e4a42093b770e3de2827ff7227570165df5e35c2a3e/cos_python_sdk_v5-1.9.41-py3-none-any.whl", hash = "sha256:f465aae43a4ba3f1caa8caeaca838d0395932f6848e89d6dde2807725e3c88a0", size = 98285, upload-time = "2026-01-06T06:43:02.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/bf/4ea660bb79d91fd41ba394605eccffd3d0943ed547b3fe2bdc6c7a52d2d1/cos_python_sdk_v5-1.9.42-py3-none-any.whl", hash = "sha256:02e583a1094e1794e6c0f56618d5190eb9eb7bfe75909f1dfac41bbee46e46c5", size = 98375, upload-time = "2026-04-23T11:05:14.519Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1299,6 +1299,7 @@ dependencies = [
|
||||
{ name = "celery" },
|
||||
{ name = "croniter" },
|
||||
{ name = "fastopenapi", extra = ["flask"] },
|
||||
{ name = "flask" },
|
||||
{ name = "flask-compress" },
|
||||
{ name = "flask-cors" },
|
||||
{ name = "flask-login" },
|
||||
@ -1577,10 +1578,11 @@ requires-dist = [
|
||||
{ name = "aliyun-log-python-sdk", specifier = ">=0.9.44,<1.0.0" },
|
||||
{ name = "azure-identity", specifier = ">=1.25.3,<2.0.0" },
|
||||
{ name = "bleach", specifier = ">=6.3.0" },
|
||||
{ name = "boto3", specifier = ">=1.42.91" },
|
||||
{ name = "boto3", specifier = ">=1.42.96" },
|
||||
{ name = "celery", specifier = ">=5.6.3" },
|
||||
{ name = "croniter", specifier = ">=6.2.2" },
|
||||
{ name = "fastopenapi", extras = ["flask"], specifier = "~=0.7.0" },
|
||||
{ name = "flask", specifier = ">=3.1.3,<4.0.0" },
|
||||
{ name = "flask-compress", specifier = ">=1.24,<2.0.0" },
|
||||
{ name = "flask-cors", specifier = ">=6.0.2" },
|
||||
{ name = "flask-login", specifier = ">=0.6.3,<1.0.0" },
|
||||
@ -1597,15 +1599,15 @@ requires-dist = [
|
||||
{ name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<1.0.0" },
|
||||
{ name = "httpx-sse", specifier = "~=0.4.0" },
|
||||
{ name = "json-repair", specifier = "~=0.59.4" },
|
||||
{ name = "opentelemetry-distro", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-distro", specifier = ">=0.62b1,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-celery", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-flask", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-httpx", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-redis", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-propagator-b3", specifier = ">=1.41.0,<2.0.0" },
|
||||
{ name = "opentelemetry-propagator-b3", specifier = ">=1.41.1,<2.0.0" },
|
||||
{ name = "psycogreen", specifier = ">=1.0.2" },
|
||||
{ name = "psycopg2-binary", specifier = ">=2.9.11" },
|
||||
{ name = "psycopg2-binary", specifier = ">=2.9.12" },
|
||||
{ name = "python-socketio", specifier = ">=5.13.0" },
|
||||
{ name = "readabilipy", specifier = ">=0.3.0,<1.0.0" },
|
||||
{ name = "redis", extras = ["hiredis"], specifier = ">=7.4.0" },
|
||||
@ -1617,15 +1619,15 @@ requires-dist = [
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "basedpyright", specifier = ">=1.39.3" },
|
||||
{ name = "boto3-stubs", specifier = ">=1.42.92" },
|
||||
{ name = "boto3-stubs", specifier = ">=1.42.96" },
|
||||
{ name = "celery-types", specifier = ">=0.23.0" },
|
||||
{ name = "coverage", specifier = ">=7.13.4" },
|
||||
{ name = "dotenv-linter", specifier = ">=0.7.0" },
|
||||
{ name = "faker", specifier = ">=40.15.0" },
|
||||
{ name = "hypothesis", specifier = ">=6.152.1" },
|
||||
{ name = "hypothesis", specifier = ">=6.152.3" },
|
||||
{ name = "import-linter", specifier = ">=2.3" },
|
||||
{ name = "lxml-stubs", specifier = ">=0.5.1" },
|
||||
{ name = "mypy", specifier = ">=1.20.1" },
|
||||
{ name = "mypy", specifier = ">=1.20.2" },
|
||||
{ name = "pandas-stubs", specifier = ">=3.0.0" },
|
||||
{ name = "pyrefly", specifier = ">=0.62.0" },
|
||||
{ name = "pytest", specifier = ">=9.0.3" },
|
||||
@ -1635,7 +1637,7 @@ dev = [
|
||||
{ name = "pytest-mock", specifier = ">=3.15.1" },
|
||||
{ name = "pytest-timeout", specifier = ">=2.4.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.8.0" },
|
||||
{ name = "ruff", specifier = ">=0.15.11" },
|
||||
{ name = "ruff", specifier = ">=0.15.12" },
|
||||
{ name = "scipy-stubs", specifier = ">=1.17.1.4" },
|
||||
{ name = "testcontainers", specifier = ">=4.14.2" },
|
||||
{ name = "types-aiofiles", specifier = ">=25.1.0" },
|
||||
@ -1660,7 +1662,7 @@ dev = [
{ name = "types-pexpect", specifier = ">=4.9.0" },
{ name = "types-protobuf", specifier = ">=7.34.1" },
{ name = "types-psutil", specifier = ">=7.2.2" },
{ name = "types-psycopg2", specifier = ">=2.9.21" },
{ name = "types-psycopg2", specifier = ">=2.9.21.20260422" },
{ name = "types-pygments", specifier = ">=2.20.0" },
{ name = "types-pymysql", specifier = ">=1.1.0" },
{ name = "types-pyopenssl", specifier = ">=24.1.0" },
@ -1677,17 +1679,17 @@ dev = [
{ name = "types-tensorflow", specifier = ">=2.18.0.20260408" },
{ name = "types-tqdm", specifier = ">=4.67.3.20260408" },
{ name = "types-ujson", specifier = ">=5.10.0" },
{ name = "xinference-client", specifier = ">=2.5.0" },
{ name = "xinference-client", specifier = ">=2.7.0" },
]
storage = [
{ name = "azure-storage-blob", specifier = ">=12.28.0" },
{ name = "bce-python-sdk", specifier = ">=0.9.70" },
{ name = "cos-python-sdk-v5", specifier = ">=1.9.41" },
{ name = "cos-python-sdk-v5", specifier = ">=1.9.42" },
{ name = "esdk-obs-python", specifier = ">=3.22.2" },
{ name = "google-cloud-storage", specifier = ">=3.10.1" },
{ name = "opendal", specifier = ">=0.46.0" },
{ name = "oss2", specifier = ">=2.19.1" },
{ name = "supabase", specifier = ">=2.28.3" },
{ name = "supabase", specifier = ">=2.29.0" },
{ name = "tos", specifier = ">=2.9.0" },
]
tools = [
@ -1774,7 +1776,7 @@ vdb-upstash = [{ name = "dify-vdb-upstash", editable = "providers/vdb/vdb-upstas
vdb-vastbase = [{ name = "dify-vdb-vastbase", editable = "providers/vdb/vdb-vastbase" }]
vdb-vikingdb = [{ name = "dify-vdb-vikingdb", editable = "providers/vdb/vdb-vikingdb" }]
vdb-weaviate = [{ name = "dify-vdb-weaviate", editable = "providers/vdb/vdb-weaviate" }]
vdb-xinference = [{ name = "xinference-client", specifier = ">=2.5.0" }]
vdb-xinference = [{ name = "xinference-client", specifier = ">=2.7.0" }]

[[package]]
name = "dify-trace-aliyun"
@ -2655,14 +2657,14 @@ wheels = [

[[package]]
name = "gitpython"
version = "3.1.45"
version = "3.1.47"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "gitdb" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076, upload-time = "2025-07-24T03:45:54.871Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c1/bd/50db468e9b1310529a19fce651b3b0e753b5c07954d486cba31bbee9a5d5/gitpython-3.1.47.tar.gz", hash = "sha256:dba27f922bd2b42cb54c87a8ab3cb6beb6bf07f3d564e21ac848913a05a8a3cd", size = 216978, upload-time = "2026-04-22T02:44:44.059Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168, upload-time = "2025-07-24T03:45:52.517Z" },
{ url = "https://files.pythonhosted.org/packages/f2/c5/a1bc0996af85757903cf2bf444a7824e68e0035ce63fb41d6f76f9def68b/gitpython-3.1.47-py3-none-any.whl", hash = "sha256:489f590edfd6d20571b2c0e72c6a6ac6915ee8b8cd04572330e3842207a78905", size = 209547, upload-time = "2026-04-22T02:44:41.271Z" },
]

[[package]]
@ -3317,14 +3319,14 @@ wheels = [

[[package]]
name = "hypothesis"
version = "6.152.1"
version = "6.152.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/64/b1/c32bcddb9aab9e3abc700f1f56faf14e7655c64a16ca47701a57362276ea/hypothesis-6.152.1.tar.gz", hash = "sha256:4f4ed934eee295dd84ee97592477d23e8dc03e9f12ae0ee30a4e7c9ef3fca3b0", size = 465029, upload-time = "2026-04-14T22:29:24.062Z" }
sdist = { url = "https://files.pythonhosted.org/packages/70/90/fc0b263b6f2622e5f8d2aa93f2e95ba79718a5faa7d2a74bfab10d6b0905/hypothesis-6.152.3.tar.gz", hash = "sha256:c4e5300d3755b6c8a270a28fe5abff40153e927328e89d2bb0229c1384618998", size = 466478, upload-time = "2026-04-26T17:31:07.657Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/83/860fb3075e00b0fc19a22a2301bc3c96f00437558c3911bdd0a3573a4a53/hypothesis-6.152.1-py3-none-any.whl", hash = "sha256:40a3619d9e0cb97b018857c7986f75cf5de2e5ec0fa8a0b172d00747758f749e", size = 530752, upload-time = "2026-04-14T22:29:20.893Z" },
{ url = "https://files.pythonhosted.org/packages/90/38/15475b91a4c12721d2be3349e9d6cf8649c76ed9bc1287e2de7c8d06c261/hypothesis-6.152.3-py3-none-any.whl", hash = "sha256:4b47f00916c858ed49cf870a2f08b04e5fff5afae0bb78f3b4a6d9c74fd6c7bc", size = 532154, upload-time = "2026-04-26T17:31:04.42Z" },
]

[[package]]
@ -3945,7 +3947,7 @@ wheels = [

[[package]]
name = "mypy"
version = "1.20.1"
version = "1.20.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "librt", marker = "platform_python_implementation != 'PyPy'" },
@ -3953,16 +3955,16 @@ dependencies = [
{ name = "pathspec" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0b/3d/5b373635b3146264eb7a68d09e5ca11c305bbb058dfffbb47c47daf4f632/mypy-1.20.1.tar.gz", hash = "sha256:6fc3f4ecd52de81648fed1945498bf42fa2993ddfad67c9056df36ae5757f804", size = 3815892, upload-time = "2026-04-13T02:46:51.474Z" }
sdist = { url = "https://files.pythonhosted.org/packages/04/af/e3d4b3e9ec91a0ff9aabfdb38692952acf49bbb899c2e4c29acb3a6da3ae/mypy-1.20.2.tar.gz", hash = "sha256:e8222c26daaafd9e8626dec58ae36029f82585890589576f769a650dd20fd665", size = 3817349, upload-time = "2026-04-21T17:12:28.473Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/69/1b/75a7c825a02781ca10bc2f2f12fba2af5202f6d6005aad8d2d1f264d8d78/mypy-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:36ee2b9c6599c230fea89bbd79f401f9f9f8e9fcf0c777827789b19b7da90f51", size = 14494077, upload-time = "2026-04-13T02:45:55.085Z" },
{ url = "https://files.pythonhosted.org/packages/b0/54/5e5a569ea5c2b4d48b729fb32aa936eeb4246e4fc3e6f5b3d36a2dfbefb9/mypy-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fba3fb0968a7b48806b0c90f38d39296f10766885a94c83bd21399de1e14eb28", size = 13319495, upload-time = "2026-04-13T02:45:29.674Z" },
{ url = "https://files.pythonhosted.org/packages/6f/a4/a1945b19f33e91721b59deee3abb484f2fa5922adc33bb166daf5325d76d/mypy-1.20.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef1415a637cd3627d6304dfbeddbadd21079dafc2a8a753c477ce4fc0c2af54f", size = 13696948, upload-time = "2026-04-13T02:46:15.006Z" },
{ url = "https://files.pythonhosted.org/packages/b2/c6/75e969781c2359b2f9c15b061f28ec6d67c8b61865ceda176e85c8e7f2de/mypy-1.20.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef3461b1ad5cd446e540016e90b5984657edda39f982f4cc45ca317b628f5a37", size = 14706744, upload-time = "2026-04-13T02:46:00.482Z" },
{ url = "https://files.pythonhosted.org/packages/a8/6e/b221b1de981fc4262fe3e0bf9ec272d292dfe42394a689c2d49765c144c4/mypy-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:542dd63c9e1339b6092eb25bd515f3a32a1453aee8c9521d2ddb17dacd840237", size = 14949035, upload-time = "2026-04-13T02:45:06.021Z" },
{ url = "https://files.pythonhosted.org/packages/ca/4b/298ba2de0aafc0da3ff2288da06884aae7ba6489bc247c933f87847c41b3/mypy-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:1d55c7cd8ca22e31f93af2a01160a9e95465b5878de23dba7e48116052f20a8d", size = 10883216, upload-time = "2026-04-13T02:45:47.232Z" },
{ url = "https://files.pythonhosted.org/packages/c7/f9/5e25b8f0b8cb92f080bfed9c21d3279b2a0b6a601cdca369a039ba84789d/mypy-1.20.1-cp312-cp312-win_arm64.whl", hash = "sha256:f5b84a79070586e0d353ee07b719d9d0a4aa7c8ee90c0ea97747e98cbe193019", size = 9814299, upload-time = "2026-04-13T02:45:21.934Z" },
{ url = "https://files.pythonhosted.org/packages/d8/28/926bd972388e65a39ee98e188ccf67e81beb3aacfd5d6b310051772d974b/mypy-1.20.1-py3-none-any.whl", hash = "sha256:1aae28507f253fe82d883790d1c0a0d35798a810117c88184097fe8881052f06", size = 2636553, upload-time = "2026-04-13T02:46:30.45Z" },
{ url = "https://files.pythonhosted.org/packages/71/4e/7560e4528db9e9b147e4c0f22660466bf30a0a1fe3d63d1b9d3b0fd354ee/mypy-1.20.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4dbfcf869f6b0517f70cf0030ba6ea1d6645e132337a7d5204a18d8d5636c02b", size = 14539393, upload-time = "2026-04-21T17:07:12.52Z" },
{ url = "https://files.pythonhosted.org/packages/32/d9/34a5efed8124f5a9234f55ac6a4ced4201e2c5b81e1109c49ad23190ec8c/mypy-1.20.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b6481b228d072315b053210b01ac320e1be243dc17f9e5887ef167f23f5fae4", size = 13361642, upload-time = "2026-04-21T17:06:53.742Z" },
{ url = "https://files.pythonhosted.org/packages/d1/14/eb377acf78c03c92d566a1510cda8137348215b5335085ef662ab82ecd3a/mypy-1.20.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34397cdced6b90b836e38182076049fdb41424322e0b0728c946b0939ebdf9f6", size = 13740347, upload-time = "2026-04-21T17:12:04.73Z" },
{ url = "https://files.pythonhosted.org/packages/b9/94/7e4634a32b641aa1c112422eed1bbece61ee16205f674190e8b536f884de/mypy-1.20.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5da6976f20cae27059ea8d0c86e7cef3de720e04c4bb9ee18e3690fdb792066", size = 14734042, upload-time = "2026-04-21T17:07:43.16Z" },
{ url = "https://files.pythonhosted.org/packages/7a/f3/f7e62395cb7f434541b4491a01149a4439e28ace4c0c632bbf5431e92d1f/mypy-1.20.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:56908d7e08318d39f85b1f0c6cfd47b0cac1a130da677630dac0de3e0623e102", size = 14964958, upload-time = "2026-04-21T17:11:00.665Z" },
{ url = "https://files.pythonhosted.org/packages/3e/0d/47e3c3a0ec2a876e35aeac365df3cac7776c36bbd4ed18cc521e1b9d255b/mypy-1.20.2-cp312-cp312-win_amd64.whl", hash = "sha256:d52ad8d78522da1d308789df651ee5379088e77c76cb1994858d40a426b343b9", size = 10911340, upload-time = "2026-04-21T17:10:49.179Z" },
{ url = "https://files.pythonhosted.org/packages/d6/b2/6c852d72e0ea8b01f49da817fb52539993cde327e7d010e0103dc12d0dac/mypy-1.20.2-cp312-cp312-win_arm64.whl", hash = "sha256:785b08db19c9f214dc37d65f7c165d19a30fcecb48abfa30f31b01b5acaabb58", size = 9833947, upload-time = "2026-04-21T17:09:05.267Z" },
{ url = "https://files.pythonhosted.org/packages/28/9a/f23c163e25b11074188251b0b5a0342625fc1cdb6af604757174fa9acc9b/mypy-1.20.2-py3-none-any.whl", hash = "sha256:a94c5a76ab46c5e6257c7972b6c8cff0574201ca7dc05647e33e795d78680563", size = 2637314, upload-time = "2026-04-21T17:05:54.5Z" },
]

[[package]]
@ -4233,29 +4235,29 @@ wheels = [

[[package]]
name = "opentelemetry-api"
version = "1.41.0"
version = "1.41.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "importlib-metadata" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/47/8e/3778a7e87801d994869a9396b9fc2a289e5f9be91ff54a27d41eace494b0/opentelemetry_api-1.41.0.tar.gz", hash = "sha256:9421d911326ec12dee8bc933f7839090cad7a3f13fcfb0f9e82f8174dc003c09", size = 71416, upload-time = "2026-04-09T14:38:34.544Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fa/fc/b7564cbef36601aef0d6c9bc01f7badb64be8e862c2e1c3c5c3b43b53e4f/opentelemetry_api-1.41.1.tar.gz", hash = "sha256:0ad1814d73b875f84494387dae86ce0b12c68556331ce6ce8fe789197c949621", size = 71416, upload-time = "2026-04-24T13:15:38.262Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/ee/99ab786653b3bda9c37ade7e24a7b607a1b1f696063172768417539d876d/opentelemetry_api-1.41.0-py3-none-any.whl", hash = "sha256:0e77c806e6a89c9e4f8d372034622f3e1418a11bdbe1c80a50b3d3397ad0fa4f", size = 69007, upload-time = "2026-04-09T14:38:11.833Z" },
{ url = "https://files.pythonhosted.org/packages/29/59/3e7118ed140f76b0982ba4321bdaed1997a0473f9720de2d10788a577033/opentelemetry_api-1.41.1-py3-none-any.whl", hash = "sha256:a22df900e75c76dc08440710e51f52f1aa6b451b429298896023e60db5b3139f", size = 69007, upload-time = "2026-04-24T13:15:15.662Z" },
]

[[package]]
name = "opentelemetry-distro"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-sdk" },
]
sdist = { url = "https://files.pythonhosted.org/packages/72/c6/52b0dbcc8fbdecf179047921940516cbb8aaf05f6b737faa526ad76fec51/opentelemetry_distro-0.62b0.tar.gz", hash = "sha256:aa0308fbe50ad8f17d4446982dbf26870e20b8031ba38d8e1224ecf7aedd3184", size = 2611, upload-time = "2026-04-09T14:40:20.404Z" }
sdist = { url = "https://files.pythonhosted.org/packages/45/f1/314e5015e353a001948e03f48a6935ca7ef00e99107b8e3e63871426b0f6/opentelemetry_distro-0.62b1.tar.gz", hash = "sha256:0169b128b9d6d5cab809ae4c4fb3d576bfc5d3f30b32d8a43b770b587f04f253", size = 2606, upload-time = "2026-04-24T13:22:29.403Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/7e/5858bba1c7ed880c7b0fe7d9a1ea40ab8affd18c9ebc1e16c2d69c501da1/opentelemetry_distro-0.62b0-py3-none-any.whl", hash = "sha256:23e9065a35cef12868ad5efb18ce9c88a9103800256b318dec4c9c850c6c78c1", size = 3348, upload-time = "2026-04-09T14:39:17.406Z" },
{ url = "https://files.pythonhosted.org/packages/b9/19/c58c119a299298f03d0797fcb780f221880e8d725959c71bcfb4ae034738/opentelemetry_distro-0.62b1-py3-none-any.whl", hash = "sha256:fd938de6ca1d047ffd15a65fa09d89f4b4ca7dd97ef25601a12d6d10efd693a0", size = 3348, upload-time = "2026-04-24T13:21:27.389Z" },
]

[[package]]
@ -4321,7 +4323,7 @@ wheels = [

[[package]]
name = "opentelemetry-instrumentation"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -4329,14 +4331,14 @@ dependencies = [
{ name = "packaging" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f9/fd/b8e90bb340957f059084376f94cff336b0e871a42feba7d3f7342365e987/opentelemetry_instrumentation-0.62b0.tar.gz", hash = "sha256:aa1b0b9ab2e1722c2a8a5384fb016fc28d30bba51826676c8036074790d2861e", size = 34042, upload-time = "2026-04-09T14:40:22.843Z" }
sdist = { url = "https://files.pythonhosted.org/packages/52/cb/0523b92c112a6cc70be43724343dc45225d3af134419844d7879a07755d4/opentelemetry_instrumentation-0.62b1.tar.gz", hash = "sha256:90e92a905ba4f84db06ac3aec96701df6c079b2d66e9379f8739f0a1bdcc7f45", size = 34043, upload-time = "2026-04-24T13:22:31.997Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/b6/3356d2e335e3c449c5183e9b023f30f04f1b7073a6583c68745ea2e704b1/opentelemetry_instrumentation-0.62b0-py3-none-any.whl", hash = "sha256:30d4e76486eae64fb095264a70c2c809c4bed17b73373e53091470661f7d477c", size = 34158, upload-time = "2026-04-09T14:39:21.428Z" },
{ url = "https://files.pythonhosted.org/packages/4d/0f/45adbaea1f81b847cffdcee4f4b5f89297e42facf7fac78c7aaac4c38e75/opentelemetry_instrumentation-0.62b1-py3-none-any.whl", hash = "sha256:976fc6e640f2006599e97429c949e622c108d0c17c2059347d1e6c93c707f257", size = 34163, upload-time = "2026-04-24T13:21:31.722Z" },
]

[[package]]
name = "opentelemetry-instrumentation-asgi"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref" },
@ -4345,28 +4347,28 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f1/38/999bf777774878971c2716de4b7a03cd57a7decb4af25090e703b79fa0e5/opentelemetry_instrumentation_asgi-0.62b0.tar.gz", hash = "sha256:93cde8c62e5918a3c1ff9ba020518127300e5e0816b7e8b14baf46a26ba619fc", size = 26779, upload-time = "2026-04-09T14:40:26.566Z" }
sdist = { url = "https://files.pythonhosted.org/packages/54/43/b2f0703ff46718ff7b17d7fbf8e9d7f20e26a23c7c325092dd762d09cf9d/opentelemetry_instrumentation_asgi-0.62b1.tar.gz", hash = "sha256:7cf5f5d5c493bbb1edd2bd6d51fa879d964e94048904017258a32ffa47329310", size = 26781, upload-time = "2026-04-24T13:22:37.158Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/25/cf/29df82f5870178143bdb5c9a7be044b9f78c71e1c5dcf995242e86d80158/opentelemetry_instrumentation_asgi-0.62b0-py3-none-any.whl", hash = "sha256:89b62a6f996b260b162f515c25e6d78e39286e4cbe2f935899e51b32f31027e2", size = 17011, upload-time = "2026-04-09T14:39:27.305Z" },
{ url = "https://files.pythonhosted.org/packages/d0/41/968c1fe12fb90abffca6620e65d4af91451c02ecca8f74a17a62cac490de/opentelemetry_instrumentation_asgi-0.62b1-py3-none-any.whl", hash = "sha256:b7f89be48528512619bd54fa2459f72afb1695ba71d7024d382ad96d467e7fa8", size = 17011, upload-time = "2026-04-24T13:21:38.006Z" },
]

[[package]]
name = "opentelemetry-instrumentation-celery"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-semantic-conventions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/01/b4/20a3c8c669dc45aa3703c0370041d67e8be613f1829523cdaf634a5f9626/opentelemetry_instrumentation_celery-0.62b0.tar.gz", hash = "sha256:55e8fa48e5b886bcca448fa32e28a6cc2165157745e8328de479a826d3903095", size = 14808, upload-time = "2026-04-09T14:40:31.603Z" }
sdist = { url = "https://files.pythonhosted.org/packages/35/86/9e78c174b2f6ea92af3f99aa7488807b74290a5cd44a8e05bfbfd7b109be/opentelemetry_instrumentation_celery-0.62b1.tar.gz", hash = "sha256:f0035abd464a2989414a9c5ecdd79a25c87bd8c43f96c7f39e07000c6f25dfef", size = 14809, upload-time = "2026-04-24T13:22:45.656Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f6/60/cf951e6bd6ec62ec55bd2384e0ba9841ea38f2d128c773d85dc60da97172/opentelemetry_instrumentation_celery-0.62b0-py3-none-any.whl", hash = "sha256:cadfd3e65287a36099dce5ba7e05d98e4c5f9479a455241e01d140ecc5c10935", size = 13864, upload-time = "2026-04-09T14:39:35.009Z" },
{ url = "https://files.pythonhosted.org/packages/24/51/f38a31ac8f8e3bd365f301f697661679addaf548d52a05cfdde4448a5493/opentelemetry_instrumentation_celery-0.62b1-py3-none-any.whl", hash = "sha256:50567a47b7adc4ea552d09709de4d73fea7b4ff24ab0e9d38739d03fcd3f95ef", size = 13864, upload-time = "2026-04-24T13:21:46.557Z" },
]

[[package]]
name = "opentelemetry-instrumentation-fastapi"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -4375,14 +4377,14 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/37/09/92740c6d114d1bef392557a03ae6de64065c83c1b331dae9b57fe718497c/opentelemetry_instrumentation_fastapi-0.62b0.tar.gz", hash = "sha256:e4748e4e575077e08beaf2c5d2f369da63dd90882d89d73c4192a97356637dec", size = 25056, upload-time = "2026-04-09T14:40:36.438Z" }
sdist = { url = "https://files.pythonhosted.org/packages/77/38/91780475a25370b6d483afbaed3e1e170459d6351c5f7c08d66b65e2172e/opentelemetry_instrumentation_fastapi-0.62b1.tar.gz", hash = "sha256:b377d4ba32868fb1ff0f64da3fcdd3aa154d698fc83d65f5d380ea21bf31ee19", size = 25054, upload-time = "2026-04-24T13:22:50.222Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/64/bb/186ffe0fde0ad33ceb50e1d3596cc849b732d3b825592a6a507a40c8c49b/opentelemetry_instrumentation_fastapi-0.62b0-py3-none-any.whl", hash = "sha256:06d3272ad15f9daea5a0a27c32831aff376110a4b0394197120256ef6d610e6e", size = 13482, upload-time = "2026-04-09T14:39:43.446Z" },
{ url = "https://files.pythonhosted.org/packages/8c/6f/602e4081d3fe82731aff7e3e9c2f1662d85701841d6dc25f16a1874e11cd/opentelemetry_instrumentation_fastapi-0.62b1-py3-none-any.whl", hash = "sha256:93fa9cc4f315819aee5f4fceb6196c1e5b0fbd789c5520c631de228bd3e5285b", size = 13484, upload-time = "2026-04-24T13:21:54.538Z" },
]

[[package]]
name = "opentelemetry-instrumentation-flask"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -4392,14 +4394,14 @@ dependencies = [
{ name = "opentelemetry-util-http" },
{ name = "packaging" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8e/86/522294f6a80d59560d8f722da59513d2ed2d53c6178fa109789dacc5dd50/opentelemetry_instrumentation_flask-0.62b0.tar.gz", hash = "sha256:330e903c0e92b06aae32f9eb7b8a923599d7a29440f50841a59dbba34ec6dd9f", size = 24100, upload-time = "2026-04-09T14:40:37.111Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d3/08/e52e6eab550db1736c5657a7e38484c22a101009e77fc67eb00b272a96c1/opentelemetry_instrumentation_flask-0.62b1.tar.gz", hash = "sha256:37662ad159570dab1e3017a2a415193c014a5798fc32d33f3bdd254469e8c69a", size = 24100, upload-time = "2026-04-24T13:22:50.845Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bc/c8/9f3bb38281bcb50c93c3d2358b303645f6917bf972c167484c09f9a97ff1/opentelemetry_instrumentation_flask-0.62b0-py3-none-any.whl", hash = "sha256:8c1f8986ec3887d08899d2eb654625252c929105174911b3b50dcf12b1001807", size = 16006, upload-time = "2026-04-09T14:39:44.401Z" },
{ url = "https://files.pythonhosted.org/packages/2b/58/d0e5e82d225365987bd192576095b1125f6b172decc4db79963373c92b74/opentelemetry_instrumentation_flask-0.62b1-py3-none-any.whl", hash = "sha256:6df32684a7dd5dab5feb499c0748a4628b3fd139bffd8171326fb479aa525367", size = 16007, upload-time = "2026-04-24T13:21:55.462Z" },
]

[[package]]
name = "opentelemetry-instrumentation-httpx"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -4408,14 +4410,14 @@ dependencies = [
{ name = "opentelemetry-util-http" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/77/a7/63e2c6325c8e99cd9b8e0229a8b61c37520ee537214a2c8d514e84486a94/opentelemetry_instrumentation_httpx-0.62b0.tar.gz", hash = "sha256:d865398db3f3c289ba226e355bf4d94460a4301c0c8916e3136caea55ae18000", size = 24182, upload-time = "2026-04-09T14:40:38.719Z" }
sdist = { url = "https://files.pythonhosted.org/packages/33/cb/7a418e69c7dad281803529cb4f6de1b747d802cca44c38032668690b4836/opentelemetry_instrumentation_httpx-0.62b1.tar.gz", hash = "sha256:a1fac9bcc3a6ef5996a7990563f1af0798468b2c146de535fd598369383fba7e", size = 24181, upload-time = "2026-04-24T13:22:52.124Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c0/5e/7d5fc28487637871b015128cd5dbb3c36f6d343a9098b893bd803d5a9cca/opentelemetry_instrumentation_httpx-0.62b0-py3-none-any.whl", hash = "sha256:c7660b939c12608fec67743126e9b4dc23dceef0ed631c415924966b0d1579e3", size = 17200, upload-time = "2026-04-09T14:39:46.618Z" },
{ url = "https://files.pythonhosted.org/packages/c7/e0/eca824e9492ccec00e055bdd243aeda8eb7c5eda746d98af4d7a2d97ecf3/opentelemetry_instrumentation_httpx-0.62b1-py3-none-any.whl", hash = "sha256:88614015df451d61bc7e73f22524e6f223611f80b6caad2f6bdcbe05fa0df653", size = 17201, upload-time = "2026-04-24T13:21:58.072Z" },
]

[[package]]
name = "opentelemetry-instrumentation-redis"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -4423,14 +4425,14 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/55/7d/5acdb4e4e36c522f9393cfa91f7a431ee089663c77855e524bc97f993020/opentelemetry_instrumentation_redis-0.62b0.tar.gz", hash = "sha256:513bc6679ee251436f0aff7be7ddab6186637dde09a795a8dc9659103f103bef", size = 14796, upload-time = "2026-04-09T14:40:48.391Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f5/ff/35414ad80409bd9e472c7959832524c5f2c8f63965af08c41c2b42d3a6a6/opentelemetry_instrumentation_redis-0.62b1.tar.gz", hash = "sha256:2d3c421d95e05ade075bee5becbe34e743b1cdf5bdee2085cb524f88c4f13dcb", size = 14796, upload-time = "2026-04-24T13:23:01.138Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/de/42/a13a7da074c972a51c14277e7f747e90037b9d815515c73b802e95897690/opentelemetry_instrumentation_redis-0.62b0-py3-none-any.whl", hash = "sha256:92ada3d7bdf395785f660549b0e6e8e5bac7cab80e7f1369a7d02228b27684c3", size = 15501, upload-time = "2026-04-09T14:40:00.69Z" },
{ url = "https://files.pythonhosted.org/packages/31/37/bc2271f3472e3041eeade8b8da1cfd3b06badae76fe5d0ff135b6285e70c/opentelemetry_instrumentation_redis-0.62b1-py3-none-any.whl", hash = "sha256:9aedd02c1acf631251d1d676634db47da9da04e0a626cd0c7d83fe0eb791d165", size = 15501, upload-time = "2026-04-24T13:22:11.705Z" },
]

[[package]]
name = "opentelemetry-instrumentation-sqlalchemy"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -4439,14 +4441,14 @@ dependencies = [
{ name = "packaging" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2a/3d/40adc8c38e5be017ceb230a28ca57ca81981d4dc0c4b902cc930c77fd14f/opentelemetry_instrumentation_sqlalchemy-0.62b0.tar.gz", hash = "sha256:d02f85b83f349e9ef70a34cb3f4c3a3481fa15b11747f09209818663e161cac4", size = 18539, upload-time = "2026-04-09T14:40:50.251Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1a/53/fa511ab998dd66b4eb66a36d8c262d0604cc5bad7a9c82e923be038dda97/opentelemetry_instrumentation_sqlalchemy-0.62b1.tar.gz", hash = "sha256:bdeac015351a1de057e8ea39f1fe26c9e60ea6bedbf1d5ad6a8262a516b3dc7d", size = 18539, upload-time = "2026-04-24T13:23:03.169Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/e0/77954ac593f34740dc32e28a15fe7170e90f6ba6398eaaa5c88b34c05ed1/opentelemetry_instrumentation_sqlalchemy-0.62b0-py3-none-any.whl", hash = "sha256:ec576e0660080d9d15ce4fa44d2a07fff8cb4b796a84344cb0f2c9e5d6e26f79", size = 15534, upload-time = "2026-04-09T14:40:03.957Z" },
{ url = "https://files.pythonhosted.org/packages/2d/c5/aa2abcf8752a435536901636c5d540ba7a2c0ba2c4e98c7d119482e04262/opentelemetry_instrumentation_sqlalchemy-0.62b1-py3-none-any.whl", hash = "sha256:613542ecd52aabeec83d8813b5c287a3fb6c9ac3cd660694c94c0571f066e972", size = 15536, upload-time = "2026-04-24T13:22:14.767Z" },
]

[[package]]
name = "opentelemetry-instrumentation-wsgi"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -4454,22 +4456,22 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b7/5c/ed45ff053d76c94c59173f2bcde3d61052adb10214f70f028f760aa56625/opentelemetry_instrumentation_wsgi-0.62b0.tar.gz", hash = "sha256:d179f969ecce0c29a15ffd4d982580dfae57c8ff2fd4d9366e299a6d4815e668", size = 19922, upload-time = "2026-04-09T14:40:56.227Z" }
sdist = { url = "https://files.pythonhosted.org/packages/36/db/19f1d66cead56e52291fccaa235b07ad45a5c24be1c740301a840c68235a/opentelemetry_instrumentation_wsgi-0.62b1.tar.gz", hash = "sha256:02a364fd9c940a46b19c825c5bfe386b007d5292ef91573894164836953fe831", size = 19919, upload-time = "2026-04-24T13:23:09.796Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f6/cb/753dbbe624df88594fa35a3ff26302fea22623385ed64462f6c8ee7c81eb/opentelemetry_instrumentation_wsgi-0.62b0-py3-none-any.whl", hash = "sha256:2714ab5ab2f35e67dc181ffa3a43fa15313c85c09b4d024c36d72cf1efa29c9a", size = 14628, upload-time = "2026-04-09T14:40:13.529Z" },
{ url = "https://files.pythonhosted.org/packages/f7/0e/60fec0780e16929c821df7c55c4f0bea45d6ef562e662c5f27f47d0ff195/opentelemetry_instrumentation_wsgi-0.62b1-py3-none-any.whl", hash = "sha256:a2df11de0113f504043e2b0fa0288238a93ee49ff607bd5100cb2d3a75bc771f", size = 14629, upload-time = "2026-04-24T13:22:23.951Z" },
]

[[package]]
name = "opentelemetry-propagator-b3"
version = "1.41.0"
version = "1.41.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ef/43/cea77e171c014324876104cf2a17c78f5e931408b977b9e64979f950912c/opentelemetry_propagator_b3-1.41.0.tar.gz", hash = "sha256:ef98b715b3a05e8b0b03ebaea1bf295b4ad61a0e306e2d1da81d32af7395e6ad", size = 9588, upload-time = "2026-04-09T14:38:43.328Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8a/ef/e2c1093e21fb9b5f8e44fa6cebacf2cbb60b47b4646d652805dcce48f3b8/opentelemetry_propagator_b3-1.41.1.tar.gz", hash = "sha256:e8563b588aa5f1f90740dcd678f04d5634de2d4e0077b7ca4a177c71a02f745d", size = 9587, upload-time = "2026-04-24T13:15:48.349Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/50/c1/11345c06774ec6ed6d89e3994dd1f62ad2ab41dfeb312eacd6b2a2323280/opentelemetry_propagator_b3-1.41.0-py3-none-any.whl", hash = "sha256:0b085c26ba59fcb66771226f967e91886bdeef998b3b5f2e9da6a604918c6f90", size = 8923, upload-time = "2026-04-09T14:38:26.865Z" },
{ url = "https://files.pythonhosted.org/packages/c8/78/388ea1ae84fd3d2858c782f0410d73d936ffbd1a54711e45874490c576e7/opentelemetry_propagator_b3-1.41.1-py3-none-any.whl", hash = "sha256:f4b045d0aa4b5c17ac25a371bf3d08173a2f4b8f19a94357e57ae690c15415dc", size = 8921, upload-time = "2026-04-24T13:15:30.408Z" },
]

[[package]]
@ -4486,38 +4488,38 @@ wheels = [

[[package]]
name = "opentelemetry-sdk"
version = "1.41.0"
version = "1.41.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f8/0e/a586df1186f9f56b5a0879d52653effc40357b8e88fc50fe300038c3c08b/opentelemetry_sdk-1.41.0.tar.gz", hash = "sha256:7bddf3961131b318fc2d158947971a8e37e38b1cd23470cfb72b624e7cc108bd", size = 230181, upload-time = "2026-04-09T14:38:47.225Z" }
sdist = { url = "https://files.pythonhosted.org/packages/58/d0/54ee30dab82fb0acda23d144502771ff76ef8728459c83c3e89ef9fb1825/opentelemetry_sdk-1.41.1.tar.gz", hash = "sha256:724b615e1215b5aeacda0abb8a6a8922c9a1853068948bd0bd225a56d0c792e6", size = 230180, upload-time = "2026-04-24T13:15:50.991Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/13/a7825118208cb32e6a4edcd0a99f925cbef81e77b3b0aedfd9125583c543/opentelemetry_sdk-1.41.0-py3-none-any.whl", hash = "sha256:a596f5687964a3e0d7f8edfdcf5b79cbca9c93c7025ebf5fb00f398a9443b0bd", size = 180214, upload-time = "2026-04-09T14:38:30.657Z" },
{ url = "https://files.pythonhosted.org/packages/b4/e7/a1420b698aad018e1cf60fdbaaccbe49021fb415e2a0d81c242f4c518f54/opentelemetry_sdk-1.41.1-py3-none-any.whl", hash = "sha256:edee379c126c1bce952b0c812b48fe8ff35b30df0eecf17e98afa4d598b7d85d", size = 180213, upload-time = "2026-04-24T13:15:33.767Z" },
]

[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a3/b0/c14f723e86c049b7bf8ff431160d982519b97a7be2857ed2247377397a24/opentelemetry_semantic_conventions-0.62b0.tar.gz", hash = "sha256:cbfb3c8fc259575cf68a6e1b94083cc35adc4a6b06e8cf431efa0d62606c0097", size = 145753, upload-time = "2026-04-09T14:38:48.274Z" }
sdist = { url = "https://files.pythonhosted.org/packages/9e/de/911ac9e309052aca1b20b2d5549d3db45d1011e1a610e552c6ccdd1b64f8/opentelemetry_semantic_conventions-0.62b1.tar.gz", hash = "sha256:c5cc6e04a7f8c7cdd30be2ed81499fa4e75bfbd52c9cb70d40af1f9cd3619802", size = 145750, upload-time = "2026-04-24T13:15:52.236Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/6c/5e86fa1759a525ef91c2d8b79d668574760ff3f900d114297765eb8786cb/opentelemetry_semantic_conventions-0.62b0-py3-none-any.whl", hash = "sha256:0ddac1ce59eaf1a827d9987ab60d9315fb27aea23304144242d1fcad9e16b489", size = 231619, upload-time = "2026-04-09T14:38:32.394Z" },
{ url = "https://files.pythonhosted.org/packages/eb/a6/83dc2ab6fa397ee66fba04fe2e74bdf7be3b3870005359ceb7689103c058/opentelemetry_semantic_conventions-0.62b1-py3-none-any.whl", hash = "sha256:cf506938103d331fbb78eded0d9788095f7fd59016f2bda813c3324e5a74a93c", size = 231620, upload-time = "2026-04-24T13:15:35.454Z" },
]

[[package]]
name = "opentelemetry-util-http"
version = "0.62b0"
version = "0.62b1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/830f7c57135158eb8a8efd3f94ab191a89e3b8a49bed314a35ee501da3f2/opentelemetry_util_http-0.62b0.tar.gz", hash = "sha256:a62e4b19b8a432c0de657f167dee3455516136bb9c6ed463ca8063019970d835", size = 11393, upload-time = "2026-04-09T14:40:59.442Z" }
sdist = { url = "https://files.pythonhosted.org/packages/24/1b/aa71b63e18d30a8384036b9937f40f7618f8030a7aa213155fb54f6f2b47/opentelemetry_util_http-0.62b1.tar.gz", hash = "sha256:adf6facbb89aef8f8bc566e2f04624942ba08a7b678b3479a91051a8f4dc70a3", size = 11393, upload-time = "2026-04-24T13:23:12.994Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3d/7f/5c1b7d4385852b9e5eacd4e7f9d8b565d3d351d17463b24916ad098adf1a/opentelemetry_util_http-0.62b0-py3-none-any.whl", hash = "sha256:c20462808d8cc95b69b0dc4a3e02a9d36beb663347e96c931f51ffd78bd318ad", size = 9294, upload-time = "2026-04-09T14:40:19.014Z" },
{ url = "https://files.pythonhosted.org/packages/5d/85/a9d9d32161c1ced61346267db4c9702da54f81ec5dc88214bc65c23f4e9d/opentelemetry_util_http-0.62b1-py3-none-any.whl", hash = "sha256:c57e8a6c19fc422c288e6074e882f506f85030b69b7376182f74f9257b9261f0", size = 9295, upload-time = "2026-04-24T13:22:28.078Z" },
]

[[package]]
@ -4808,7 +4810,7 @@ wheels = [

[[package]]
name = "postgrest"
version = "2.28.3"
version = "2.29.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecation" },
@ -4816,9 +4818,9 @@ dependencies = [
{ name = "pydantic" },
{ name = "yarl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/40/60/9378ddd6e21b6005b34aeb42dc7a9ed9985c673c97c9b6a1858f9c52ebbd/postgrest-2.28.3.tar.gz", hash = "sha256:56336e9304950a78315ec7d6c8eb307cdb964d0878a7bec6111392ddb6c16a45", size = 13758, upload-time = "2026-03-20T14:38:06.542Z" }
sdist = { url = "https://files.pythonhosted.org/packages/52/98/f216b8b5c4d116ab6a2fb21339b5821da279ee773e163612418e1c56c012/postgrest-2.29.0.tar.gz", hash = "sha256:a87081858f627fcd57e8e7137004a1ef0adbdf0dbdfed1384e9ea1d7a9c525ec", size = 14217, upload-time = "2026-04-24T13:13:00.281Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7f/5e/6eeb1d53d010d80e800204c1eee6b3d5419a6a2b985c364f56f36cf48cca/postgrest-2.28.3-py3-none-any.whl", hash = "sha256:5a44d6c6d509abdbe0f928c86f0dc31ef26bda36e0357129836ec54dfb50b083", size = 21865, upload-time = "2026-03-20T14:38:05.55Z" },
{ url = "https://files.pythonhosted.org/packages/2c/0b/08b670a93a90d625c557b9e64b8a5fdeec80c3542d2d0265f0b4d6b16646/postgrest-2.29.0-py3-none-any.whl", hash = "sha256:3ee48e146f726272733d20e2b12de354cdb6cb9dd9cc3a61ed97ce69047aeb96", size = 22735, upload-time = "2026-04-24T13:12:58.405Z" },
]

[[package]]
@ -4980,21 +4982,21 @@ wheels = [

[[package]]
name = "psycopg2-binary"
version = "2.9.11"
version = "2.9.12"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" }
sdist = { url = "https://files.pythonhosted.org/packages/2a/60/a3624f79acea344c16fbef3a94d28b89a8042ddfb8f3e4ca83f538671409/psycopg2_binary-2.9.12.tar.gz", hash = "sha256:5ac9444edc768c02a6b6a591f070b8aae28ff3a99be57560ac996001580f294c", size = 379686, upload-time = "2026-04-21T09:40:34.304Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" },
{ url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" },
{ url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" },
{ url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" },
{ url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" },
{ url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" },
{ url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" },
{ url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" },
{ url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" },
{ url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" },
{ url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" },
{ url = "https://files.pythonhosted.org/packages/e2/9f/ef4ef3c8e15083df90ca35265cfd1a081a2f0cc07bb229c6314c6af817f4/psycopg2_binary-2.9.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5cdc05117180c5fa9c40eea8ea559ce64d73824c39d928b7da9fb5f6a9392433", size = 3712459, upload-time = "2026-04-20T23:34:30.549Z" },
{ url = "https://files.pythonhosted.org/packages/b5/01/3dd14e46ba48c1e1a6ec58ee599fa1b5efa00c246d5046cd903d0eeb1af1/psycopg2_binary-2.9.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d3227a3bc228c10d21011a99245edca923e4e8bf461857e869a507d9a41fe9f6", size = 3822936, upload-time = "2026-04-20T23:34:32.77Z" },
{ url = "https://files.pythonhosted.org/packages/a6/f7/0640e4901119d8a9f7a1784b927f494e2198e213ceb593753d1f2c8b1b30/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:995ce929eede89db6254b50827e2b7fd61e50d11f0b116b29fffe4a2e53c4580", size = 4578676, upload-time = "2026-04-20T23:34:35.18Z" },
{ url = "https://files.pythonhosted.org/packages/b0/55/44df3965b5f297c50cc0b1b594a31c67d6127a9d133045b8a66611b14dfb/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9fe06d93e72f1c048e731a2e3e7854a5bfaa58fc736068df90b352cefe66f03f", size = 4274917, upload-time = "2026-04-20T23:34:37.982Z" },
{ url = "https://files.pythonhosted.org/packages/b0/4b/74535248b1eac0c9336862e8617c765ac94dac76f9e25d7c4a79588c8907/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40e7b28b63aaf737cb3a1edc3a9bbc9a9f4ad3dcb7152e8c1130e4050eddcb7d", size = 5894843, upload-time = "2026-04-20T23:34:40.856Z" },
{ url = "https://files.pythonhosted.org/packages/f2/ba/f1bf8d2ae71868ad800b661099086ee52bc0f8d9f05be1acd8ebb06757cc/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:89d19a9f7899e8eb0656a2b3a08e0da04c720a06db6e0033eab5928aabe60fa9", size = 4110556, upload-time = "2026-04-20T23:34:44.016Z" },
{ url = "https://files.pythonhosted.org/packages/45/46/c15706c338403b7c420bcc0c2905aad116cc064545686d8bf85f1999ea00/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:612b965daee295ae2da8f8218ce1d274645dc76ef3f1abf6a0a94fd57eff876d", size = 3655714, upload-time = "2026-04-20T23:34:46.233Z" },
{ url = "https://files.pythonhosted.org/packages/b3/7c/a2d5dc09b64a4564db242a0fe418fde7d33f6f8259dd2c5b9d7def00fb5a/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b9a339b79d37c1b45f3235265f07cdeb0cb5ad7acd2ac7720a5920989c17c24e", size = 3301154, upload-time = "2026-04-20T23:34:49.528Z" },
{ url = "https://files.pythonhosted.org/packages/c0/e8/cc8c9a4ce71461f9ec548d38cadc41dc184b34c73e6455450775a9334ccd/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3471336e1acfd9c7fe507b8bad5af9317b6a89294f9eb37bd9a030bb7bebcdc6", size = 3048882, upload-time = "2026-04-20T23:34:51.86Z" },
{ url = "https://files.pythonhosted.org/packages/19/6a/31e2296bc0787c5ab75d3d118e40b239db8151b5192b90b77c72bc9256e9/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7af18183109e23502c8b2ae7f6926c0882766f35b5175a4cd737ad825e4d7a1b", size = 3351298, upload-time = "2026-04-20T23:34:54.124Z" },
{ url = "https://files.pythonhosted.org/packages/5f/a8/75f4e3e11203b590150abed2cf7794b9c9c9f7eceddae955191138b44dde/psycopg2_binary-2.9.12-cp312-cp312-win_amd64.whl", hash = "sha256:398fcd4db988c7d7d3713e2b8e18939776fd3fb447052daae4f24fa39daede4c", size = 2757230, upload-time = "2026-04-20T23:34:56.242Z" },
]

[[package]]
@ -5721,16 +5723,16 @@ wheels = [

[[package]]
name = "realtime"
version = "2.28.3"
version = "2.29.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "typing-extensions" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9c/3d/ef6ed9221f98766f3a503e6e3ac68fa7ca25c117b383f1efc448294232ac/realtime-2.28.3.tar.gz", hash = "sha256:5cc83a6217874426799d8bf74e96d904ac6fa77c39fa8982fa99287947eb2cbf", size = 18723, upload-time = "2026-03-20T14:38:08.424Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/f1/08c42a42653942fadfbef495d5b0239356140e7186cc528704956c5f06d4/realtime-2.29.0.tar.gz", hash = "sha256:8efe4a1b3a548a5fda09de701bd041fa0970c5a2fe7d13db0b9861ce11828be2", size = 18715, upload-time = "2026-04-24T13:13:02.315Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/d5/659405f9d4c9b022b7ac02bd52986ccc081f211db081051440f46bf4f358/realtime-2.28.3-py3-none-any.whl", hash = "sha256:efe484d6d39024c7e00ef70f70be600142e9407e5d802de8c96e86e014ce3b36", size = 22378, upload-time = "2026-03-20T14:38:07.144Z" },
{ url = "https://files.pythonhosted.org/packages/77/48/f6375c0a24923beb988f0c71c052604c96641cf43c2d22b91ec1df86afa0/realtime-2.29.0-py3-none-any.whl", hash = "sha256:1a4891e6c82e88ac9d96ac715e435e086f6f8c7665212a8717346de829cbb509", size = 22374, upload-time = "2026-04-24T13:13:01.103Z" },
]

[[package]]
@ -5887,27 +5889,27 @@ wheels = [

[[package]]
name = "ruff"
version = "0.15.11"
version = "0.15.12"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/8d/192f3d7103816158dfd5ea50d098ef2aec19194e6cbccd4b3485bdb2eb2d/ruff-0.15.11.tar.gz", hash = "sha256:f092b21708bf0e7437ce9ada249dfe688ff9a0954fc94abab05dcea7dcd29c33", size = 4637264, upload-time = "2026-04-16T18:46:26.58Z" }
sdist = { url = "https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/02/1e/6aca3427f751295ab011828e15e9bf452200ac74484f1db4be0197b8170b/ruff-0.15.11-py3-none-linux_armv6l.whl", hash = "sha256:e927cfff503135c558eb581a0c9792264aae9507904eb27809cdcff2f2c847b7", size = 10607943, upload-time = "2026-04-16T18:46:05.967Z" },
{ url = "https://files.pythonhosted.org/packages/e7/26/1341c262e74f36d4e84f3d6f4df0ac68cd53331a66bfc5080daa17c84c0b/ruff-0.15.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7a1b5b2938d8f890b76084d4fa843604d787a912541eae85fd7e233398bbb73e", size = 10988592, upload-time = "2026-04-16T18:46:00.742Z" },
{ url = "https://files.pythonhosted.org/packages/03/71/850b1d6ffa9564fbb6740429bad53df1094082fe515c8c1e74b6d8d05f18/ruff-0.15.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d4176f3d194afbdaee6e41b9ccb1a2c287dba8700047df474abfbe773825d1cb", size = 10338501, upload-time = "2026-04-16T18:46:03.723Z" },
{ url = "https://files.pythonhosted.org/packages/f2/11/cc1284d3e298c45a817a6aadb6c3e1d70b45c9b36d8d9cce3387b495a03a/ruff-0.15.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b17c886fb88203ced3afe7f14e8d5ae96e9d2f4ccc0ee66aa19f2c2675a27e4", size = 10670693, upload-time = "2026-04-16T18:46:41.941Z" },
{ url = "https://files.pythonhosted.org/packages/ce/9e/f8288b034ab72b371513c13f9a41d9ba3effac54e24bfb467b007daee2ca/ruff-0.15.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49fafa220220afe7758a487b048de4c8f9f767f37dfefad46b9dd06759d003eb", size = 10416177, upload-time = "2026-04-16T18:46:21.717Z" },
{ url = "https://files.pythonhosted.org/packages/85/71/504d79abfd3d92532ba6bbe3d1c19fada03e494332a59e37c7c2dabae427/ruff-0.15.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2ab8427e74a00d93b8bda1307b1e60970d40f304af38bccb218e056c220120d", size = 11221886, upload-time = "2026-04-16T18:46:15.086Z" },
{ url = "https://files.pythonhosted.org/packages/43/5a/947e6ab7a5ad603d65b474be15a4cbc6d29832db5d762cd142e4e3a74164/ruff-0.15.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:195072c0c8e1fc8f940652073df082e37a5d9cb43b4ab1e4d0566ab8977a13b7", size = 12075183, upload-time = "2026-04-16T18:46:07.944Z" },
{ url = "https://files.pythonhosted.org/packages/9f/a1/0b7bb6268775fdd3a0818aee8efd8f5b4e231d24dd4d528ced2534023182/ruff-0.15.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3a0996d486af3920dec930a2e7daed4847dfc12649b537a9335585ada163e9e", size = 11516575, upload-time = "2026-04-16T18:46:31.687Z" },
{ url = "https://files.pythonhosted.org/packages/30/c3/bb5168fc4d233cc06e95f482770d0f3c87945a0cd9f614b90ea8dc2f2833/ruff-0.15.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bef2cb556d509259f1fe440bb9cd33c756222cf0a7afe90d15edf0866702431", size = 11306537, upload-time = "2026-04-16T18:46:36.988Z" },
{ url = "https://files.pythonhosted.org/packages/e4/92/4cfae6441f3967317946f3b788136eecf093729b94d6561f963ed810c82e/ruff-0.15.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:030d921a836d7d4a12cf6e8d984a88b66094ccb0e0f17ddd55067c331191bf19", size = 11296813, upload-time = "2026-04-16T18:46:24.182Z" },
{ url = "https://files.pythonhosted.org/packages/43/26/972784c5dde8313acde8ac71ba8ac65475b85db4a2352a76c9934361f9bc/ruff-0.15.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0e783b599b4577788dbbb66b9addcef87e9a8832f4ce0c19e34bf55543a2f890", size = 10633136, upload-time = "2026-04-16T18:46:39.802Z" },
{ url = "https://files.pythonhosted.org/packages/5b/53/3985a4f185020c2f367f2e08a103032e12564829742a1b417980ce1514a0/ruff-0.15.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ae90592246625ba4a34349d68ec28d4400d75182b71baa196ddb9f82db025ef5", size = 10424701, upload-time = "2026-04-16T18:46:10.381Z" },
{ url = "https://files.pythonhosted.org/packages/d3/57/bf0dfb32241b56c83bb663a826133da4bf17f682ba8c096973065f6e6a68/ruff-0.15.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1f111d62e3c983ed20e0ca2e800f8d77433a5b1161947df99a5c2a3fb60514f0", size = 10873887, upload-time = "2026-04-16T18:46:29.157Z" },
{ url = "https://files.pythonhosted.org/packages/02/05/e48076b2a57dc33ee8c7a957296f97c744ca891a8ffb4ffb1aaa3b3f517d/ruff-0.15.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:06f483d6646f59eaffba9ae30956370d3a886625f511a3108994000480621d1c", size = 11404316, upload-time = "2026-04-16T18:46:19.462Z" },
{ url = "https://files.pythonhosted.org/packages/88/27/0195d15fe7a897cbcba0904792c4b7c9fdd958456c3a17d2ea6093716a9a/ruff-0.15.11-py3-none-win32.whl", hash = "sha256:476a2aa56b7da0b73a3ee80b6b2f0e19cce544245479adde7baa65466664d5f3", size = 10655535, upload-time = "2026-04-16T18:46:12.47Z" },
{ url = "https://files.pythonhosted.org/packages/3a/5e/c927b325bd4c1d3620211a4b96f47864633199feed60fa936025ab27e090/ruff-0.15.11-py3-none-win_amd64.whl", hash = "sha256:8b6756d88d7e234fb0c98c91511aae3cd519d5e3ed271cae31b20f39cb2a12a3", size = 11779692, upload-time = "2026-04-16T18:46:17.268Z" },
{ url = "https://files.pythonhosted.org/packages/63/b6/aeadee5443e49baa2facd51131159fd6301cc4ccfc1541e4df7b021c37dd/ruff-0.15.11-py3-none-win_arm64.whl", hash = "sha256:063fed18cc1bbe0ee7393957284a6fe8b588c6a406a285af3ee3f46da2391ee4", size = 11032614, upload-time = "2026-04-16T18:46:34.487Z" },
{ url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" },
{ url = "https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 11069267, upload-time = "2026-04-24T18:17:30.105Z" },
{ url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" },
{ url = "https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" },
{ url = "https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" },
{ url = "https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" },
{ url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" },
{ url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = "2026-04-24T18:17:25.409Z" },
{ url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" },
{ url = "https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" },
{ url = "https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" },
{ url = "https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" },
{ url = "https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" },
{ url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" },
{ url = "https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" },
{ url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = "sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" },
{ url = "https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = "sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" },
]

[[package]]
@ -6212,7 +6214,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "storage3"
|
||||
version = "2.28.3"
|
||||
version = "2.29.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "deprecation" },
|
||||
@ -6221,9 +6223,9 @@ dependencies = [
|
||||
{ name = "pyiceberg" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/eb/b5/18df59ba92951d74774eb0265072bf236ead5e3cbc4b802d8bf1cf3581a0/storage3-2.28.3.tar.gz", hash = "sha256:2b3f843cbd44c4a3b483ec076a12c27de88c0ad5358a43067ed44ef08292353f", size = 20109, upload-time = "2026-03-20T14:38:11.467Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d7/be/771246434b5caf3c6187bfdc932eaede00bf5f2937b47475ab25209ede3e/storage3-2.29.0.tar.gz", hash = "sha256:b0cc2f6714655d725c998d2c5ae8c6fb4f56a513bd31e4f85770df557fe021e3", size = 20160, upload-time = "2026-04-24T13:13:04.626Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/a5/2dbe216954e026a8c2e2dc7dfa5fd7b1a1ae0824d10972e62462f4f15aca/storage3-2.28.3-py3-none-any.whl", hash = "sha256:bac35c5087619174448fdef6a337db4e3dfebf3de69f685bd706de93ddcdad69", size = 28239, upload-time = "2026-03-20T14:38:10.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/c3/790c31866f52c13b26f108b45759bf50dafae3a0bafb4511fadc98ba7c33/storage3-2.29.0-py3-none-any.whl", hash = "sha256:043ef7ff27cc8b9da12be403cf78ee4586180edfcf62b227ff61e1bd79594b06", size = 28284, upload-time = "2026-04-24T13:13:03.338Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -6249,7 +6251,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "supabase"
|
||||
version = "2.28.3"
|
||||
version = "2.29.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
@ -6260,37 +6262,37 @@ dependencies = [
|
||||
{ name = "supabase-functions" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5a/98/2f1c95a2269ce995a34f275760b1c2ee71ee7a75649238ca0470afdfc2ef/supabase-2.28.3.tar.gz", hash = "sha256:1200961e46cdec17c7c280a1e09a159544643eada2759591ea69835303a2e1a4", size = 9687, upload-time = "2026-03-20T14:38:13.272Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/51/a0/2407d616fdf68e8632bbbfb063d1685c38377ac0199e8ca11deaea1f3bf0/supabase-2.29.0.tar.gz", hash = "sha256:a88c4a4eb50fbb903e2e962fbc7c27733b00589140139f9e837bc9fe30dd3615", size = 9689, upload-time = "2026-04-24T13:13:06.728Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/de/96/1b48eb664153401c22087bbf77f6a428965e830cc8e0d0c6d68324a28342/supabase-2.28.3-py3-none-any.whl", hash = "sha256:52a7ce4a1d2d55fa6d657bf4760672935058143a5bedc64165851be25ce01dbd", size = 16634, upload-time = "2026-03-20T14:38:12.319Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/52/232f6bbf5326e04ae12e2ef04a24f011a0d7cab379a8b9698652bc8ff78f/supabase-2.29.0-py3-none-any.whl", hash = "sha256:16c3ec4b7094f6b92efc5cd3bb3f96826d3b6dd5d24fe15c89c81166efce88fe", size = 16633, upload-time = "2026-04-24T13:13:05.722Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "supabase-auth"
|
||||
version = "2.28.3"
|
||||
version = "2.29.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "httpx", extra = ["http2"] },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pyjwt", extra = ["crypto"] },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cc/6f/1bf81293374ba71183b321bf5dfd7151c3db0c2e24715f35783bc1c56385/supabase_auth-2.28.3.tar.gz", hash = "sha256:41c049da82f9d7fc2f111808e57e984015f128d033f58caa67fd76f428472807", size = 39160, upload-time = "2026-03-20T14:38:15.128Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/51/7f/7ceeb4c7a2caa188062e934897f0e08e1af0a0e47e376c7645c26b4c39d8/supabase_auth-2.29.0.tar.gz", hash = "sha256:46efc6a3455a23957b846dc974303a844ba0413718cfa899425477ac977f95b3", size = 39154, upload-time = "2026-04-24T13:13:08.509Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/d3/e012315aa895b434fa77bc475e2dfeb87119e67918ecca4d88a25f96814d/supabase_auth-2.28.3-py3-none-any.whl", hash = "sha256:e47c5caec7bbf3c258964d027fbbe99f3cc4a956d3a635f898c962b4d22832dd", size = 48378, upload-time = "2026-03-20T14:38:14.169Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/ac/3c35cf52281f940b9497cf17abfc5c2050ca49f342d60cfafe22dac3482b/supabase_auth-2.29.0-py3-none-any.whl", hash = "sha256:64de6ef8cae80f97d3aa8d5ca507d5427dda5c89885c0bcfe9f8b0263b6fb9a4", size = 48379, upload-time = "2026-04-24T13:13:07.417Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "supabase-functions"
|
||||
version = "2.28.3"
|
||||
version = "2.29.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "httpx", extra = ["http2"] },
|
||||
{ name = "strenum" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/19/ea/59bf327960e5384fcc9e69afbdf97260a2cf2684a25c0731968a8a393b9c/supabase_functions-2.28.3.tar.gz", hash = "sha256:5a6255d60a263d44251c5ca250fcdde2408a8483a8bf31f4ac80255de8f3fcae", size = 4679, upload-time = "2026-03-20T14:38:16.742Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e5/19/1a1d22749f38f2a6cbca93a6f5a35c9f816c2c3c06bfaa077fa336e90537/supabase_functions-2.29.0.tar.gz", hash = "sha256:0f8a14a2ea9f12b1c208f61dc6f55e2f4b1121f81bf01c08f9b487d22888744d", size = 4683, upload-time = "2026-04-24T13:13:10.432Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/ca/1e720f1347a88519e3d52b6d801cd031c3a7a5df66640c5dc6e81d925057/supabase_functions-2.28.3-py3-none-any.whl", hash = "sha256:eb30578866103fed9322c54e95dd68c2f1a4b6b177e129d9369edd364637904e", size = 8801, upload-time = "2026-03-20T14:38:15.883Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/10/6f8ef0b408ade76b5a439afab588ce5849e9604a23040ca73cfe0b90cb9e/supabase_functions-2.29.0-py3-none-any.whl", hash = "sha256:6f08de52eec5820eae53616868b85e849e181beffaa5d05b8ea1708ceae5e48e", size = 8799, upload-time = "2026-04-24T13:13:09.214Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -6780,11 +6782,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "types-psycopg2"
|
||||
version = "2.9.21.20260408"
|
||||
version = "2.9.21.20260422"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cd/24/d8ae11a0c056535557aaabeb7d7838423abdfdcf1e5f8dfb2c04d316c65d/types_psycopg2-2.9.21.20260408.tar.gz", hash = "sha256:bb65cd12f53b6633077fd782607a33065e1f3bf585219c9f786b61ad2b72211c", size = 27078, upload-time = "2026-04-08T04:26:15.848Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b9/a2/ecb04604074a7f2e82231ab1f2d3b5a792589aa3c21a597cb3232a38ece3/types_psycopg2-2.9.21.20260422.tar.gz", hash = "sha256:ad7574fa8e25d9aa96ab96cd280c4dee20872725cd1fe6a6d3facc354f2644d4", size = 27123, upload-time = "2026-04-22T04:36:33.263Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/fe/9aab9239640107b6e46afddcee578a916b8b98bfee36e03da5b0d2c95124/types_psycopg2-2.9.21.20260408-py3-none-any.whl", hash = "sha256:49b086bfc9e0ce901c6537403ead1c19c75275571040b037af0248a8e48c322f", size = 24921, upload-time = "2026-04-08T04:26:14.715Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/08/82f86c2d0a7ae4d335c6fe3c4ad193c4a57f0d6bfe1a676289cf63667275/types_psycopg2-2.9.21.20260422-py3-none-any.whl", hash = "sha256:e240684ac37946c5a2a058b04ea1f2fd0e4ee2655719b8c3ec9abf37f96da5ba", size = 24918, upload-time = "2026-04-22T04:36:32.108Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -7479,7 +7481,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "xinference-client"
|
||||
version = "2.5.0"
|
||||
version = "2.7.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
@ -7487,9 +7489,9 @@ dependencies = [
|
||||
{ name = "requests" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d0/8a/4d7c72510f3c462195c2e7aa63559cafcf20f7d1901132d533b7498bab1c/xinference_client-2.5.0.tar.gz", hash = "sha256:0680324e2f438b8b208ca80e8a7e1c22e9152fce54f8c024c75e2ce57bfa5639", size = 58430, upload-time = "2026-04-13T07:21:40.145Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/99/86/89723d8a4f862bac49581ef99c9e52c014acf42355710335470062efabf1/xinference_client-2.7.0.tar.gz", hash = "sha256:51c174bc1704a505512550097d4b2025480a840d97bed8097dfbfaec2172ca9e", size = 58577, upload-time = "2026-04-25T14:37:37.345Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/dd/4fd501b8092c01f0775142850e3b601d743edf733077b756defe4a01cc37/xinference_client-2.5.0-py3-none-any.whl", hash = "sha256:bb90f069a2c30ac6ea7453ab37a0fadd34c28b655afa51fe20c18e67a361c269", size = 40006, upload-time = "2026-04-13T07:21:38.851Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/22/f9b92941be1cba5b2347211bb04c354a6ba2bad0e7b2da41510f77959327/xinference_client-2.7.0-py3-none-any.whl", hash = "sha256:76377804eb7fd2ece8a7d1e5c517d8aed8b5a511834066e43414ad74bcb34c09", size = 40154, upload-time = "2026-04-25T14:37:35.959Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
1
depot.json
Normal file
@ -0,0 +1 @@
{"id":"smkxz53ddb"}
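A note on the new depot.json: this is the Depot CLI's standard per-repo project marker, and its single `id` field is what `depot` resolves when no explicit project flag or environment variable is given. Committing it presumably keeps local and CI builds pointed at the same Depot project; that rationale is an inference, not something this diff states.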
@ -59,19 +59,25 @@ services:
- ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}:/var/lib/mysql
ports:
- "${EXPOSE_MYSQL_PORT:-3306}:3306"
# mysqladmin ping passes during mysql:8.0's TCP-listening stage even while
# the server is still finalising init, leading to "Lost connection during
# query" on the first real query. Verify with a real SELECT instead.
healthcheck:
test:
[
"CMD",
"mysqladmin",
"ping",
"-u",
"root",
"mysql",
"-h",
"127.0.0.1",
"-uroot",
"-p${DB_PASSWORD:-difyai123456}",
"-e",
"SELECT 1",
]
interval: 1s
timeout: 3s
retries: 30
start_period: 20s

# The redis cache.
redis:

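The comment in the hunk above is the whole rationale for the healthcheck change: `mysqladmin ping` answers while mysql:8.0 is still finalising init, so only a real query proves readiness. For anyone who needs the same probe outside compose, here is a minimal sketch driven from a Node script; the container name `dify-mysql` and the default compose password are assumptions for illustration:

// Readiness probe sketch: retry a real query until MySQL answers it.
// Mirrors the compose healthcheck above; ping alone is not sufficient.
import { execFile } from 'node:child_process'
import { promisify } from 'node:util'

const execFileAsync = promisify(execFile)

export async function waitForMySQL(retries = 30, delayMs = 1000): Promise<void> {
  for (let attempt = 1; attempt <= retries; attempt++) {
    try {
      // Assumed container name and password; adjust to your environment.
      await execFileAsync('docker', [
        'exec', 'dify-mysql',
        'mysql', '-h', '127.0.0.1', '-uroot', '-pdifyai123456', '-e', 'SELECT 1',
      ])
      return // the server executed a real statement, so init is complete
    }
    catch {
      await new Promise(resolve => setTimeout(resolve, delayMs))
    }
  }
  throw new Error(`MySQL did not become ready after ${retries} attempts`)
}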
26
e2e/features/apps/share-app.feature
Normal file
@ -0,0 +1,26 @@
@apps @authenticated @core
Feature: Share app publicly

Scenario: Enable public share for a published workflow app
Given I am signed in as the default E2E admin
And a "workflow" app has been created via API
And a minimal runnable workflow draft has been synced
When I open the app from the app list
And I open the publish panel
And I publish the app
And I navigate to the app overview page
And I enable the Web App share
Then the Web App should be in service

@unauthenticated
Scenario: Access a shared workflow app without authentication
Given a workflow app has been published and shared via API
When I open the shared app URL
Then the shared app page should be accessible

@unauthenticated
Scenario: Run a shared workflow app without authentication
Given a workflow app has been published and shared via API
When I open the shared app URL
And I run the shared workflow app
Then the shared workflow run should succeed
13
e2e/features/apps/workflow-run-publish.feature
Normal file
@ -0,0 +1,13 @@
@apps @authenticated @core @mode-matrix
Feature: Workflow run and publish

Scenario: Run and publish a minimal workflow app
Given I am signed in as the default E2E admin
And a "workflow" app has been created via API
And a minimal runnable workflow draft has been synced
When I open the app from the app list
And I run the workflow
Then the workflow run should succeed
When I open the publish panel
And I publish the app
Then the app should be marked as published
51
e2e/features/step-definitions/apps/share-app.steps.ts
Normal file
@ -0,0 +1,51 @@
import type { DifyWorld } from '../../support/world'
import { Given, Then, When } from '@cucumber/cucumber'
import { expect } from '@playwright/test'
import { createTestApp, enableAppSiteAndGetURL, publishWorkflowApp, syncRunnableWorkflowDraft } from '../../../support/api'

When('I enable the Web App share', async function (this: DifyWorld) {
const page = this.getPage()
const appName = this.lastCreatedAppName
if (!appName)
throw new Error('No app name available. Run "a \\"workflow\\" app has been created via API" first.')

await page.locator('button').filter({ hasText: appName }).filter({ hasText: 'Workflow' }).click()
await expect(page.getByRole('switch').first()).toBeEnabled({ timeout: 15_000 })
await page.getByRole('switch').first().click()
})

Then('the Web App should be in service', async function (this: DifyWorld) {
await expect(this.getPage().getByText('In Service').first()).toBeVisible({ timeout: 10_000 })
})

Given('a workflow app has been published and shared via API', async function (this: DifyWorld) {
const app = await createTestApp(`E2E Share ${Date.now()}`, 'workflow')
this.createdAppIds.push(app.id)
this.lastCreatedAppName = app.name
await syncRunnableWorkflowDraft(app.id)
await publishWorkflowApp(app.id)
this.shareURL = await enableAppSiteAndGetURL(app.id)
})

When('I open the shared app URL', async function (this: DifyWorld) {
if (!this.shareURL)
throw new Error('No share URL available. Run "a workflow app has been published and shared via API" first.')
await this.getPage().goto(this.shareURL, { timeout: 20_000 })
})

Then('the shared app page should be accessible', async function (this: DifyWorld) {
await expect(this.getPage()).toHaveURL(/\/(workflow|chat)\/[a-zA-Z0-9]+/, { timeout: 15_000 })
await expect(this.getPage().locator('body')).toBeVisible({ timeout: 10_000 })
})

When('I run the shared workflow app', async function (this: DifyWorld) {
const page = this.getPage()
const runButton = page.getByTestId('run-button')

await expect(runButton).toBeEnabled({ timeout: 15_000 })
await runButton.click()
})

Then('the shared workflow run should succeed', async function (this: DifyWorld) {
await expect(this.getPage().getByTestId('status-icon-success')).toBeVisible({ timeout: 55_000 })
})
25
e2e/features/step-definitions/apps/workflow-run.steps.ts
Normal file
@ -0,0 +1,25 @@
import type { DifyWorld } from '../../support/world'
import { Given, Then, When } from '@cucumber/cucumber'
import { expect } from '@playwright/test'
import { syncRunnableWorkflowDraft } from '../../../support/api'

Given('a minimal runnable workflow draft has been synced', async function (this: DifyWorld) {
const appId = this.createdAppIds.at(-1)
if (!appId)
throw new Error('No app ID found. Run "a \\"workflow\\" app has been created via API" first.')
await syncRunnableWorkflowDraft(appId)
})

When('I run the workflow', async function (this: DifyWorld) {
const page = this.getPage()
const testRunButton = page.getByText('Test Run')

await expect(testRunButton).toBeVisible({ timeout: 15_000 })
await testRunButton.click()
})

Then('the workflow run should succeed', async function (this: DifyWorld) {
const page = this.getPage()
await page.getByText('DETAIL').click()
await expect(page.getByText('SUCCESS').first()).toBeVisible({ timeout: 55_000 })
})
@ -15,6 +15,7 @@ export class DifyWorld extends World {
lastCreatedAppName: string | undefined
createdAppIds: string[] = []
capturedDownloads: Download[] = []
shareURL: string | undefined

constructor(options: IWorldOptions) {
super(options)
@ -27,6 +28,7 @@ export class DifyWorld extends World {
this.lastCreatedAppName = undefined
this.createdAppIds = []
this.capturedDownloads = []
this.shareURL = undefined
}

async startSession(browser: Browser, authenticated: boolean) {

@ -67,11 +67,20 @@ const main = async () => {
logFilePath: path.join(logDir, 'cucumber-api.log'),
})

const celeryProcess = await startLoggedProcess({
command: 'npx',
args: ['tsx', './scripts/setup.ts', 'celery'],
cwd: e2eDir,
label: 'celery worker',
logFilePath: path.join(logDir, 'cucumber-celery.log'),
})

let cleanupPromise: Promise<void> | undefined
const cleanup = async () => {
if (!cleanupPromise) {
cleanupPromise = (async () => {
await stopWebServer()
await stopManagedProcess(celeryProcess)
await stopManagedProcess(apiProcess)

if (startMiddlewareForRun) {

@ -202,6 +202,32 @@ export const startApi = async () => {
})
}

export const startCelery = async () => {
const env = await getApiEnvironment()

await runForegroundProcess({
command: 'uv',
args: [
'run',
'--project',
'.',
'--no-sync',
'celery',
'-A',
'app.celery',
'worker',
'--pool',
'solo',
'--loglevel',
'INFO',
'-Q',
'workflow_based_app_execution',
],
cwd: apiDir,
env,
})
}

export const stopMiddleware = async () => {
await runCommandOrThrow({
command: 'docker',
@ -308,7 +334,7 @@ export const startMiddleware = async () => {
}

const printUsage = () => {
console.log('Usage: tsx ./scripts/setup.ts <reset|middleware-up|middleware-down|api|web>')
console.log('Usage: tsx ./scripts/setup.ts <reset|middleware-up|middleware-down|api|celery|web>')
}

const main = async () => {
@ -318,6 +344,9 @@ const main = async () => {
case 'api':
await startApi()
return
case 'celery':
await startCelery()
return
case 'middleware-down':
await stopMiddleware()
return

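Two of the worker flags above are worth a note. `--pool solo` runs tasks in the worker's main process, one at a time, which keeps e2e runs deterministic and the single log file easy to follow. `-Q workflow_based_app_execution` subscribes the worker only to that one queue, presumably the queue Dify dispatches published workflow runs to, given that the scenarios above exercise workflow execution; that routing detail is an inference from this diff, not something it states.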
@ -80,3 +80,83 @@ export async function deleteTestApp(id: string): Promise<void> {
await ctx.dispose()
}
}

export async function syncRunnableWorkflowDraft(appId: string): Promise<void> {
const ctx = await createApiContext()
try {
await ctx.post(`/console/api/apps/${appId}/workflows/draft`, {
data: {
graph: {
nodes: [
{
id: 'start',
type: 'custom',
position: { x: 80, y: 282 },
data: { id: 'start', type: 'start', title: 'Start', variables: [] },
},
{
id: 'end',
type: 'custom',
position: { x: 480, y: 282 },
data: {
id: 'end',
type: 'end',
title: 'End',
outputs: [{ variable: 'result', value_selector: ['sys', 'workflow_run_id'] }],
},
},
],
edges: [
{
id: 'start-end',
type: 'custom',
source: 'start',
target: 'end',
sourceHandle: 'source',
targetHandle: 'target',
},
],
viewport: { x: 0, y: 0, zoom: 1 },
},
features: {},
environment_variables: [],
conversation_variables: [],
},
})
}
finally {
await ctx.dispose()
}
}

export async function publishWorkflowApp(appId: string): Promise<void> {
const ctx = await createApiContext()
try {
await ctx.post(`/console/api/apps/${appId}/workflows/publish`, {
data: { marked_name: '', marked_comment: '' },
})
}
finally {
await ctx.dispose()
}
}

type AppDetailWithSite = {
site: { access_token: string, app_base_url: string, enable_site: boolean }
}

export async function enableAppSiteAndGetURL(appId: string): Promise<string> {
const ctx = await createApiContext()
try {
await ctx.post(`/console/api/apps/${appId}/site-enable`, {
data: { enable_site: true },
})
const res = await ctx.get(`/console/api/apps/${appId}`)
const body = (await res.json()) as AppDetailWithSite
const { app_base_url, access_token } = body.site
return `${app_base_url}/workflow/${access_token}`
}
finally {
await ctx.dispose()
}
}

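Taken together, the helpers above give the feature scenarios everything they need before a browser is involved: sync a minimal start-to-end graph, publish it, and flip the site on to get a public URL. A condensed sketch of that API-side flow, mirroring the call sequence in the share-app step definitions (the relative import path is an assumption; adjust to the caller's location):

// Build a publicly shareable workflow app entirely through the console API.
import { createTestApp, enableAppSiteAndGetURL, publishWorkflowApp, syncRunnableWorkflowDraft } from './api'

export async function createSharedWorkflowApp(name: string): Promise<{ appId: string, shareURL: string }> {
  const app = await createTestApp(name, 'workflow') // create the app shell
  await syncRunnableWorkflowDraft(app.id) // push the minimal start -> end graph
  await publishWorkflowApp(app.id) // publish the synced draft
  const shareURL = await enableAppSiteAndGetURL(app.id) // enable the site and build the URL
  return { appId: app.id, shareURL }
}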
@ -2059,7 +2059,7 @@
},
"web/app/components/base/text-generation/types.ts": {
"no-barrel-files/no-barrel-files": {
"count": 3
"count": 1
}
},
"web/app/components/base/textarea/index.stories.tsx": {
@ -2070,11 +2070,6 @@
"count": 1
}
},
"web/app/components/base/textarea/index.tsx": {
"react-refresh/only-export-components": {
"count": 1
}
},
"web/app/components/base/video-gallery/VideoPlayer.tsx": {
"react/set-state-in-effect": {
"count": 1
@ -2422,21 +2417,11 @@
"count": 1
}
},
"web/app/components/datasets/documents/create-from-pipeline/data-source/base/header.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/datasets/documents/create-from-pipeline/data-source/online-documents/index.tsx": {
"ts/no-explicit-any": {
"count": 1
}
},
"web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/header/breadcrumbs/bucket.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/list/item.tsx": {
"no-restricted-imports": {
"count": 1
@ -2525,11 +2510,6 @@
"count": 1
}
},
"web/app/components/datasets/documents/detail/completed/common/summary-status.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/datasets/documents/detail/completed/components/index.ts": {
"no-barrel-files/no-barrel-files": {
"count": 3
@ -2789,11 +2769,6 @@
"count": 2
}
},
"web/app/components/develop/secret-key/input-copy.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/develop/secret-key/secret-key-generate.tsx": {
"no-restricted-imports": {
"count": 1
@ -3159,16 +3134,6 @@
"count": 1
}
},
"web/app/components/plugins/base/badges/icon-with-tooltip.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/plugins/base/key-value-item.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/plugins/card/index.tsx": {
"ts/no-non-null-asserted-optional-chain": {
"count": 1
@ -3328,24 +3293,11 @@
"count": 2
}
},
"web/app/components/plugins/plugin-detail-panel/detail-header/components/plugin-source-badge.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/plugins/plugin-detail-panel/detail-header/hooks/index.ts": {
"no-barrel-files/no-barrel-files": {
"count": 3
}
},
"web/app/components/plugins/plugin-detail-panel/endpoint-card.tsx": {
"no-restricted-imports": {
"count": 1
},
"ts/no-explicit-any": {
"count": 2
}
},
"web/app/components/plugins/plugin-detail-panel/endpoint-list.tsx": {
"no-restricted-imports": {
"count": 1
@ -3544,11 +3496,6 @@
"count": 1
}
},
"web/app/components/plugins/plugin-page/plugin-tasks/components/task-status-indicator.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/plugins/readme-panel/index.tsx": {
"react/unsupported-syntax": {
"count": 1
@ -3559,11 +3506,6 @@
"count": 1
}
},
"web/app/components/plugins/reference-setting-modal/auto-update-setting/strategy-picker.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"web/app/components/plugins/reference-setting-modal/auto-update-setting/types.ts": {
"erasable-syntax-only/enums": {
"count": 2
@ -3822,14 +3764,6 @@
"count": 1
}
},
"web/app/components/tools/mcp/detail/content.tsx": {
"no-restricted-imports": {
"count": 1
},
"ts/no-explicit-any": {
"count": 3
}
},
"web/app/components/tools/mcp/detail/tool-item.tsx": {
"no-restricted-imports": {
"count": 1
@ -5394,14 +5328,6 @@
"count": 2
}
},
"web/app/components/workflow/panel/chat-variable-panel/components/variable-type-select.tsx": {
"no-restricted-imports": {
"count": 1
},
"ts/no-explicit-any": {
"count": 4
}
},
"web/app/components/workflow/panel/chat-variable-panel/type.ts": {
"erasable-syntax-only/enums": {
"count": 1

@ -2,7 +2,7 @@
"name": "dify",
"type": "module",
"private": true,
"packageManager": "pnpm@10.33.0",
"packageManager": "pnpm@10.33.2",
"engines": {
"node": "^22.22.1"
},

@ -194,7 +194,6 @@ describe('Select wrappers', () => {
})

it('should forward passthrough props to positioner popup and list when passthrough props are provided', async () => {
const onPositionerMouseEnter = vi.fn()
const onPopupClick = vi.fn()
const onListFocus = vi.fn()

@ -208,7 +207,6 @@ describe('Select wrappers', () => {
'role': 'group',
'aria-label': 'select positioner',
'id': 'select-positioner',
'onMouseEnter': onPositionerMouseEnter,
}}
popupProps={{
'role': 'dialog',
@ -231,10 +229,7 @@ describe('Select wrappers', () => {
</Select>,
)

screen.getByRole('group', { name: 'select positioner' }).element().dispatchEvent(new MouseEvent('mouseover', {
bubbles: true,
}))
asHTMLElement(screen.getByRole('dialog', { name: 'select popup' }).element()).click()
await screen.getByRole('dialog', { name: 'select popup' }).click()
screen.getByRole('listbox', { name: 'select list' }).element().dispatchEvent(new FocusEvent('focusin', {
bubbles: true,
}))
@ -242,7 +237,6 @@ describe('Select wrappers', () => {
await expect.element(screen.getByRole('group', { name: 'select positioner' })).toHaveAttribute('id', 'select-positioner')
await expect.element(screen.getByRole('dialog', { name: 'select popup' })).toHaveAttribute('id', 'select-popup')
await expect.element(screen.getByRole('listbox', { name: 'select list' })).toHaveAttribute('id', 'select-list')
expect(onPositionerMouseEnter).toHaveBeenCalledTimes(1)
expect(onPopupClick).toHaveBeenCalledTimes(1)
expect(onListFocus).toHaveBeenCalled()
})

@ -125,21 +125,6 @@ describe('@langgenius/dify-ui/toast', () => {
expect(onClose).toHaveBeenCalledTimes(1)
})

it('should respect the host timeout configuration', async () => {
const screen = await render(<ToastHost timeout={3000} />)

toast('Configured timeout')
await expect.element(screen.getByText('Configured timeout')).toBeInTheDocument()

await vi.advanceTimersByTimeAsync(2999)
expect(document.body).toHaveTextContent('Configured timeout')

await vi.advanceTimersByTimeAsync(1)
await vi.waitFor(() => {
expect(document.body).not.toHaveTextContent('Configured timeout')
})
})

it('should respect custom timeout values including zero', async () => {
const screen = await render(<ToastHost />)

850
pnpm-lock.yaml
generated
File diff suppressed because it is too large
@ -47,18 +47,18 @@ overrides:
yaml@>=2.0.0 <2.8.3: 2.8.3
yauzl@<3.2.1: 3.2.1
catalog:
'@amplitude/analytics-browser': 2.41.0
'@amplitude/plugin-session-replay-browser': 1.27.10
'@amplitude/analytics-browser': 2.41.1
'@amplitude/plugin-session-replay-browser': 1.28.0
'@antfu/eslint-config': 8.2.0
'@base-ui/react': 1.4.1
'@chromatic-com/storybook': 5.1.2
'@cucumber/cucumber': 12.8.1
'@cucumber/cucumber': 12.8.2
'@egoist/tailwindcss-icons': 1.9.2
'@emoji-mart/data': 1.2.1
'@eslint-react/eslint-plugin': 3.0.0
'@eslint/js': 10.0.1
'@floating-ui/react': 0.27.19
'@formatjs/intl-localematcher': 0.8.3
'@formatjs/intl-localematcher': 0.8.4
'@headlessui/react': 2.2.10
'@heroicons/react': 2.2.0
'@hono/node-server': 1.19.14
@ -77,14 +77,14 @@ catalog:
'@monaco-editor/react': 4.7.0
'@next/eslint-plugin-next': 16.2.4
'@next/mdx': 16.2.4
'@orpc/client': 1.13.14
'@orpc/contract': 1.13.14
'@orpc/openapi-client': 1.13.14
'@orpc/tanstack-query': 1.13.14
'@orpc/client': 1.14.0
'@orpc/contract': 1.14.0
'@orpc/openapi-client': 1.14.0
'@orpc/tanstack-query': 1.14.0
'@playwright/test': 1.59.1
'@remixicon/react': 4.9.0
'@rgrove/parse-xml': 4.2.0
'@sentry/react': 10.49.0
'@sentry/react': 10.50.0
'@storybook/addon-docs': 10.3.5
'@storybook/addon-links': 10.3.5
'@storybook/addon-onboarding': 10.3.5
@ -98,12 +98,12 @@ catalog:
'@tailwindcss/postcss': 4.2.4
'@tailwindcss/typography': 0.5.19
'@tailwindcss/vite': 4.2.4
'@tanstack/eslint-plugin-query': 5.99.2
'@tanstack/eslint-plugin-query': 5.100.5
'@tanstack/react-devtools': 0.10.2
'@tanstack/react-form': 1.29.1
'@tanstack/react-form-devtools': 0.2.22
'@tanstack/react-query': 5.99.2
'@tanstack/react-query-devtools': 5.99.2
'@tanstack/react-query': 5.100.5
'@tanstack/react-query-devtools': 5.100.5
'@tanstack/react-virtual': 3.13.24
'@testing-library/dom': 10.4.1
'@testing-library/jest-dom': 6.9.1
@ -122,11 +122,11 @@ catalog:
'@types/sortablejs': 1.15.9
'@typescript-eslint/eslint-plugin': 8.59.0
'@typescript-eslint/parser': 8.59.0
'@typescript/native-preview': 7.0.0-dev.20260422.1
'@typescript/native-preview': 7.0.0-dev.20260426.1
'@vitejs/plugin-react': 6.0.1
'@vitejs/plugin-rsc': 0.5.24
'@vitejs/plugin-rsc': 0.5.25
'@vitest/coverage-v8': 4.1.5
abcjs: 6.6.2
abcjs: 6.6.3
agentation: 3.0.2
ahooks: 3.9.7
class-variance-authority: 0.7.1
@ -158,7 +158,7 @@ catalog:
fast-deep-equal: 3.1.3
happy-dom: 20.9.0
hast-util-to-jsx-runtime: 2.3.6
hono: 4.12.14
hono: 4.12.15
html-entities: 2.6.0
html-to-image: 1.11.13
i18next: 26.0.6
@ -171,11 +171,11 @@ catalog:
js-yaml: 4.1.1
jsonschema: 1.5.0
katex: 0.16.45
knip: 6.6.1
knip: 6.7.0
ky: 2.0.2
lamejs: 1.2.1
lexical: 0.43.0
loro-crdt: 1.11.1
loro-crdt: 1.12.0
mermaid: 11.14.0
mime: 4.1.0
mitt: 3.0.1
@ -185,7 +185,7 @@ catalog:
nuqs: 2.8.9
pinyin-pro: 3.28.1
playwright: 1.59.1
postcss: 8.5.10
postcss: 8.5.12
qrcode.react: 4.2.0
qs: 6.15.1
react: 19.2.5

@ -5,7 +5,7 @@ export type ModelAndParameter = {
parameters: Record<string, any>
}

export type MultipleAndConfigs = {
type MultipleAndConfigs = {
multiple: boolean
configs: ModelAndParameter[]
}

@ -8,7 +8,7 @@ import type {
HumanInputFormData,
} from '@/types/workflow'

export type MessageMore = {
type MessageMore = {
time: string
tokens: number
latency: number | string

@ -532,6 +532,7 @@ describe('useEmbeddedChatbot', () => {
})

it('handleChangeConversation updates current conversation and refetches chat list', async () => {
mockStoreState.embeddedConversationId = null
const { result } = await renderWithClient(() => useEmbeddedChatbot(AppSourceType.webApp))

act(() => {
@ -548,6 +549,39 @@ describe('useEmbeddedChatbot', () => {
expect(result.current.clearChatList).toBe(false)
})

// Scenario: URL-provided conversation_id should take precedence over localStorage value.
it('should prioritize URL conversation_id over localStorage', async () => {
localStorage.setItem(CONVERSATION_ID_INFO, JSON.stringify({
'app-1': { 'embedded-user-1': 'stored-conv-id' },
}))
mockStoreState.embeddedConversationId = 'url-conv-id'
mockGetProcessedSystemVariablesFromUrlParams.mockResolvedValue({
user_id: 'embedded-user-1',
conversation_id: 'url-conv-id',
})

const { result } = await renderWithClient(() => useEmbeddedChatbot(AppSourceType.webApp))

await waitFor(() => {
expect(result.current.currentConversationId).toBe('url-conv-id')
})
})

// Scenario: When no URL conversation_id is provided, fall back to localStorage.
it('should fall back to localStorage when no URL conversation_id is provided', async () => {
localStorage.setItem(CONVERSATION_ID_INFO, JSON.stringify({
'app-1': { DEFAULT: 'stored-conv-id' },
}))
mockStoreState.embeddedConversationId = null
mockStoreState.embeddedUserId = null

const { result } = await renderWithClient(() => useEmbeddedChatbot(AppSourceType.webApp))

await waitFor(() => {
expect(result.current.currentConversationId).toBe('stored-conv-id')
})
})

it('handleFeedback invokes updateFeedback service successfully', async () => {
const { updateFeedback } = await import('@/service/share')
const { result } = await renderWithClient(() => useEmbeddedChatbot(AppSourceType.webApp))

@ -113,7 +113,7 @@ export const useEmbeddedChatbot = (appSourceType: AppSourceType, tryAppId?: stri
})
}, [setConversationIdInfo])
const allowResetChat = !conversationId
const currentConversationId = useMemo(() => conversationIdInfo?.[appId || '']?.[userId || 'DEFAULT'] || conversationId || '', [appId, conversationIdInfo, userId, conversationId])
const currentConversationId = useMemo(() => conversationId || conversationIdInfo?.[appId || '']?.[userId || 'DEFAULT'] || '', [appId, conversationIdInfo, userId, conversationId])
const handleConversationIdInfoChange = useCallback((changeConversationId: string) => {
if (appId) {
let prevValue = conversationIdInfo?.[appId || '']

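The one-line reordering above is the behavioural fix the two new tests pin down: a conversation_id supplied via the URL now wins over the value persisted in localStorage, which previously masked it. Restated as a standalone pure function, with simplified types; the names here are illustrative, not taken from the hook:

// Resolution order after the change: explicit URL value first, stored value second.
type ConversationIdInfo = Record<string, Record<string, string>>

export function resolveConversationId(
  urlConversationId: string | null,
  info: ConversationIdInfo | undefined,
  appId: string | undefined,
  userId: string | undefined,
): string {
  // URL-provided conversation_id takes precedence; localStorage is only a fallback.
  return urlConversationId || info?.[appId || '']?.[userId || 'DEFAULT'] || ''
}

// Mirrors the two test scenarios above:
// resolveConversationId('url-conv-id', { 'app-1': { 'embedded-user-1': 'stored-conv-id' } }, 'app-1', 'embedded-user-1') === 'url-conv-id'
// resolveConversationId(null, { 'app-1': { DEFAULT: 'stored-conv-id' } }, 'app-1', undefined) === 'stored-conv-id'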
@ -43,7 +43,7 @@ describe('OptionListItem', () => {
</OptionListItem>,
)

const item = screen.getByRole('listitem')
const item = screen.getByRole('button')
expect(item).toHaveClass('bg-components-button-ghost-bg-hover')
})

@ -54,7 +54,7 @@ describe('OptionListItem', () => {
</OptionListItem>,
)

const item = screen.getByRole('listitem')
const item = screen.getByRole('button')
expect(item).not.toHaveClass('bg-components-button-ghost-bg-hover')
})
})
@ -100,7 +100,7 @@ describe('OptionListItem', () => {
Clickable
</OptionListItem>,
)
fireEvent.click(screen.getByRole('listitem'))
fireEvent.click(screen.getByRole('button'))

expect(handleClick).toHaveBeenCalledTimes(1)
})
@ -111,7 +111,7 @@ describe('OptionListItem', () => {
Item
</OptionListItem>,
)
fireEvent.click(screen.getByRole('listitem'))
fireEvent.click(screen.getByRole('button'))

expect(Element.prototype.scrollIntoView).toHaveBeenCalledWith({ behavior: 'smooth' })
})
@ -126,7 +126,7 @@ describe('OptionListItem', () => {
</OptionListItem>,
)

const item = screen.getByRole('listitem')
const item = screen.getByRole('button')
fireEvent.click(item)
fireEvent.click(item)
fireEvent.click(item)

@ -0,0 +1,28 @@
import { render, screen } from '@testing-library/react'
import OptionList from '../option-list'

describe('OptionList', () => {
it('should render a scrollable list with hidden scrollbar styles', () => {
render(
<OptionList>
<li>Item</li>
</OptionList>,
)

const list = screen.getByRole('list')

expect(list).toHaveClass('overflow-y-auto')
expect(list).toHaveClass('[scrollbar-width:none]')
expect(list).toHaveClass('[&::-webkit-scrollbar]:hidden')
})

it('should append caller className after default classes', () => {
render(
<OptionList className="custom-list">
<li>Item</li>
</OptionList>,
)

expect(screen.getByRole('list')).toHaveClass('custom-list')
})
})
@ -1,4 +1,4 @@
import type { FC } from 'react'
import type { FC, ReactNode } from 'react'
import { cn } from '@langgenius/dify-ui/cn'
import * as React from 'react'
import { useEffect, useRef } from 'react'
@ -7,7 +7,8 @@ type OptionListItemProps = {
isSelected: boolean
onClick: () => void
noAutoScroll?: boolean
} & React.LiHTMLAttributes<HTMLLIElement>
children: ReactNode
}

const OptionListItem: FC<OptionListItemProps> = ({
isSelected,
@ -25,16 +26,21 @@ const OptionListItem: FC<OptionListItemProps> = ({
return (
<li
ref={listItemRef}
className={cn(
'flex cursor-pointer items-center justify-center rounded-md px-1.5 py-1 system-xs-medium text-components-button-ghost-text',
isSelected ? 'bg-components-button-ghost-bg-hover' : 'hover:bg-components-button-ghost-bg-hover',
)}
onClick={() => {
listItemRef.current?.scrollIntoView({ behavior: 'smooth' })
onClick()
}}
>
{children}
<button
type="button"
className={cn(
'flex w-full cursor-pointer items-center justify-center rounded-md px-1.5 py-1 system-xs-medium text-components-button-ghost-text outline-hidden',
'focus-visible:ring-1 focus-visible:ring-components-input-border-hover focus-visible:ring-inset',
isSelected ? 'bg-components-button-ghost-bg-hover' : 'hover:bg-components-button-ghost-bg-hover',
)}
onClick={() => {
listItemRef.current?.scrollIntoView({ behavior: 'smooth' })
onClick()
}}
>
{children}
</button>
</li>
)
}

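The OptionListItem change above reads as an accessibility fix rather than a restyle: the clickable row moves from a bare `<li>` with an onClick to a real `<button type="button">` nested inside the `<li>`, which brings native keyboard focus and activation semantics, hence the new focus-visible ring classes and the test updates from getByRole('listitem') to getByRole('button'). `type="button"` also prevents accidental form submission if the list is ever rendered inside a form.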
@ -0,0 +1,26 @@
import type { HTMLAttributes, ReactNode } from 'react'
import { cn } from '@langgenius/dify-ui/cn'
import * as React from 'react'

type OptionListProps = {
children: ReactNode
} & HTMLAttributes<HTMLUListElement>

const optionListClassName = cn(
'flex h-[208px] flex-col gap-y-0.5 overflow-y-auto pb-[184px]',
'[scrollbar-width:none] [&::-webkit-scrollbar]:hidden',
)

const OptionList = ({
children,
className,
...props
}: OptionListProps) => {
return (
<ul className={cn(optionListClassName, className)} {...props}>
{children}
</ul>
)
}

export default React.memo(OptionList)
@ -64,13 +64,13 @@ describe('TimePickerOptions', () => {
it('should render selected hour in the list', () => {
const props = createOptionsProps({ selectedTime: dayjs('2024-01-01 05:30:00') })
render(<Options {...props} />)
const selectedHour = screen.getAllByRole('listitem').find(item => item.textContent === '05')
const selectedHour = screen.getAllByRole('button').find(item => item.textContent === '05')
expect(selectedHour)!.toHaveClass('bg-components-button-ghost-bg-hover')
})
it('should render selected minute in the list', () => {
const props = createOptionsProps({ selectedTime: dayjs('2024-01-01 05:30:00') })
render(<Options {...props} />)
const selectedMinute = screen.getAllByRole('listitem').find(item => item.textContent === '30')
const selectedMinute = screen.getAllByRole('button').find(item => item.textContent === '30')
expect(selectedMinute)!.toHaveClass('bg-components-button-ghost-bg-hover')
})

Some files were not shown because too many files have changed in this diff