Merge branch 'main' into jzh

This commit is contained in:
JzoNg 2026-04-27 11:45:30 +08:00
commit eaa55aab1e
66 changed files with 1814 additions and 583 deletions

View File

@ -16,7 +16,7 @@ concurrency:
jobs: jobs:
api-unit: api-unit:
name: API Unit Tests name: API Unit Tests
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
env: env:
COVERAGE_FILE: coverage-unit COVERAGE_FILE: coverage-unit
defaults: defaults:
@ -62,7 +62,7 @@ jobs:
api-integration: api-integration:
name: API Integration Tests name: API Integration Tests
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
env: env:
COVERAGE_FILE: coverage-integration COVERAGE_FILE: coverage-integration
STORAGE_TYPE: opendal STORAGE_TYPE: opendal
@ -137,7 +137,7 @@ jobs:
api-coverage: api-coverage:
name: API Coverage name: API Coverage
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
needs: needs:
- api-unit - api-unit
- api-integration - api-integration

View File

@ -13,7 +13,7 @@ permissions:
jobs: jobs:
autofix: autofix:
if: github.repository == 'langgenius/dify' if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Complete merge group check - name: Complete merge group check
if: github.event_name == 'merge_group' if: github.event_name == 'merge_group'

View File

@ -26,6 +26,9 @@ jobs:
build: build:
runs-on: ${{ matrix.runs_on }} runs-on: ${{ matrix.runs_on }}
if: github.repository == 'langgenius/dify' if: github.repository == 'langgenius/dify'
permissions:
contents: read
id-token: write
strategy: strategy:
matrix: matrix:
include: include:
@ -35,28 +38,28 @@ jobs:
build_context: "{{defaultContext}}:api" build_context: "{{defaultContext}}:api"
file: "Dockerfile" file: "Dockerfile"
platform: linux/amd64 platform: linux/amd64
runs_on: ubuntu-latest runs_on: depot-ubuntu-24.04-4
- service_name: "build-api-arm64" - service_name: "build-api-arm64"
image_name_env: "DIFY_API_IMAGE_NAME" image_name_env: "DIFY_API_IMAGE_NAME"
artifact_context: "api" artifact_context: "api"
build_context: "{{defaultContext}}:api" build_context: "{{defaultContext}}:api"
file: "Dockerfile" file: "Dockerfile"
platform: linux/arm64 platform: linux/arm64
runs_on: ubuntu-24.04-arm runs_on: depot-ubuntu-24.04-4
- service_name: "build-web-amd64" - service_name: "build-web-amd64"
image_name_env: "DIFY_WEB_IMAGE_NAME" image_name_env: "DIFY_WEB_IMAGE_NAME"
artifact_context: "web" artifact_context: "web"
build_context: "{{defaultContext}}" build_context: "{{defaultContext}}"
file: "web/Dockerfile" file: "web/Dockerfile"
platform: linux/amd64 platform: linux/amd64
runs_on: ubuntu-latest runs_on: depot-ubuntu-24.04-4
- service_name: "build-web-arm64" - service_name: "build-web-arm64"
image_name_env: "DIFY_WEB_IMAGE_NAME" image_name_env: "DIFY_WEB_IMAGE_NAME"
artifact_context: "web" artifact_context: "web"
build_context: "{{defaultContext}}" build_context: "{{defaultContext}}"
file: "web/Dockerfile" file: "web/Dockerfile"
platform: linux/arm64 platform: linux/arm64
runs_on: ubuntu-24.04-arm runs_on: depot-ubuntu-24.04-4
steps: steps:
- name: Prepare - name: Prepare
@ -70,8 +73,8 @@ jobs:
username: ${{ env.DOCKERHUB_USER }} username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }} password: ${{ env.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx - name: Set up Depot CLI
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0 uses: depot/setup-action@v1
- name: Extract metadata for Docker - name: Extract metadata for Docker
id: meta id: meta
@ -81,16 +84,15 @@ jobs:
- name: Build Docker image - name: Build Docker image
id: build id: build
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0 uses: depot/build-push-action@v1
with: with:
project: ${{ vars.DEPOT_PROJECT_ID }}
context: ${{ matrix.build_context }} context: ${{ matrix.build_context }}
file: ${{ matrix.file }} file: ${{ matrix.file }}
platforms: ${{ matrix.platform }} platforms: ${{ matrix.platform }}
build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }} build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=gha,scope=${{ matrix.service_name }}
cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}
- name: Export digest - name: Export digest
env: env:
@ -108,9 +110,33 @@ jobs:
if-no-files-found: error if-no-files-found: error
retention-days: 1 retention-days: 1
fork-build-validate:
if: github.repository != 'langgenius/dify'
runs-on: ubuntu-24.04
strategy:
matrix:
include:
- service_name: "validate-api-amd64"
build_context: "{{defaultContext}}:api"
file: "Dockerfile"
- service_name: "validate-web-amd64"
build_context: "{{defaultContext}}"
file: "web/Dockerfile"
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
- name: Validate Docker image
uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
with:
push: false
context: ${{ matrix.build_context }}
file: ${{ matrix.file }}
platforms: linux/amd64
create-manifest: create-manifest:
needs: build needs: build
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
if: github.repository == 'langgenius/dify' if: github.repository == 'langgenius/dify'
strategy: strategy:
matrix: matrix:

View File

@ -9,7 +9,7 @@ concurrency:
jobs: jobs:
db-migration-test-postgres: db-migration-test-postgres:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Checkout code - name: Checkout code
@ -59,7 +59,7 @@ jobs:
run: uv run --directory api flask upgrade-db run: uv run --directory api flask upgrade-db
db-migration-test-mysql: db-migration-test-mysql:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Checkout code - name: Checkout code

View File

@ -13,7 +13,7 @@ on:
jobs: jobs:
deploy: deploy:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
if: | if: |
github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/agent-dev' github.event.workflow_run.head_branch == 'deploy/agent-dev'

View File

@ -10,7 +10,7 @@ on:
jobs: jobs:
deploy: deploy:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
if: | if: |
github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/dev' github.event.workflow_run.head_branch == 'deploy/dev'

View File

@ -13,7 +13,7 @@ on:
jobs: jobs:
deploy: deploy:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
if: | if: |
github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/enterprise' github.event.workflow_run.head_branch == 'deploy/enterprise'

View File

@ -10,7 +10,7 @@ on:
jobs: jobs:
deploy: deploy:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
if: | if: |
github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'build/feat/hitl' github.event.workflow_run.head_branch == 'build/feat/hitl'

View File

@ -14,40 +14,69 @@ concurrency:
jobs: jobs:
build-docker: build-docker:
if: github.event.pull_request.head.repo.full_name == github.repository
runs-on: ${{ matrix.runs_on }} runs-on: ${{ matrix.runs_on }}
permissions:
contents: read
id-token: write
strategy: strategy:
matrix: matrix:
include: include:
- service_name: "api-amd64" - service_name: "api-amd64"
platform: linux/amd64 platform: linux/amd64
runs_on: ubuntu-latest runs_on: depot-ubuntu-24.04-4
context: "{{defaultContext}}:api" context: "{{defaultContext}}:api"
file: "Dockerfile" file: "Dockerfile"
- service_name: "api-arm64" - service_name: "api-arm64"
platform: linux/arm64 platform: linux/arm64
runs_on: ubuntu-24.04-arm runs_on: depot-ubuntu-24.04-4
context: "{{defaultContext}}:api" context: "{{defaultContext}}:api"
file: "Dockerfile" file: "Dockerfile"
- service_name: "web-amd64" - service_name: "web-amd64"
platform: linux/amd64 platform: linux/amd64
runs_on: ubuntu-latest runs_on: depot-ubuntu-24.04-4
context: "{{defaultContext}}" context: "{{defaultContext}}"
file: "web/Dockerfile" file: "web/Dockerfile"
- service_name: "web-arm64" - service_name: "web-arm64"
platform: linux/arm64 platform: linux/arm64
runs_on: ubuntu-24.04-arm runs_on: depot-ubuntu-24.04-4
context: "{{defaultContext}}" context: "{{defaultContext}}"
file: "web/Dockerfile" file: "web/Dockerfile"
steps: steps:
- name: Set up Docker Buildx - name: Set up Depot CLI
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0 uses: depot/setup-action@v1
- name: Build Docker Image - name: Build Docker Image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0 uses: depot/build-push-action@v1
with: with:
project: ${{ vars.DEPOT_PROJECT_ID }}
push: false push: false
context: ${{ matrix.context }} context: ${{ matrix.context }}
file: ${{ matrix.file }} file: ${{ matrix.file }}
platforms: ${{ matrix.platform }} platforms: ${{ matrix.platform }}
cache-from: type=gha
cache-to: type=gha,mode=max
build-docker-fork:
if: github.event.pull_request.head.repo.full_name != github.repository
runs-on: ubuntu-24.04
permissions:
contents: read
strategy:
matrix:
include:
- service_name: "api-amd64"
context: "{{defaultContext}}:api"
file: "Dockerfile"
- service_name: "web-amd64"
context: "{{defaultContext}}"
file: "web/Dockerfile"
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
- name: Build Docker Image
uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
with:
push: false
context: ${{ matrix.context }}
file: ${{ matrix.file }}
platforms: linux/amd64

View File

@ -7,7 +7,7 @@ jobs:
permissions: permissions:
contents: read contents: read
pull-requests: write pull-requests: write
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1 - uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
with: with:

View File

@ -23,7 +23,7 @@ concurrency:
jobs: jobs:
pre_job: pre_job:
name: Skip Duplicate Checks name: Skip Duplicate Checks
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
outputs: outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip || 'false' }} should_skip: ${{ steps.skip_check.outputs.should_skip || 'false' }}
steps: steps:
@ -39,7 +39,7 @@ jobs:
name: Check Changed Files name: Check Changed Files
needs: pre_job needs: pre_job
if: needs.pre_job.outputs.should_skip != 'true' if: needs.pre_job.outputs.should_skip != 'true'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
outputs: outputs:
api-changed: ${{ steps.changes.outputs.api }} api-changed: ${{ steps.changes.outputs.api }}
e2e-changed: ${{ steps.changes.outputs.e2e }} e2e-changed: ${{ steps.changes.outputs.e2e }}
@ -141,7 +141,7 @@ jobs:
- pre_job - pre_job
- check-changes - check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.api-changed != 'true' if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.api-changed != 'true'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Report skipped API tests - name: Report skipped API tests
run: echo "No API-related changes detected; skipping API tests." run: echo "No API-related changes detected; skipping API tests."
@ -154,7 +154,7 @@ jobs:
- check-changes - check-changes
- api-tests-run - api-tests-run
- api-tests-skip - api-tests-skip
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Finalize API Tests status - name: Finalize API Tests status
env: env:
@ -201,7 +201,7 @@ jobs:
- pre_job - pre_job
- check-changes - check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.web-changed != 'true' if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.web-changed != 'true'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Report skipped web tests - name: Report skipped web tests
run: echo "No web-related changes detected; skipping web tests." run: echo "No web-related changes detected; skipping web tests."
@ -214,7 +214,7 @@ jobs:
- check-changes - check-changes
- web-tests-run - web-tests-run
- web-tests-skip - web-tests-skip
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Finalize Web Tests status - name: Finalize Web Tests status
env: env:
@ -260,7 +260,7 @@ jobs:
- pre_job - pre_job
- check-changes - check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.e2e-changed != 'true' if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.e2e-changed != 'true'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Report skipped web full-stack e2e - name: Report skipped web full-stack e2e
run: echo "No E2E-related changes detected; skipping web full-stack E2E." run: echo "No E2E-related changes detected; skipping web full-stack E2E."
@ -273,7 +273,7 @@ jobs:
- check-changes - check-changes
- web-e2e-run - web-e2e-run
- web-e2e-skip - web-e2e-skip
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Finalize Web Full-Stack E2E status - name: Finalize Web Full-Stack E2E status
env: env:
@ -325,7 +325,7 @@ jobs:
- pre_job - pre_job
- check-changes - check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.vdb-changed != 'true' if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.vdb-changed != 'true'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Report skipped VDB tests - name: Report skipped VDB tests
run: echo "No VDB-related changes detected; skipping VDB tests." run: echo "No VDB-related changes detected; skipping VDB tests."
@ -338,7 +338,7 @@ jobs:
- check-changes - check-changes
- vdb-tests-run - vdb-tests-run
- vdb-tests-skip - vdb-tests-skip
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Finalize VDB Tests status - name: Finalize VDB Tests status
env: env:
@ -384,7 +384,7 @@ jobs:
- pre_job - pre_job
- check-changes - check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.migration-changed != 'true' if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.migration-changed != 'true'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Report skipped DB migration tests - name: Report skipped DB migration tests
run: echo "No migration-related changes detected; skipping DB migration tests." run: echo "No migration-related changes detected; skipping DB migration tests."
@ -397,7 +397,7 @@ jobs:
- check-changes - check-changes
- db-migration-test-run - db-migration-test-run
- db-migration-test-skip - db-migration-test-skip
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Finalize DB Migration Test status - name: Finalize DB Migration Test status
env: env:

View File

@ -12,7 +12,7 @@ permissions: {}
jobs: jobs:
comment: comment:
name: Comment PR with pyrefly diff name: Comment PR with pyrefly diff
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
permissions: permissions:
actions: read actions: read
contents: read contents: read

View File

@ -10,7 +10,7 @@ permissions:
jobs: jobs:
pyrefly-diff: pyrefly-diff:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
permissions: permissions:
contents: read contents: read
issues: write issues: write

View File

@ -12,7 +12,7 @@ permissions: {}
jobs: jobs:
comment: comment:
name: Comment PR with type coverage name: Comment PR with type coverage
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
permissions: permissions:
actions: read actions: read
contents: read contents: read

View File

@ -10,7 +10,7 @@ permissions:
jobs: jobs:
pyrefly-type-coverage: pyrefly-type-coverage:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
permissions: permissions:
contents: read contents: read
issues: write issues: write

View File

@ -16,7 +16,7 @@ jobs:
name: Validate PR title name: Validate PR title
permissions: permissions:
pull-requests: read pull-requests: read
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Complete merge group check - name: Complete merge group check
if: github.event_name == 'merge_group' if: github.event_name == 'merge_group'

View File

@ -12,7 +12,7 @@ on:
jobs: jobs:
stale: stale:
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
permissions: permissions:
issues: write issues: write
pull-requests: write pull-requests: write

View File

@ -15,7 +15,7 @@ permissions:
jobs: jobs:
python-style: python-style:
name: Python Style name: Python Style
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Checkout code - name: Checkout code
@ -57,7 +57,7 @@ jobs:
web-style: web-style:
name: Web Style name: Web Style
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
defaults: defaults:
run: run:
working-directory: ./web working-directory: ./web
@ -131,7 +131,7 @@ jobs:
superlinter: superlinter:
name: SuperLinter name: SuperLinter
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
steps: steps:
- name: Checkout code - name: Checkout code

View File

@ -18,7 +18,7 @@ concurrency:
jobs: jobs:
build: build:
name: unit test for Node.js SDK name: unit test for Node.js SDK
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
defaults: defaults:
run: run:

View File

@ -35,7 +35,7 @@ concurrency:
jobs: jobs:
translate: translate:
if: github.repository == 'langgenius/dify' if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
timeout-minutes: 120 timeout-minutes: 120
steps: steps:
@ -158,7 +158,7 @@ jobs:
- name: Run Claude Code for Translation Sync - name: Run Claude Code for Translation Sync
if: steps.context.outputs.CHANGED_FILES != '' if: steps.context.outputs.CHANGED_FILES != ''
uses: anthropics/claude-code-action@38ec876110f9fbf8b950c79f534430740c3ac009 # v1.0.101 uses: anthropics/claude-code-action@567fe954a4527e81f132d87d1bdbcc94f7737434 # v1.0.107
with: with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@ -16,7 +16,7 @@ concurrency:
jobs: jobs:
trigger: trigger:
if: github.repository == 'langgenius/dify' if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
timeout-minutes: 5 timeout-minutes: 5
steps: steps:

View File

@ -16,7 +16,7 @@ jobs:
test: test:
name: Full VDB Tests name: Full VDB Tests
if: github.repository == 'langgenius/dify' if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
strategy: strategy:
matrix: matrix:
python-version: python-version:

View File

@ -13,7 +13,7 @@ concurrency:
jobs: jobs:
test: test:
name: VDB Smoke Tests name: VDB Smoke Tests
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
strategy: strategy:
matrix: matrix:
python-version: python-version:

View File

@ -13,7 +13,7 @@ concurrency:
jobs: jobs:
test: test:
name: Web Full-Stack E2E name: Web Full-Stack E2E
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
defaults: defaults:
run: run:
shell: bash shell: bash

View File

@ -16,7 +16,7 @@ concurrency:
jobs: jobs:
test: test:
name: Web Tests (${{ matrix.shardIndex }}/${{ matrix.shardTotal }}) name: Web Tests (${{ matrix.shardIndex }}/${{ matrix.shardTotal }})
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
env: env:
VITEST_COVERAGE_SCOPE: app-components VITEST_COVERAGE_SCOPE: app-components
strategy: strategy:
@ -54,7 +54,7 @@ jobs:
name: Merge Test Reports name: Merge Test Reports
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
needs: [test] needs: [test]
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
env: env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
defaults: defaults:
@ -92,7 +92,7 @@ jobs:
dify-ui-test: dify-ui-test:
name: dify-ui Tests name: dify-ui Tests
runs-on: ubuntu-latest runs-on: depot-ubuntu-24.04
env: env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
defaults: defaults:

View File

@ -551,6 +551,7 @@ class RetrievalService:
child_index_nodes = session.execute(child_chunk_stmt).scalars().all() child_index_nodes = session.execute(child_chunk_stmt).scalars().all()
for i in child_index_nodes: for i in child_index_nodes:
assert i.index_node_id
segment_ids.append(i.segment_id) segment_ids.append(i.segment_id)
if i.segment_id in child_chunk_map: if i.segment_id in child_chunk_map:
child_chunk_map[i.segment_id].append(i) child_chunk_map[i.segment_id].append(i)

View File

@ -11,6 +11,7 @@ from core.rag.models.document import AttachmentDocument, Document
from extensions.ext_database import db from extensions.ext_database import db
from graphon.model_runtime.entities.model_entities import ModelType from graphon.model_runtime.entities.model_entities import ModelType
from models.dataset import ChildChunk, Dataset, DocumentSegment, SegmentAttachmentBinding from models.dataset import ChildChunk, Dataset, DocumentSegment, SegmentAttachmentBinding
from models.enums import SegmentType
class DatasetDocumentStore: class DatasetDocumentStore:
@ -127,6 +128,7 @@ class DatasetDocumentStore:
if save_child: if save_child:
if doc.children: if doc.children:
for position, child in enumerate(doc.children, start=1): for position, child in enumerate(doc.children, start=1):
assert self._document_id
child_segment = ChildChunk( child_segment = ChildChunk(
tenant_id=self._dataset.tenant_id, tenant_id=self._dataset.tenant_id,
dataset_id=self._dataset.id, dataset_id=self._dataset.id,
@ -137,7 +139,7 @@ class DatasetDocumentStore:
index_node_hash=child.metadata.get("doc_hash"), index_node_hash=child.metadata.get("doc_hash"),
content=child.page_content, content=child.page_content,
word_count=len(child.page_content), word_count=len(child.page_content),
type="automatic", type=SegmentType.AUTOMATIC,
created_by=self._user_id, created_by=self._user_id,
) )
db.session.add(child_segment) db.session.add(child_segment)
@ -163,6 +165,7 @@ class DatasetDocumentStore:
) )
# add new child chunks # add new child chunks
for position, child in enumerate(doc.children, start=1): for position, child in enumerate(doc.children, start=1):
assert self._document_id
child_segment = ChildChunk( child_segment = ChildChunk(
tenant_id=self._dataset.tenant_id, tenant_id=self._dataset.tenant_id,
dataset_id=self._dataset.id, dataset_id=self._dataset.id,
@ -173,7 +176,7 @@ class DatasetDocumentStore:
index_node_hash=child.metadata.get("doc_hash"), index_node_hash=child.metadata.get("doc_hash"),
content=child.page_content, content=child.page_content,
word_count=len(child.page_content), word_count=len(child.page_content),
type="automatic", type=SegmentType.AUTOMATIC,
created_by=self._user_id, created_by=self._user_id,
) )
db.session.add(child_segment) db.session.add(child_segment)

View File

@ -1,56 +1,17 @@
import logging
from dataclasses import dataclass
from enum import StrEnum, auto from enum import StrEnum, auto
logger = logging.getLogger(__name__)
@dataclass
class QuotaCharge:
"""
Result of a quota consumption operation.
Attributes:
success: Whether the quota charge succeeded
charge_id: UUID for refund, or None if failed/disabled
"""
success: bool
charge_id: str | None
_quota_type: "QuotaType"
def refund(self) -> None:
"""
Refund this quota charge.
Safe to call even if charge failed or was disabled.
This method guarantees no exceptions will be raised.
"""
if self.charge_id:
self._quota_type.refund(self.charge_id)
logger.info("Refunded quota for %s with charge_id: %s", self._quota_type.value, self.charge_id)
class QuotaType(StrEnum): class QuotaType(StrEnum):
""" """
Supported quota types for tenant feature usage. Supported quota types for tenant feature usage.
Add additional types here whenever new billable features become available.
""" """
# Trigger execution quota
TRIGGER = auto() TRIGGER = auto()
# Workflow execution quota
WORKFLOW = auto() WORKFLOW = auto()
UNLIMITED = auto() UNLIMITED = auto()
@property @property
def billing_key(self) -> str: def billing_key(self) -> str:
"""
Get the billing key for the feature.
"""
match self: match self:
case QuotaType.TRIGGER: case QuotaType.TRIGGER:
return "trigger_event" return "trigger_event"
@ -58,152 +19,3 @@ class QuotaType(StrEnum):
return "api_rate_limit" return "api_rate_limit"
case _: case _:
raise ValueError(f"Invalid quota type: {self}") raise ValueError(f"Invalid quota type: {self}")
def consume(self, tenant_id: str, amount: int = 1) -> QuotaCharge:
"""
Consume quota for the feature.
Args:
tenant_id: The tenant identifier
amount: Amount to consume (default: 1)
Returns:
QuotaCharge with success status and charge_id for refund
Raises:
QuotaExceededError: When quota is insufficient
"""
from configs import dify_config
from services.billing_service import BillingService
from services.errors.app import QuotaExceededError
if not dify_config.BILLING_ENABLED:
logger.debug("Billing disabled, allowing request for %s", tenant_id)
return QuotaCharge(success=True, charge_id=None, _quota_type=self)
logger.info("Consuming %d %s quota for tenant %s", amount, self.value, tenant_id)
if amount <= 0:
raise ValueError("Amount to consume must be greater than 0")
try:
response = BillingService.update_tenant_feature_plan_usage(tenant_id, self.billing_key, delta=amount)
if response.get("result") != "success":
logger.warning(
"Failed to consume quota for %s, feature %s details: %s",
tenant_id,
self.value,
response.get("detail"),
)
raise QuotaExceededError(feature=self.value, tenant_id=tenant_id, required=amount)
charge_id = response.get("history_id")
logger.debug(
"Successfully consumed %d %s quota for tenant %s, charge_id: %s",
amount,
self.value,
tenant_id,
charge_id,
)
return QuotaCharge(success=True, charge_id=charge_id, _quota_type=self)
except QuotaExceededError:
raise
except Exception:
# fail-safe: allow request on billing errors
logger.exception("Failed to consume quota for %s, feature %s", tenant_id, self.value)
return unlimited()
def check(self, tenant_id: str, amount: int = 1) -> bool:
"""
Check if tenant has sufficient quota without consuming.
Args:
tenant_id: The tenant identifier
amount: Amount to check (default: 1)
Returns:
True if quota is sufficient, False otherwise
"""
from configs import dify_config
if not dify_config.BILLING_ENABLED:
return True
if amount <= 0:
raise ValueError("Amount to check must be greater than 0")
try:
remaining = self.get_remaining(tenant_id)
return remaining >= amount if remaining != -1 else True
except Exception:
logger.exception("Failed to check quota for %s, feature %s", tenant_id, self.value)
# fail-safe: allow request on billing errors
return True
def refund(self, charge_id: str) -> None:
"""
Refund quota using charge_id from consume().
This method guarantees no exceptions will be raised.
All errors are logged but silently handled.
Args:
charge_id: The UUID returned from consume()
"""
try:
from configs import dify_config
from services.billing_service import BillingService
if not dify_config.BILLING_ENABLED:
return
if not charge_id:
logger.warning("Cannot refund: charge_id is empty")
return
logger.info("Refunding %s quota with charge_id: %s", self.value, charge_id)
response = BillingService.refund_tenant_feature_plan_usage(charge_id)
if response.get("result") == "success":
logger.debug("Successfully refunded %s quota, charge_id: %s", self.value, charge_id)
else:
logger.warning("Refund failed for charge_id: %s", charge_id)
except Exception:
# Catch ALL exceptions - refund must never fail
logger.exception("Failed to refund quota for charge_id: %s", charge_id)
# Don't raise - refund is best-effort and must be silent
def get_remaining(self, tenant_id: str) -> int:
"""
Get remaining quota for the tenant.
Args:
tenant_id: The tenant identifier
Returns:
Remaining quota amount
"""
from services.billing_service import BillingService
try:
usage_info = BillingService.get_tenant_feature_plan_usage(tenant_id, self.billing_key)
# Assuming the API returns a dict with 'remaining' or 'limit' and 'used'
if isinstance(usage_info, dict):
return usage_info.get("remaining", 0)
# If it returns a simple number, treat it as remaining
return int(usage_info) if usage_info else 0
except Exception:
logger.exception("Failed to get remaining quota for %s, feature %s", tenant_id, self.value)
return -1
def unlimited() -> QuotaCharge:
"""
Return a quota charge for unlimited quota.
This is useful for features that are not subject to quota limits, such as the UNLIMITED quota type.
"""
return QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.UNLIMITED)

View File

@ -1036,7 +1036,7 @@ class DocumentSegment(Base):
return attachment_list return attachment_list
class ChildChunk(Base): class ChildChunk(TypeBase):
__tablename__ = "child_chunks" __tablename__ = "child_chunks"
__table_args__ = ( __table_args__ = (
sa.PrimaryKeyConstraint("id", name="child_chunk_pkey"), sa.PrimaryKeyConstraint("id", name="child_chunk_pkey"),
@ -1046,29 +1046,42 @@ class ChildChunk(Base):
    )
    # initial fields
-    id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4()))
-    tenant_id = mapped_column(StringUUID, nullable=False)
-    dataset_id = mapped_column(StringUUID, nullable=False)
-    document_id = mapped_column(StringUUID, nullable=False)
-    segment_id = mapped_column(StringUUID, nullable=False)
+    id: Mapped[str] = mapped_column(StringUUID, nullable=False, default_factory=lambda: str(uuid4()), init=False)
+    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    document_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    segment_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    position: Mapped[int] = mapped_column(sa.Integer, nullable=False)
-    content = mapped_column(LongText, nullable=False)
+    content: Mapped[str] = mapped_column(LongText, nullable=False)
    word_count: Mapped[int] = mapped_column(sa.Integer, nullable=False)
    # indexing fields
-    index_node_id = mapped_column(String(255), nullable=True)
-    index_node_hash = mapped_column(String(255), nullable=True)
-    type: Mapped[SegmentType] = mapped_column(
-        EnumText(SegmentType, length=255), nullable=False, server_default=sa.text("'automatic'")
-    )
-    created_by = mapped_column(StringUUID, nullable=False)
-    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())
-    updated_by = mapped_column(StringUUID, nullable=True)
+    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime, nullable=False, server_default=sa.func.current_timestamp(), init=False
+    )
+    updated_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, init=False)
    updated_at: Mapped[datetime] = mapped_column(
-        DateTime, nullable=False, server_default=sa.func.current_timestamp(), onupdate=func.current_timestamp()
+        DateTime,
+        nullable=False,
+        server_default=sa.func.current_timestamp(),
+        onupdate=func.current_timestamp(),
+        init=False,
    )
-    indexing_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-    completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-    error = mapped_column(LongText, nullable=True)
+    indexing_at: Mapped[datetime | None] = mapped_column(
+        DateTime, nullable=True, insert_default=None, server_default=None, init=False
+    )
+    completed_at: Mapped[datetime | None] = mapped_column(
+        DateTime, nullable=True, insert_default=None, server_default=None, init=False
+    )
+    index_node_id: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
+    index_node_hash: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
+    type: Mapped[SegmentType] = mapped_column(
+        EnumText(SegmentType, length=255),
+        nullable=False,
+        server_default=sa.text("'automatic'"),
+        default=SegmentType.AUTOMATIC,
+    )
+    error: Mapped[str | None] = mapped_column(LongText, nullable=True, init=False)

    @property
    def dataset(self):

View File

@ -1867,15 +1867,18 @@ class MessageAnnotation(TypeBase):
    )
    id: Mapped[str] = mapped_column(
-        StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
+        StringUUID,
+        insert_default=lambda: str(uuid4()),
+        default_factory=lambda: str(uuid4()),
+        init=False,
    )
    app_id: Mapped[str] = mapped_column(StringUUID)
    question: Mapped[str] = mapped_column(LongText, nullable=False)
    content: Mapped[str] = mapped_column(LongText, nullable=False)
-    hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"), init=False)
    account_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    conversation_id: Mapped[str | None] = mapped_column(StringUUID, sa.ForeignKey("conversations.id"), default=None)
    message_id: Mapped[str | None] = mapped_column(StringUUID, default=None)
+    hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"), default=0)
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )

View File

@ -6,9 +6,10 @@ requires-python = "~=3.12.0"
dependencies = [ dependencies = [
# Legacy: mature and widely deployed # Legacy: mature and widely deployed
"bleach>=6.3.0", "bleach>=6.3.0",
"boto3>=1.42.91", "boto3>=1.42.96",
"celery>=5.6.3", "celery>=5.6.3",
"croniter>=6.2.2", "croniter>=6.2.2",
"flask>=3.1.3,<4.0.0",
"flask-cors>=6.0.2", "flask-cors>=6.0.2",
"gevent>=26.4.0", "gevent>=26.4.0",
"gevent-websocket>=0.10.1", "gevent-websocket>=0.10.1",
@ -16,7 +17,7 @@ dependencies = [
"google-api-python-client>=2.194.0", "google-api-python-client>=2.194.0",
"gunicorn>=25.3.0", "gunicorn>=25.3.0",
"psycogreen>=1.0.2", "psycogreen>=1.0.2",
"psycopg2-binary>=2.9.11", "psycopg2-binary>=2.9.12",
"python-socketio>=5.13.0", "python-socketio>=5.13.0",
"redis[hiredis]>=7.4.0", "redis[hiredis]>=7.4.0",
"sendgrid>=6.12.5", "sendgrid>=6.12.5",
@ -32,13 +33,13 @@ dependencies = [
"flask-restx>=1.3.2,<2.0.0", "flask-restx>=1.3.2,<2.0.0",
"google-cloud-aiplatform>=1.148.1,<2.0.0", "google-cloud-aiplatform>=1.148.1,<2.0.0",
"httpx[socks]>=0.28.1,<1.0.0", "httpx[socks]>=0.28.1,<1.0.0",
"opentelemetry-distro>=0.62b0,<1.0.0", "opentelemetry-distro>=0.62b1,<1.0.0",
"opentelemetry-instrumentation-celery>=0.62b0,<1.0.0", "opentelemetry-instrumentation-celery>=0.62b0,<1.0.0",
"opentelemetry-instrumentation-flask>=0.62b0,<1.0.0", "opentelemetry-instrumentation-flask>=0.62b0,<1.0.0",
"opentelemetry-instrumentation-httpx>=0.62b0,<1.0.0", "opentelemetry-instrumentation-httpx>=0.62b0,<1.0.0",
"opentelemetry-instrumentation-redis>=0.62b0,<1.0.0", "opentelemetry-instrumentation-redis>=0.62b0,<1.0.0",
"opentelemetry-instrumentation-sqlalchemy>=0.62b0,<1.0.0", "opentelemetry-instrumentation-sqlalchemy>=0.62b0,<1.0.0",
"opentelemetry-propagator-b3>=1.41.0,<2.0.0", "opentelemetry-propagator-b3>=1.41.1,<2.0.0",
"readabilipy>=0.3.0,<1.0.0", "readabilipy>=0.3.0,<1.0.0",
"resend>=2.27.0,<3.0.0", "resend>=2.27.0,<3.0.0",
@ -117,7 +118,7 @@ dev = [
"faker>=40.15.0", "faker>=40.15.0",
"lxml-stubs>=0.5.1", "lxml-stubs>=0.5.1",
"basedpyright>=1.39.3", "basedpyright>=1.39.3",
"ruff>=0.15.11", "ruff>=0.15.12",
"pytest>=9.0.3", "pytest>=9.0.3",
"pytest-benchmark>=5.2.3", "pytest-benchmark>=5.2.3",
"pytest-cov>=7.1.0", "pytest-cov>=7.1.0",
@ -144,7 +145,7 @@ dev = [
"types-pexpect>=4.9.0", "types-pexpect>=4.9.0",
"types-protobuf>=7.34.1", "types-protobuf>=7.34.1",
"types-psutil>=7.2.2", "types-psutil>=7.2.2",
"types-psycopg2>=2.9.21", "types-psycopg2>=2.9.21.20260422",
"types-pygments>=2.20.0", "types-pygments>=2.20.0",
"types-pymysql>=1.1.0", "types-pymysql>=1.1.0",
"types-python-dateutil>=2.9.0", "types-python-dateutil>=2.9.0",
@ -157,9 +158,9 @@ dev = [
"types-tensorflow>=2.18.0.20260408", "types-tensorflow>=2.18.0.20260408",
"types-tqdm>=4.67.3.20260408", "types-tqdm>=4.67.3.20260408",
"types-ujson>=5.10.0", "types-ujson>=5.10.0",
"boto3-stubs>=1.42.92", "boto3-stubs>=1.42.96",
"types-jmespath>=1.1.0.20260408", "types-jmespath>=1.1.0.20260408",
"hypothesis>=6.152.1", "hypothesis>=6.152.3",
"types_pyOpenSSL>=24.1.0", "types_pyOpenSSL>=24.1.0",
"types_cffi>=2.0.0.20260408", "types_cffi>=2.0.0.20260408",
"types_setuptools>=82.0.0.20260408", "types_setuptools>=82.0.0.20260408",
@ -169,12 +170,12 @@ dev = [
"import-linter>=2.3", "import-linter>=2.3",
"types-redis>=4.6.0.20241004", "types-redis>=4.6.0.20241004",
"celery-types>=0.23.0", "celery-types>=0.23.0",
"mypy>=1.20.1", "mypy>=1.20.2",
# "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved. # "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved.
"pytest-timeout>=2.4.0", "pytest-timeout>=2.4.0",
"pytest-xdist>=3.8.0", "pytest-xdist>=3.8.0",
"pyrefly>=0.62.0", "pyrefly>=0.62.0",
"xinference-client>=2.5.0", "xinference-client>=2.7.0",
] ]
############################################################ ############################################################
@ -184,12 +185,12 @@ dev = [
storage = [ storage = [
"azure-storage-blob>=12.28.0", "azure-storage-blob>=12.28.0",
"bce-python-sdk>=0.9.70", "bce-python-sdk>=0.9.70",
"cos-python-sdk-v5>=1.9.41", "cos-python-sdk-v5>=1.9.42",
"esdk-obs-python>=3.22.2", "esdk-obs-python>=3.22.2",
"google-cloud-storage>=3.10.1", "google-cloud-storage>=3.10.1",
"opendal>=0.46.0", "opendal>=0.46.0",
"oss2>=2.19.1", "oss2>=2.19.1",
"supabase>=2.28.3", "supabase>=2.29.0",
"tos>=2.9.0", "tos>=2.9.0",
] ]
@ -266,7 +267,7 @@ vdb-vastbase = ["dify-vdb-vastbase"]
vdb-vikingdb = ["dify-vdb-vikingdb"] vdb-vikingdb = ["dify-vdb-vikingdb"]
vdb-weaviate = ["dify-vdb-weaviate"] vdb-weaviate = ["dify-vdb-weaviate"]
# Optional client used by some tests / integrations (not a vector backend plugin) # Optional client used by some tests / integrations (not a vector backend plugin)
vdb-xinference = ["xinference-client>=2.5.0"] vdb-xinference = ["xinference-client>=2.7.0"]
trace-all = [ trace-all = [
"dify-trace-aliyun", "dify-trace-aliyun",

View File

@ -133,7 +133,14 @@ class AppAnnotationService:
raise ValueError("'question' is required when 'message_id' is not provided") raise ValueError("'question' is required when 'message_id' is not provided")
question = maybe_question question = maybe_question
annotation = MessageAnnotation(app_id=app.id, content=answer, question=question, account_id=current_user.id) annotation = MessageAnnotation(
app_id=app.id,
conversation_id=None,
message_id=None,
content=answer,
question=question,
account_id=current_user.id,
)
db.session.add(annotation) db.session.add(annotation)
db.session.commit() db.session.commit()

View File

@ -18,12 +18,13 @@ from core.app.features.rate_limiting import RateLimit
from core.app.features.rate_limiting.rate_limit import rate_limit_context from core.app.features.rate_limiting.rate_limit import rate_limit_context
from core.app.layers.pause_state_persist_layer import PauseStateLayerConfig from core.app.layers.pause_state_persist_layer import PauseStateLayerConfig
from core.db import session_factory from core.db import session_factory
from enums.quota_type import QuotaType, unlimited from enums.quota_type import QuotaType
from extensions.otel import AppGenerateHandler, trace_span from extensions.otel import AppGenerateHandler, trace_span
from models.model import Account, App, AppMode, EndUser from models.model import Account, App, AppMode, EndUser
from models.workflow import Workflow, WorkflowRun from models.workflow import Workflow, WorkflowRun
from services.errors.app import QuotaExceededError, WorkflowIdFormatError, WorkflowNotFoundError from services.errors.app import QuotaExceededError, WorkflowIdFormatError, WorkflowNotFoundError
from services.errors.llm import InvokeRateLimitError from services.errors.llm import InvokeRateLimitError
from services.quota_service import QuotaService, unlimited
from services.workflow_service import WorkflowService from services.workflow_service import WorkflowService
from tasks.app_generate.workflow_execute_task import AppExecutionParams, workflow_based_app_execution_task from tasks.app_generate.workflow_execute_task import AppExecutionParams, workflow_based_app_execution_task
@ -106,7 +107,7 @@ class AppGenerateService:
quota_charge = unlimited() quota_charge = unlimited()
if dify_config.BILLING_ENABLED: if dify_config.BILLING_ENABLED:
try: try:
quota_charge = QuotaType.WORKFLOW.consume(app_model.tenant_id) quota_charge = QuotaService.reserve(QuotaType.WORKFLOW, app_model.tenant_id)
except QuotaExceededError: except QuotaExceededError:
raise InvokeRateLimitError(f"Workflow execution quota limit reached for tenant {app_model.tenant_id}") raise InvokeRateLimitError(f"Workflow execution quota limit reached for tenant {app_model.tenant_id}")
@ -116,6 +117,7 @@ class AppGenerateService:
request_id = RateLimit.gen_request_key() request_id = RateLimit.gen_request_key()
try: try:
request_id = rate_limit.enter(request_id) request_id = rate_limit.enter(request_id)
quota_charge.commit()
effective_mode = ( effective_mode = (
AppMode.AGENT_CHAT if app_model.is_agent and app_model.mode != AppMode.AGENT_CHAT else app_model.mode AppMode.AGENT_CHAT if app_model.is_agent and app_model.mode != AppMode.AGENT_CHAT else app_model.mode
) )

View File

@ -22,6 +22,7 @@ from models.trigger import WorkflowTriggerLog, WorkflowTriggerLogDict
from models.workflow import Workflow from models.workflow import Workflow
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
from services.errors.app import QuotaExceededError, WorkflowNotFoundError, WorkflowQuotaLimitError from services.errors.app import QuotaExceededError, WorkflowNotFoundError, WorkflowQuotaLimitError
from services.quota_service import QuotaService, unlimited
from services.workflow.entities import AsyncTriggerResponse, TriggerData, WorkflowTaskData from services.workflow.entities import AsyncTriggerResponse, TriggerData, WorkflowTaskData
from services.workflow.queue_dispatcher import QueueDispatcherManager, QueuePriority from services.workflow.queue_dispatcher import QueueDispatcherManager, QueuePriority
from services.workflow_service import WorkflowService from services.workflow_service import WorkflowService
@ -88,7 +89,10 @@ class AsyncWorkflowService:
raise WorkflowNotFoundError(f"App not found: {trigger_data.app_id}") raise WorkflowNotFoundError(f"App not found: {trigger_data.app_id}")
# 2. Get workflow # 2. Get workflow
workflow = cls._get_workflow(workflow_service, app_model, trigger_data.workflow_id) workflow = cls._get_workflow(workflow_service, app_model, trigger_data.workflow_id, session=session)
# commit the read-only session before starting the billing RPC call
session.commit()
# 3. Get dispatcher based on tenant subscription # 3. Get dispatcher based on tenant subscription
dispatcher = dispatcher_manager.get_dispatcher(trigger_data.tenant_id) dispatcher = dispatcher_manager.get_dispatcher(trigger_data.tenant_id)
@ -131,9 +135,10 @@ class AsyncWorkflowService:
trigger_log = trigger_log_repo.create(trigger_log) trigger_log = trigger_log_repo.create(trigger_log)
session.commit() session.commit()
# 7. Check and consume quota # 7. Reserve quota (commit after successful dispatch)
quota_charge = unlimited()
try: try:
QuotaType.WORKFLOW.consume(trigger_data.tenant_id) quota_charge = QuotaService.reserve(QuotaType.WORKFLOW, trigger_data.tenant_id)
except QuotaExceededError as e: except QuotaExceededError as e:
# Update trigger log status # Update trigger log status
trigger_log.status = WorkflowTriggerStatus.RATE_LIMITED trigger_log.status = WorkflowTriggerStatus.RATE_LIMITED
@ -153,13 +158,18 @@ class AsyncWorkflowService:
        # 9. Dispatch to appropriate queue
        task_data_dict = task_data.model_dump(mode="json")
-        task: AsyncResult[Any] | None = None
-        if queue_name == QueuePriority.PROFESSIONAL:
-            task = execute_workflow_professional.delay(task_data_dict)
-        elif queue_name == QueuePriority.TEAM:
-            task = execute_workflow_team.delay(task_data_dict)
-        else:  # SANDBOX
-            task = execute_workflow_sandbox.delay(task_data_dict)
+        try:
+            task: AsyncResult[Any] | None = None
+            if queue_name == QueuePriority.PROFESSIONAL:
+                task = execute_workflow_professional.delay(task_data_dict)
+            elif queue_name == QueuePriority.TEAM:
+                task = execute_workflow_team.delay(task_data_dict)
+            else:  # SANDBOX
+                task = execute_workflow_sandbox.delay(task_data_dict)
+            quota_charge.commit()
+        except Exception:
+            quota_charge.refund()
+            raise
# 10. Update trigger log with task info # 10. Update trigger log with task info
trigger_log.status = WorkflowTriggerStatus.QUEUED trigger_log.status = WorkflowTriggerStatus.QUEUED
@ -295,13 +305,21 @@ class AsyncWorkflowService:
return [log.to_dict() for log in logs] return [log.to_dict() for log in logs]
@staticmethod @staticmethod
-    def _get_workflow(workflow_service: WorkflowService, app_model: App, workflow_id: str | None = None) -> Workflow:
+    def _get_workflow(
+        workflow_service: WorkflowService,
+        app_model: App,
+        workflow_id: str | None = None,
+        session: Session | None = None,
+    ) -> Workflow:
""" """
Get workflow for the app Get workflow for the app
Args: Args:
app_model: App model instance app_model: App model instance
workflow_id: Optional specific workflow ID workflow_id: Optional specific workflow ID
session: Reuse this SQLAlchemy session for the lookup when provided,
so the caller's explicit session bears the connection cost
instead of Flask's request-scoped ``db.session``.
Returns: Returns:
Workflow instance Workflow instance
@ -311,12 +329,12 @@ class AsyncWorkflowService:
""" """
if workflow_id: if workflow_id:
# Get specific published workflow # Get specific published workflow
workflow = workflow_service.get_published_workflow_by_id(app_model, workflow_id) workflow = workflow_service.get_published_workflow_by_id(app_model, workflow_id, session=session)
if not workflow: if not workflow:
raise WorkflowNotFoundError(f"Published workflow not found: {workflow_id}") raise WorkflowNotFoundError(f"Published workflow not found: {workflow_id}")
else: else:
# Get default published workflow # Get default published workflow
workflow = workflow_service.get_published_workflow(app_model) workflow = workflow_service.get_published_workflow(app_model, session=session)
if not workflow: if not workflow:
raise WorkflowNotFoundError(f"No published workflow found for app: {app_model.id}") raise WorkflowNotFoundError(f"No published workflow found for app: {app_model.id}")

View File

@ -32,6 +32,50 @@ class SubscriptionPlan(TypedDict):
expiration_date: int expiration_date: int
class QuotaReserveResult(TypedDict):
reservation_id: str
available: int
reserved: int
class QuotaCommitResult(TypedDict):
available: int
reserved: int
refunded: int
class QuotaReleaseResult(TypedDict):
available: int
reserved: int
released: int
_quota_reserve_adapter = TypeAdapter(QuotaReserveResult)
_quota_commit_adapter = TypeAdapter(QuotaCommitResult)
_quota_release_adapter = TypeAdapter(QuotaReleaseResult)
class _TenantFeatureQuota(TypedDict):
usage: int
limit: int
reset_date: NotRequired[int]
class TenantFeatureQuotaInfo(TypedDict):
"""Response of /quota/info.
NOTE (hj24):
- Same convention as BillingInfo: billing may return int fields as str,
always keep non-strict mode to auto-coerce.
"""
trigger_event: _TenantFeatureQuota
api_rate_limit: _TenantFeatureQuota
_tenant_feature_quota_info_adapter = TypeAdapter(TenantFeatureQuotaInfo)
class _BillingQuota(TypedDict): class _BillingQuota(TypedDict):
size: int size: int
limit: int limit: int
@ -149,11 +193,63 @@ class BillingService:
@classmethod @classmethod
def get_tenant_feature_plan_usage_info(cls, tenant_id: str): def get_tenant_feature_plan_usage_info(cls, tenant_id: str):
"""Deprecated: Use get_quota_info instead."""
params = {"tenant_id": tenant_id} params = {"tenant_id": tenant_id}
usage_info = cls._send_request("GET", "/tenant-feature-usage/info", params=params) usage_info = cls._send_request("GET", "/tenant-feature-usage/info", params=params)
return usage_info return usage_info
@classmethod
def get_quota_info(cls, tenant_id: str) -> TenantFeatureQuotaInfo:
params = {"tenant_id": tenant_id}
return _tenant_feature_quota_info_adapter.validate_python(
cls._send_request("GET", "/quota/info", params=params)
)
@classmethod
def quota_reserve(
cls, tenant_id: str, feature_key: str, request_id: str, amount: int = 1, meta: dict | None = None
) -> QuotaReserveResult:
"""Reserve quota before task execution."""
payload: dict = {
"tenant_id": tenant_id,
"feature_key": feature_key,
"request_id": request_id,
"amount": amount,
}
if meta:
payload["meta"] = meta
return _quota_reserve_adapter.validate_python(cls._send_request("POST", "/quota/reserve", json=payload))
@classmethod
def quota_commit(
cls, tenant_id: str, feature_key: str, reservation_id: str, actual_amount: int, meta: dict | None = None
) -> QuotaCommitResult:
"""Commit a reservation with actual consumption."""
payload: dict = {
"tenant_id": tenant_id,
"feature_key": feature_key,
"reservation_id": reservation_id,
"actual_amount": actual_amount,
}
if meta:
payload["meta"] = meta
return _quota_commit_adapter.validate_python(cls._send_request("POST", "/quota/commit", json=payload))
@classmethod
def quota_release(cls, tenant_id: str, feature_key: str, reservation_id: str) -> QuotaReleaseResult:
"""Release a reservation (cancel, return frozen quota)."""
return _quota_release_adapter.validate_python(
cls._send_request(
"POST",
"/quota/release",
json={
"tenant_id": tenant_id,
"feature_key": feature_key,
"reservation_id": reservation_id,
},
)
)
@classmethod @classmethod
def get_knowledge_rate_limit(cls, tenant_id: str) -> KnowledgeRateLimitDict: def get_knowledge_rate_limit(cls, tenant_id: str) -> KnowledgeRateLimitDict:
params = {"tenant_id": tenant_id} params = {"tenant_id": tenant_id}

View File

@ -3748,6 +3748,7 @@ class SegmentService:
ChildChunk.segment_id == segment.id, ChildChunk.segment_id == segment.id,
) )
) )
assert current_user.current_tenant_id
child_chunk = ChildChunk( child_chunk = ChildChunk(
tenant_id=current_user.current_tenant_id, tenant_id=current_user.current_tenant_id,
dataset_id=dataset.id, dataset_id=dataset.id,
@ -3758,7 +3759,7 @@ class SegmentService:
index_node_hash=index_node_hash, index_node_hash=index_node_hash,
content=content, content=content,
word_count=len(content), word_count=len(content),
type="customized", type=SegmentType.CUSTOMIZED,
created_by=current_user.id, created_by=current_user.id,
) )
db.session.add(child_chunk) db.session.add(child_chunk)
@ -3818,6 +3819,7 @@ class SegmentService:
if new_child_chunks_args: if new_child_chunks_args:
child_chunk_count = len(child_chunks) child_chunk_count = len(child_chunks)
for position, args in enumerate(new_child_chunks_args, start=child_chunk_count + 1): for position, args in enumerate(new_child_chunks_args, start=child_chunk_count + 1):
assert current_user.current_tenant_id
index_node_id = str(uuid.uuid4()) index_node_id = str(uuid.uuid4())
index_node_hash = helper.generate_text_hash(args.content) index_node_hash = helper.generate_text_hash(args.content)
child_chunk = ChildChunk( child_chunk = ChildChunk(
@ -3830,7 +3832,7 @@ class SegmentService:
index_node_hash=index_node_hash, index_node_hash=index_node_hash,
content=args.content, content=args.content,
word_count=len(args.content), word_count=len(args.content),
type="customized", type=SegmentType.CUSTOMIZED,
created_by=current_user.id, created_by=current_user.id,
) )

View File

@ -290,7 +290,7 @@ class FeatureService:
def _fulfill_params_from_billing_api(cls, features: FeatureModel, tenant_id: str): def _fulfill_params_from_billing_api(cls, features: FeatureModel, tenant_id: str):
billing_info = BillingService.get_info(tenant_id) billing_info = BillingService.get_info(tenant_id)
features_usage_info = BillingService.get_tenant_feature_plan_usage_info(tenant_id) features_usage_info = BillingService.get_quota_info(tenant_id)
features.billing.enabled = billing_info["enabled"] features.billing.enabled = billing_info["enabled"]
features.billing.subscription.plan = billing_info["subscription"]["plan"] features.billing.subscription.plan = billing_info["subscription"]["plan"]

View File

@ -0,0 +1,233 @@
from __future__ import annotations
import logging
import uuid
from dataclasses import dataclass, field
from typing import TYPE_CHECKING
from configs import dify_config
if TYPE_CHECKING:
from enums.quota_type import QuotaType
logger = logging.getLogger(__name__)
@dataclass
class QuotaCharge:
"""
Result of a quota reservation (Reserve phase).
Lifecycle:
charge = QuotaService.reserve(QuotaType.TRIGGER, tenant_id)
try:
do_work()
charge.commit() # Confirm consumption
except:
charge.refund() # Release frozen quota
If neither commit() nor refund() is called, the billing system's
cleanup CronJob will auto-release the reservation within ~75 seconds.
"""
success: bool
charge_id: str | None # reservation_id
_quota_type: QuotaType
_tenant_id: str | None = None
_feature_key: str | None = None
_amount: int = 0
_committed: bool = field(default=False, repr=False)
def commit(self, actual_amount: int | None = None) -> None:
"""
Confirm the consumption with actual amount.
Args:
actual_amount: Actual amount consumed. Defaults to the reserved amount.
If less than reserved, the difference is refunded automatically.
"""
if self._committed or not self.charge_id or not self._tenant_id or not self._feature_key:
return
try:
from services.billing_service import BillingService
amount = actual_amount if actual_amount is not None else self._amount
BillingService.quota_commit(
tenant_id=self._tenant_id,
feature_key=self._feature_key,
reservation_id=self.charge_id,
actual_amount=amount,
)
self._committed = True
logger.debug(
"Committed %s quota for tenant %s, reservation_id: %s, amount: %d",
self._quota_type,
self._tenant_id,
self.charge_id,
amount,
)
except Exception:
logger.exception("Failed to commit quota, reservation_id: %s", self.charge_id)
def refund(self) -> None:
"""
Release the reserved quota (cancel the charge).
Safe to call even if:
- charge failed or was disabled (charge_id is None)
- already committed (Release after Commit is a no-op)
- already refunded (idempotent)
This method guarantees no exceptions will be raised.
"""
if not self.charge_id or not self._tenant_id or not self._feature_key:
return
QuotaService.release(self._quota_type, self.charge_id, self._tenant_id, self._feature_key)
def unlimited() -> QuotaCharge:
from enums.quota_type import QuotaType
return QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.UNLIMITED)
class QuotaService:
"""Orchestrates quota reserve / commit / release lifecycle via BillingService."""
@staticmethod
def consume(quota_type: QuotaType, tenant_id: str, amount: int = 1) -> QuotaCharge:
"""
Reserve + immediate Commit (one-shot mode).
The returned QuotaCharge supports .refund() which calls Release.
For two-phase usage (e.g. streaming), use reserve() directly.
"""
charge = QuotaService.reserve(quota_type, tenant_id, amount)
if charge.success and charge.charge_id:
charge.commit()
return charge
@staticmethod
def reserve(quota_type: QuotaType, tenant_id: str, amount: int = 1) -> QuotaCharge:
"""
Reserve quota before task execution (Reserve phase only).
The caller MUST call charge.commit() after the task succeeds,
or charge.refund() if the task fails.
Raises:
QuotaExceededError: When quota is insufficient
"""
from services.billing_service import BillingService
from services.errors.app import QuotaExceededError
if not dify_config.BILLING_ENABLED:
logger.debug("Billing disabled, allowing request for %s", tenant_id)
return QuotaCharge(success=True, charge_id=None, _quota_type=quota_type)
logger.info("Reserving %d %s quota for tenant %s", amount, quota_type.value, tenant_id)
if amount <= 0:
raise ValueError("Amount to reserve must be greater than 0")
request_id = str(uuid.uuid4())
feature_key = quota_type.billing_key
try:
reserve_resp = BillingService.quota_reserve(
tenant_id=tenant_id,
feature_key=feature_key,
request_id=request_id,
amount=amount,
)
reservation_id = reserve_resp.get("reservation_id")
if not reservation_id:
logger.warning(
"Reserve returned no reservation_id for %s, feature %s, response: %s",
tenant_id,
quota_type.value,
reserve_resp,
)
raise QuotaExceededError(feature=quota_type.value, tenant_id=tenant_id, required=amount)
logger.debug(
"Reserved %d %s quota for tenant %s, reservation_id: %s",
amount,
quota_type.value,
tenant_id,
reservation_id,
)
return QuotaCharge(
success=True,
charge_id=reservation_id,
_quota_type=quota_type,
_tenant_id=tenant_id,
_feature_key=feature_key,
_amount=amount,
)
except QuotaExceededError:
raise
except ValueError:
raise
except Exception:
logger.exception("Failed to reserve quota for %s, feature %s", tenant_id, quota_type.value)
return unlimited()
@staticmethod
def check(quota_type: QuotaType, tenant_id: str, amount: int = 1) -> bool:
if not dify_config.BILLING_ENABLED:
return True
if amount <= 0:
raise ValueError("Amount to check must be greater than 0")
try:
remaining = QuotaService.get_remaining(quota_type, tenant_id)
return remaining >= amount if remaining != -1 else True
except Exception:
logger.exception("Failed to check quota for %s, feature %s", tenant_id, quota_type.value)
return True
@staticmethod
def release(quota_type: QuotaType, reservation_id: str, tenant_id: str, feature_key: str) -> None:
"""Release a reservation. Guarantees no exceptions."""
try:
from services.billing_service import BillingService
if not dify_config.BILLING_ENABLED:
return
if not reservation_id:
return
logger.info("Releasing %s quota, reservation_id: %s", quota_type.value, reservation_id)
BillingService.quota_release(
tenant_id=tenant_id,
feature_key=feature_key,
reservation_id=reservation_id,
)
except Exception:
logger.exception("Failed to release quota, reservation_id: %s", reservation_id)
@staticmethod
def get_remaining(quota_type: QuotaType, tenant_id: str) -> int:
from services.billing_service import BillingService
try:
usage_info = BillingService.get_quota_info(tenant_id)
if isinstance(usage_info, dict):
feature_info = usage_info.get(quota_type.billing_key, {})
if isinstance(feature_info, dict):
limit = feature_info.get("limit", 0)
usage = feature_info.get("usage", 0)
if limit == -1:
return -1
return max(0, limit - usage)
return 0
except Exception:
logger.exception("Failed to get remaining quota for %s, feature %s", tenant_id, quota_type.value)
return -1
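A usage sketch of the two-phase flow described above (not part of the changeset; do_work() stands in for the caller's real task and the reserved amount is illustrative):
from enums.quota_type import QuotaType
from services.quota_service import QuotaService
def do_work() -> int:
    # Placeholder for the caller's task; returns the units actually consumed.
    return 1
def run_with_quota(tenant_id: str) -> None:
    # Reserve phase: raises QuotaExceededError when the tenant is out of quota.
    charge = QuotaService.reserve(QuotaType.TRIGGER, tenant_id, amount=2)
    try:
        used = do_work()
        charge.commit(actual_amount=used)  # committing less than reserved refunds the difference
    except Exception:
        charge.refund()  # releases the frozen quota; guaranteed not to raise
        raise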

View File

@ -38,6 +38,7 @@ from models.workflow import Workflow
from services.async_workflow_service import AsyncWorkflowService from services.async_workflow_service import AsyncWorkflowService
from services.end_user_service import EndUserService from services.end_user_service import EndUserService
from services.errors.app import QuotaExceededError from services.errors.app import QuotaExceededError
from services.quota_service import QuotaService
from services.trigger.app_trigger_service import AppTriggerService from services.trigger.app_trigger_service import AppTriggerService
from services.workflow.entities import WebhookTriggerData from services.workflow.entities import WebhookTriggerData
@ -798,45 +799,47 @@ class WebhookService:
Exception: If workflow execution fails Exception: If workflow execution fails
""" """
try: try:
with Session(db.engine) as session: workflow_inputs = cls.build_workflow_inputs(webhook_data)
# Prepare inputs for the webhook node
# The webhook node expects webhook_data in the inputs
workflow_inputs = cls.build_workflow_inputs(webhook_data)
# Create trigger data trigger_data = WebhookTriggerData(
trigger_data = WebhookTriggerData( app_id=webhook_trigger.app_id,
app_id=webhook_trigger.app_id, workflow_id=workflow.id,
workflow_id=workflow.id, root_node_id=webhook_trigger.node_id,
root_node_id=webhook_trigger.node_id, # Start from the webhook node inputs=workflow_inputs,
inputs=workflow_inputs, tenant_id=webhook_trigger.tenant_id,
tenant_id=webhook_trigger.tenant_id, )
end_user = EndUserService.get_or_create_end_user_by_type(
type=InvokeFrom.TRIGGER,
tenant_id=webhook_trigger.tenant_id,
app_id=webhook_trigger.app_id,
user_id=None,
)
try:
quota_charge = QuotaService.reserve(QuotaType.TRIGGER, webhook_trigger.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(webhook_trigger.tenant_id)
logger.info(
"Tenant %s rate limited, skipping webhook trigger %s",
webhook_trigger.tenant_id,
webhook_trigger.webhook_id,
) )
raise
end_user = EndUserService.get_or_create_end_user_by_type( try:
type=InvokeFrom.TRIGGER, # NOTE: do not use `with sessionmaker(bind=db.engine, expire_on_commit=False).begin()`
tenant_id=webhook_trigger.tenant_id, # trigger_workflow_async needs to handle multiple session commits internally
app_id=webhook_trigger.app_id, with Session(db.engine, expire_on_commit=False) as session:
user_id=None, AsyncWorkflowService.trigger_workflow_async(
) session,
end_user,
# consume quota before triggering workflow execution trigger_data,
try:
QuotaType.TRIGGER.consume(webhook_trigger.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(webhook_trigger.tenant_id)
logger.info(
"Tenant %s rate limited, skipping webhook trigger %s",
webhook_trigger.tenant_id,
webhook_trigger.webhook_id,
) )
raise quota_charge.commit()
except Exception:
# Trigger workflow execution asynchronously quota_charge.refund()
AsyncWorkflowService.trigger_workflow_async( raise
session,
end_user,
trigger_data,
)
except Exception: except Exception:
logger.exception("Failed to trigger workflow for webhook %s", webhook_trigger.webhook_id) logger.exception("Failed to trigger workflow for webhook %s", webhook_trigger.webhook_id)

View File

@ -16,6 +16,7 @@ from graphon.model_runtime.entities.model_entities import ModelType
from models import UploadFile from models import UploadFile
from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegment, SegmentAttachmentBinding from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegment, SegmentAttachmentBinding
from models.dataset import Document as DatasetDocument from models.dataset import Document as DatasetDocument
from models.enums import SegmentType
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -178,7 +179,7 @@ class VectorService:
index_node_hash=child_chunk.metadata["doc_hash"], index_node_hash=child_chunk.metadata["doc_hash"],
content=child_chunk.page_content, content=child_chunk.page_content,
word_count=len(child_chunk.page_content), word_count=len(child_chunk.page_content),
type="automatic", type=SegmentType.AUTOMATIC,
created_by=dataset_document.created_by, created_by=dataset_document.created_by,
) )
db.session.add(child_segment) db.session.add(child_segment)
@ -222,6 +223,7 @@ class VectorService:
) )
documents.append(new_child_document) documents.append(new_child_document)
for update_child_chunk in update_child_chunks: for update_child_chunk in update_child_chunks:
assert update_child_chunk.index_node_id
child_document = Document( child_document = Document(
page_content=update_child_chunk.content, page_content=update_child_chunk.content,
metadata={ metadata={
@ -234,6 +236,7 @@ class VectorService:
documents.append(child_document) documents.append(child_document)
delete_node_ids.append(update_child_chunk.index_node_id) delete_node_ids.append(update_child_chunk.index_node_id)
for delete_child_chunk in delete_child_chunks: for delete_child_chunk in delete_child_chunks:
assert delete_child_chunk.index_node_id
delete_node_ids.append(delete_child_chunk.index_node_id) delete_node_ids.append(delete_child_chunk.index_node_id)
if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY: if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
# update vector index # update vector index
@ -246,6 +249,7 @@ class VectorService:
@classmethod @classmethod
def delete_child_chunk_vector(cls, child_chunk: ChildChunk, dataset: Dataset): def delete_child_chunk_vector(cls, child_chunk: ChildChunk, dataset: Dataset):
vector = Vector(dataset=dataset) vector = Vector(dataset=dataset)
assert child_chunk.index_node_id
vector.delete_by_ids([child_chunk.index_node_id]) vector.delete_by_ids([child_chunk.index_node_id])
@classmethod @classmethod

View File

@ -156,11 +156,18 @@ class WorkflowService:
# return draft workflow # return draft workflow
return workflow return workflow
def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Workflow | None: def get_published_workflow_by_id(
self, app_model: App, workflow_id: str, session: Session | None = None
) -> Workflow | None:
""" """
fetch published workflow by workflow_id fetch published workflow by workflow_id
When ``session`` is provided, reuse it so callers that already hold a
Session avoid checking out an extra request-scoped ``db.session``
connection. Falls back to ``db.session`` for backward compatibility.
""" """
workflow = db.session.scalar( bind = session if session is not None else db.session
workflow = bind.scalar(
select(Workflow) select(Workflow)
.where( .where(
Workflow.tenant_id == app_model.tenant_id, Workflow.tenant_id == app_model.tenant_id,
@ -178,16 +185,20 @@ class WorkflowService:
) )
return workflow return workflow
def get_published_workflow(self, app_model: App) -> Workflow | None: def get_published_workflow(self, app_model: App, session: Session | None = None) -> Workflow | None:
""" """
Get published workflow Get published workflow
When ``session`` is provided, reuse it so callers that already hold a
Session avoid checking out an extra request-scoped ``db.session``
connection. Falls back to ``db.session`` for backward compatibility.
""" """
if not app_model.workflow_id: if not app_model.workflow_id:
return None return None
# fetch published workflow by workflow_id bind = session if session is not None else db.session
workflow = db.session.scalar( workflow = bind.scalar(
select(Workflow) select(Workflow)
.where( .where(
Workflow.tenant_id == app_model.tenant_id, Workflow.tenant_id == app_model.tenant_id,
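A hedged illustration of the new optional session parameter (the import path and the caller-held session are assumptions; the point is avoiding a second request-scoped connection):
from services.workflow_service import WorkflowService
def load_published_workflow(app_model, session):
    # "session" is a SQLAlchemy Session the caller already holds; passing it
    # through avoids checking out an extra request-scoped db.session connection.
    return WorkflowService().get_published_workflow(app_model, session=session)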

View File

@ -27,7 +27,7 @@ from core.trigger.entities.entities import TriggerProviderEntity
from core.trigger.provider import PluginTriggerProviderController from core.trigger.provider import PluginTriggerProviderController
from core.trigger.trigger_manager import TriggerManager from core.trigger.trigger_manager import TriggerManager
from core.workflow.nodes.trigger_plugin.entities import TriggerEventNodeData from core.workflow.nodes.trigger_plugin.entities import TriggerEventNodeData
from enums.quota_type import QuotaType, unlimited from enums.quota_type import QuotaType
from graphon.enums import WorkflowExecutionStatus from graphon.enums import WorkflowExecutionStatus
from models.enums import ( from models.enums import (
AppTriggerType, AppTriggerType,
@ -42,6 +42,7 @@ from models.workflow import Workflow, WorkflowAppLog, WorkflowAppLogCreatedFrom,
from services.async_workflow_service import AsyncWorkflowService from services.async_workflow_service import AsyncWorkflowService
from services.end_user_service import EndUserService from services.end_user_service import EndUserService
from services.errors.app import QuotaExceededError from services.errors.app import QuotaExceededError
from services.quota_service import QuotaService, unlimited
from services.trigger.app_trigger_service import AppTriggerService from services.trigger.app_trigger_service import AppTriggerService
from services.trigger.trigger_provider_service import TriggerProviderService from services.trigger.trigger_provider_service import TriggerProviderService
from services.trigger.trigger_request_service import TriggerHttpRequestCachingService from services.trigger.trigger_request_service import TriggerHttpRequestCachingService
@ -258,59 +259,58 @@ def dispatch_triggered_workflow(
tenant_id=subscription.tenant_id, provider_id=TriggerProviderID(subscription.provider_id) tenant_id=subscription.tenant_id, provider_id=TriggerProviderID(subscription.provider_id)
) )
trigger_entity: TriggerProviderEntity = provider_controller.entity trigger_entity: TriggerProviderEntity = provider_controller.entity
# Ensure expire_on_commit is set to False to keep workflows available
with session_factory.create_session() as session: with session_factory.create_session() as session:
workflows: Mapping[str, Workflow] = _get_latest_workflows_by_app_ids(session, subscribers) workflows: Mapping[str, Workflow] = _get_latest_workflows_by_app_ids(session, subscribers)
end_users: Mapping[str, EndUser] = EndUserService.create_end_user_batch( end_users: Mapping[str, EndUser] = EndUserService.create_end_user_batch(
type=InvokeFrom.TRIGGER, type=InvokeFrom.TRIGGER,
tenant_id=subscription.tenant_id, tenant_id=subscription.tenant_id,
app_ids=[plugin_trigger.app_id for plugin_trigger in subscribers], app_ids=[plugin_trigger.app_id for plugin_trigger in subscribers],
user_id=user_id, user_id=user_id,
) )
for plugin_trigger in subscribers:
# Get workflow from mapping
workflow: Workflow | None = workflows.get(plugin_trigger.app_id)
if not workflow:
logger.error(
"Workflow not found for app %s",
plugin_trigger.app_id,
)
continue
# Find the trigger node in the workflow for plugin_trigger in subscribers:
event_node = None workflow: Workflow | None = workflows.get(plugin_trigger.app_id)
for node_id, node_config in workflow.walk_nodes(TRIGGER_PLUGIN_NODE_TYPE): if not workflow:
if node_id == plugin_trigger.node_id: logger.error(
event_node = node_config "Workflow not found for app %s",
break plugin_trigger.app_id,
if not event_node:
logger.error("Trigger event node not found for app %s", plugin_trigger.app_id)
continue
# invoke trigger
trigger_metadata = PluginTriggerMetadata(
plugin_unique_identifier=provider_controller.plugin_unique_identifier or "",
endpoint_id=subscription.endpoint_id,
provider_id=subscription.provider_id,
event_name=event_name,
icon_filename=trigger_entity.identity.icon or "",
icon_dark_filename=trigger_entity.identity.icon_dark or "",
) )
continue
# consume quota before invoking trigger event_node = None
quota_charge = unlimited() for node_id, node_config in workflow.walk_nodes(TRIGGER_PLUGIN_NODE_TYPE):
try: if node_id == plugin_trigger.node_id:
quota_charge = QuotaType.TRIGGER.consume(subscription.tenant_id) event_node = node_config
except QuotaExceededError: break
AppTriggerService.mark_tenant_triggers_rate_limited(subscription.tenant_id)
logger.info(
"Tenant %s rate limited, skipping plugin trigger %s", subscription.tenant_id, plugin_trigger.id
)
return 0
node_data: TriggerEventNodeData = TriggerEventNodeData.model_validate(event_node) if not event_node:
invoke_response: TriggerInvokeEventResponse | None = None logger.error("Trigger event node not found for app %s", plugin_trigger.app_id)
continue
trigger_metadata = PluginTriggerMetadata(
plugin_unique_identifier=provider_controller.plugin_unique_identifier or "",
endpoint_id=subscription.endpoint_id,
provider_id=subscription.provider_id,
event_name=event_name,
icon_filename=trigger_entity.identity.icon or "",
icon_dark_filename=trigger_entity.identity.icon_dark or "",
)
quota_charge = unlimited()
try:
quota_charge = QuotaService.reserve(QuotaType.TRIGGER, subscription.tenant_id)
except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(subscription.tenant_id)
logger.info("Tenant %s rate limited, skipping plugin trigger %s", subscription.tenant_id, plugin_trigger.id)
return dispatched_count
node_data: TriggerEventNodeData = TriggerEventNodeData.model_validate(event_node)
invoke_response: TriggerInvokeEventResponse | None = None
with session_factory.create_session() as session:
try: try:
invoke_response = TriggerManager.invoke_trigger_event( invoke_response = TriggerManager.invoke_trigger_event(
tenant_id=subscription.tenant_id, tenant_id=subscription.tenant_id,
@ -387,6 +387,7 @@ def dispatch_triggered_workflow(
raise ValueError(f"End user not found for app {plugin_trigger.app_id}") raise ValueError(f"End user not found for app {plugin_trigger.app_id}")
AsyncWorkflowService.trigger_workflow_async(session=session, user=end_user, trigger_data=trigger_data) AsyncWorkflowService.trigger_workflow_async(session=session, user=end_user, trigger_data=trigger_data)
quota_charge.commit()
dispatched_count += 1 dispatched_count += 1
logger.info( logger.info(
"Triggered workflow for app %s with trigger event %s", "Triggered workflow for app %s with trigger event %s",
@ -401,7 +402,7 @@ def dispatch_triggered_workflow(
plugin_trigger.app_id, plugin_trigger.app_id,
) )
return dispatched_count return dispatched_count
def dispatch_triggered_workflows( def dispatch_triggered_workflows(

View File

@ -8,10 +8,11 @@ from core.workflow.nodes.trigger_schedule.exc import (
ScheduleNotFoundError, ScheduleNotFoundError,
TenantOwnerNotFoundError, TenantOwnerNotFoundError,
) )
from enums.quota_type import QuotaType, unlimited from enums.quota_type import QuotaType
from models.trigger import WorkflowSchedulePlan from models.trigger import WorkflowSchedulePlan
from services.async_workflow_service import AsyncWorkflowService from services.async_workflow_service import AsyncWorkflowService
from services.errors.app import QuotaExceededError from services.errors.app import QuotaExceededError
from services.quota_service import QuotaService, unlimited
from services.trigger.app_trigger_service import AppTriggerService from services.trigger.app_trigger_service import AppTriggerService
from services.trigger.schedule_service import ScheduleService from services.trigger.schedule_service import ScheduleService
from services.workflow.entities import ScheduleTriggerData from services.workflow.entities import ScheduleTriggerData
@ -32,6 +33,7 @@ def run_schedule_trigger(schedule_id: str) -> None:
TenantOwnerNotFoundError: If no owner/admin for tenant TenantOwnerNotFoundError: If no owner/admin for tenant
ScheduleExecutionError: If workflow trigger fails ScheduleExecutionError: If workflow trigger fails
""" """
# Ensure expire_on_commit is set to False to keep schedule/tenant_owner available
with session_factory.create_session() as session: with session_factory.create_session() as session:
schedule = session.get(WorkflowSchedulePlan, schedule_id) schedule = session.get(WorkflowSchedulePlan, schedule_id)
if not schedule: if not schedule:
@ -41,16 +43,16 @@ def run_schedule_trigger(schedule_id: str) -> None:
if not tenant_owner: if not tenant_owner:
raise TenantOwnerNotFoundError(f"No owner or admin found for tenant {schedule.tenant_id}") raise TenantOwnerNotFoundError(f"No owner or admin found for tenant {schedule.tenant_id}")
quota_charge = unlimited() quota_charge = unlimited()
try: try:
quota_charge = QuotaType.TRIGGER.consume(schedule.tenant_id) quota_charge = QuotaService.reserve(QuotaType.TRIGGER, schedule.tenant_id)
except QuotaExceededError: except QuotaExceededError:
AppTriggerService.mark_tenant_triggers_rate_limited(schedule.tenant_id) AppTriggerService.mark_tenant_triggers_rate_limited(schedule.tenant_id)
logger.info("Tenant %s rate limited, skipping schedule trigger %s", schedule.tenant_id, schedule_id) logger.info("Tenant %s rate limited, skipping schedule trigger %s", schedule.tenant_id, schedule_id)
return return
try: try:
# Production dispatch: Trigger the workflow normally with session_factory.create_session() as session:
response = AsyncWorkflowService.trigger_workflow_async( response = AsyncWorkflowService.trigger_workflow_async(
session=session, session=session,
user=tenant_owner, user=tenant_owner,
@ -61,9 +63,10 @@ def run_schedule_trigger(schedule_id: str) -> None:
tenant_id=schedule.tenant_id, tenant_id=schedule.tenant_id,
), ),
) )
logger.info("Schedule %s triggered workflow: %s", schedule_id, response.workflow_trigger_log_id) quota_charge.commit()
except Exception as e: logger.info("Schedule %s triggered workflow: %s", schedule_id, response.workflow_trigger_log_id)
quota_charge.refund() except Exception as e:
raise ScheduleExecutionError( quota_charge.refund()
f"Failed to trigger workflow for schedule {schedule_id}, app {schedule.app_id}" raise ScheduleExecutionError(
) from e f"Failed to trigger workflow for schedule {schedule_id}, app {schedule.app_id}"
) from e

View File

@ -36,12 +36,19 @@ class TestAppGenerateService:
) as mock_message_based_generator, ) as mock_message_based_generator,
patch("services.account_service.FeatureService", autospec=True) as mock_account_feature_service, patch("services.account_service.FeatureService", autospec=True) as mock_account_feature_service,
patch("services.app_generate_service.dify_config", autospec=True) as mock_dify_config, patch("services.app_generate_service.dify_config", autospec=True) as mock_dify_config,
patch("services.quota_service.dify_config", autospec=True) as mock_quota_dify_config,
patch("configs.dify_config", autospec=True) as mock_global_dify_config, patch("configs.dify_config", autospec=True) as mock_global_dify_config,
): ):
# Setup default mock returns for billing service # Setup default mock returns for billing service
mock_billing_service.update_tenant_feature_plan_usage.return_value = { mock_billing_service.quota_reserve.return_value = {
"result": "success", "reservation_id": "test-reservation-id",
"history_id": "test_history_id", "available": 100,
"reserved": 1,
}
mock_billing_service.quota_commit.return_value = {
"available": 99,
"reserved": 0,
"refunded": 0,
} }
# Setup default mock returns for workflow service # Setup default mock returns for workflow service
@ -101,6 +108,8 @@ class TestAppGenerateService:
mock_dify_config.APP_DEFAULT_ACTIVE_REQUESTS = 100 mock_dify_config.APP_DEFAULT_ACTIVE_REQUESTS = 100
mock_dify_config.APP_DAILY_RATE_LIMIT = 1000 mock_dify_config.APP_DAILY_RATE_LIMIT = 1000
mock_quota_dify_config.BILLING_ENABLED = False
mock_global_dify_config.BILLING_ENABLED = False mock_global_dify_config.BILLING_ENABLED = False
mock_global_dify_config.APP_MAX_ACTIVE_REQUESTS = 100 mock_global_dify_config.APP_MAX_ACTIVE_REQUESTS = 100
mock_global_dify_config.APP_DAILY_RATE_LIMIT = 1000 mock_global_dify_config.APP_DAILY_RATE_LIMIT = 1000
@ -118,6 +127,7 @@ class TestAppGenerateService:
"message_based_generator": mock_message_based_generator, "message_based_generator": mock_message_based_generator,
"account_feature_service": mock_account_feature_service, "account_feature_service": mock_account_feature_service,
"dify_config": mock_dify_config, "dify_config": mock_dify_config,
"quota_dify_config": mock_quota_dify_config,
"global_dify_config": mock_global_dify_config, "global_dify_config": mock_global_dify_config,
} }
@ -465,6 +475,7 @@ class TestAppGenerateService:
# Set BILLING_ENABLED to True for this test # Set BILLING_ENABLED to True for this test
mock_external_service_dependencies["dify_config"].BILLING_ENABLED = True mock_external_service_dependencies["dify_config"].BILLING_ENABLED = True
mock_external_service_dependencies["quota_dify_config"].BILLING_ENABLED = True
mock_external_service_dependencies["global_dify_config"].BILLING_ENABLED = True mock_external_service_dependencies["global_dify_config"].BILLING_ENABLED = True
# Setup test arguments # Setup test arguments
@ -478,8 +489,10 @@ class TestAppGenerateService:
# Verify the result # Verify the result
assert result == ["test_response"] assert result == ["test_response"]
# Verify billing service was called to consume quota # Verify billing two-phase quota (reserve + commit)
mock_external_service_dependencies["billing_service"].update_tenant_feature_plan_usage.assert_called_once() billing = mock_external_service_dependencies["billing_service"]
billing.quota_reserve.assert_called_once()
billing.quota_commit.assert_called_once()
def test_generate_with_invalid_app_mode( def test_generate_with_invalid_app_mode(
self, db_session_with_containers: Session, mock_external_service_dependencies self, db_session_with_containers: Session, mock_external_service_dependencies

View File

@ -10,6 +10,7 @@ from sqlalchemy import select
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from core.trigger.constants import TRIGGER_WEBHOOK_NODE_TYPE from core.trigger.constants import TRIGGER_WEBHOOK_NODE_TYPE
from enums.quota_type import QuotaType
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.enums import AppTriggerStatus, AppTriggerType from models.enums import AppTriggerStatus, AppTriggerType
from models.model import App from models.model import App
@ -290,17 +291,26 @@ class TestWebhookServiceTriggerExecutionWithContainers:
end_user = SimpleNamespace(id=str(uuid4())) end_user = SimpleNamespace(id=str(uuid4()))
webhook_data = {"body": {"value": 1}, "headers": {}, "query_params": {}, "files": {}, "method": "POST"} webhook_data = {"body": {"value": 1}, "headers": {}, "query_params": {}, "files": {}, "method": "POST"}
quota_charge = MagicMock()
with ( with (
patch( patch(
"services.trigger.webhook_service.EndUserService.get_or_create_end_user_by_type", "services.trigger.webhook_service.EndUserService.get_or_create_end_user_by_type",
return_value=end_user, return_value=end_user,
), ),
patch("services.trigger.webhook_service.QuotaType.TRIGGER.consume") as mock_consume, patch(
"services.trigger.webhook_service.QuotaService.reserve",
return_value=quota_charge,
) as mock_reserve,
patch("services.trigger.webhook_service.AsyncWorkflowService.trigger_workflow_async") as mock_trigger, patch("services.trigger.webhook_service.AsyncWorkflowService.trigger_workflow_async") as mock_trigger,
): ):
WebhookService.trigger_workflow_execution(webhook_trigger, webhook_data, workflow) WebhookService.trigger_workflow_execution(webhook_trigger, webhook_data, workflow)
mock_consume.assert_called_once_with(webhook_trigger.tenant_id) mock_reserve.assert_called_once()
reserve_args = mock_reserve.call_args.args
assert reserve_args[0] == QuotaType.TRIGGER
assert reserve_args[1] == webhook_trigger.tenant_id
quota_charge.commit.assert_called_once()
mock_trigger.assert_called_once() mock_trigger.assert_called_once()
trigger_args = mock_trigger.call_args.args trigger_args = mock_trigger.call_args.args
assert trigger_args[1] is end_user assert trigger_args[1] is end_user
@ -327,7 +337,7 @@ class TestWebhookServiceTriggerExecutionWithContainers:
return_value=SimpleNamespace(id=str(uuid4())), return_value=SimpleNamespace(id=str(uuid4())),
), ),
patch( patch(
"services.trigger.webhook_service.QuotaType.TRIGGER.consume", "services.trigger.webhook_service.QuotaService.reserve",
side_effect=QuotaExceededError(feature="trigger", tenant_id=tenant.id, required=1), side_effect=QuotaExceededError(feature="trigger", tenant_id=tenant.id, required=1),
), ),
patch( patch(

View File

@ -605,9 +605,9 @@ def test_schedule_trigger_creates_trigger_log(
) )
# Mock quota to avoid rate limiting # Mock quota to avoid rate limiting
from enums import quota_type from services import quota_service
monkeypatch.setattr(quota_type.QuotaType.TRIGGER, "consume", lambda _tenant_id: quota_type.unlimited()) monkeypatch.setattr(quota_service.QuotaService, "reserve", lambda *_args, **_kwargs: quota_service.unlimited())
# Execute schedule trigger # Execute schedule trigger
workflow_schedule_tasks.run_schedule_trigger(plan.id) workflow_schedule_tasks.run_schedule_trigger(plan.id)

View File

View File

@ -0,0 +1,349 @@
"""Unit tests for QuotaType, QuotaService, and QuotaCharge."""
from unittest.mock import patch
import pytest
from enums.quota_type import QuotaType
from services.quota_service import QuotaCharge, QuotaService, unlimited
class TestQuotaType:
def test_billing_key_trigger(self):
assert QuotaType.TRIGGER.billing_key == "trigger_event"
def test_billing_key_workflow(self):
assert QuotaType.WORKFLOW.billing_key == "api_rate_limit"
def test_billing_key_unlimited_raises(self):
with pytest.raises(ValueError, match="Invalid quota type"):
_ = QuotaType.UNLIMITED.billing_key
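# The assertions above pin the billing_key mapping; a minimal sketch consistent with them
# is shown below. enums/quota_type.py itself is not part of this changeset, so the member
# values and module layout are assumptions.
from enum import Enum
class QuotaTypeSketch(Enum):
    TRIGGER = "trigger"
    WORKFLOW = "workflow"
    UNLIMITED = "unlimited"
    @property
    def billing_key(self) -> str:
        keys = {QuotaTypeSketch.TRIGGER: "trigger_event", QuotaTypeSketch.WORKFLOW: "api_rate_limit"}
        if self not in keys:
            raise ValueError(f"Invalid quota type: {self}")
        return keys[self]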
class TestQuotaService:
def test_reserve_billing_disabled(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService"),
):
mock_cfg.BILLING_ENABLED = False
charge = QuotaService.reserve(QuotaType.TRIGGER, "t1")
assert charge.success is True
assert charge.charge_id is None
def test_reserve_zero_amount_raises(self):
with patch("services.quota_service.dify_config") as mock_cfg:
mock_cfg.BILLING_ENABLED = True
with pytest.raises(ValueError, match="greater than 0"):
QuotaService.reserve(QuotaType.TRIGGER, "t1", amount=0)
def test_reserve_success(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
mock_bs.quota_reserve.return_value = {"reservation_id": "rid-1", "available": 99}
charge = QuotaService.reserve(QuotaType.TRIGGER, "t1", amount=1)
assert charge.success is True
assert charge.charge_id == "rid-1"
assert charge._tenant_id == "t1"
assert charge._feature_key == "trigger_event"
assert charge._amount == 1
mock_bs.quota_reserve.assert_called_once()
def test_reserve_no_reservation_id_raises(self):
from services.errors.app import QuotaExceededError
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
mock_bs.quota_reserve.return_value = {}
with pytest.raises(QuotaExceededError):
QuotaService.reserve(QuotaType.TRIGGER, "t1")
def test_reserve_quota_exceeded_propagates(self):
from services.errors.app import QuotaExceededError
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
mock_bs.quota_reserve.side_effect = QuotaExceededError(feature="trigger", tenant_id="t1", required=1)
with pytest.raises(QuotaExceededError):
QuotaService.reserve(QuotaType.TRIGGER, "t1")
def test_reserve_api_exception_returns_unlimited(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
mock_bs.quota_reserve.side_effect = RuntimeError("network")
charge = QuotaService.reserve(QuotaType.TRIGGER, "t1")
assert charge.success is True
assert charge.charge_id is None
def test_consume_calls_reserve_and_commit(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
mock_bs.quota_reserve.return_value = {"reservation_id": "rid-c"}
mock_bs.quota_commit.return_value = {}
charge = QuotaService.consume(QuotaType.TRIGGER, "t1")
assert charge.success is True
mock_bs.quota_commit.assert_called_once()
def test_check_billing_disabled(self):
with patch("services.quota_service.dify_config") as mock_cfg:
mock_cfg.BILLING_ENABLED = False
assert QuotaService.check(QuotaType.TRIGGER, "t1") is True
def test_check_zero_amount_raises(self):
with patch("services.quota_service.dify_config") as mock_cfg:
mock_cfg.BILLING_ENABLED = True
with pytest.raises(ValueError, match="greater than 0"):
QuotaService.check(QuotaType.TRIGGER, "t1", amount=0)
def test_check_sufficient_quota(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch.object(QuotaService, "get_remaining", return_value=100),
):
mock_cfg.BILLING_ENABLED = True
assert QuotaService.check(QuotaType.TRIGGER, "t1", amount=50) is True
def test_check_insufficient_quota(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch.object(QuotaService, "get_remaining", return_value=5),
):
mock_cfg.BILLING_ENABLED = True
assert QuotaService.check(QuotaType.TRIGGER, "t1", amount=10) is False
def test_check_unlimited_quota(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch.object(QuotaService, "get_remaining", return_value=-1),
):
mock_cfg.BILLING_ENABLED = True
assert QuotaService.check(QuotaType.TRIGGER, "t1", amount=999) is True
def test_check_exception_returns_true(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch.object(QuotaService, "get_remaining", side_effect=RuntimeError),
):
mock_cfg.BILLING_ENABLED = True
assert QuotaService.check(QuotaType.TRIGGER, "t1") is True
def test_release_billing_disabled(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = False
QuotaService.release(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")
mock_bs.quota_release.assert_not_called()
def test_release_empty_reservation(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
QuotaService.release(QuotaType.TRIGGER, "", "t1", "trigger_event")
mock_bs.quota_release.assert_not_called()
def test_release_success(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
mock_bs.quota_release.return_value = {}
QuotaService.release(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")
mock_bs.quota_release.assert_called_once_with(
tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1"
)
def test_release_exception_swallowed(self):
with (
patch("services.quota_service.dify_config") as mock_cfg,
patch("services.billing_service.BillingService") as mock_bs,
):
mock_cfg.BILLING_ENABLED = True
mock_bs.quota_release.side_effect = RuntimeError("fail")
QuotaService.release(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")
def test_get_remaining_normal(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.return_value = {"trigger_event": {"limit": 100, "usage": 30}}
assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 70
def test_get_remaining_unlimited(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.return_value = {"trigger_event": {"limit": -1, "usage": 0}}
assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == -1
def test_get_remaining_over_limit_returns_zero(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.return_value = {"trigger_event": {"limit": 10, "usage": 15}}
assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0
def test_get_remaining_exception_returns_neg1(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.side_effect = RuntimeError
assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == -1
def test_get_remaining_empty_response(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.return_value = {}
assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0
def test_get_remaining_non_dict_response(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.return_value = "invalid"
assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0
def test_get_remaining_feature_not_in_response(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.return_value = {"other_feature": {"limit": 100, "usage": 0}}
remaining = QuotaService.get_remaining(QuotaType.TRIGGER, "t1")
assert remaining == 0
def test_get_remaining_non_dict_feature_info(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.get_quota_info.return_value = {"trigger_event": "not_a_dict"}
assert QuotaService.get_remaining(QuotaType.TRIGGER, "t1") == 0
class TestQuotaCharge:
def test_commit_success(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.quota_commit.return_value = {}
charge = QuotaCharge(
success=True,
charge_id="rid-1",
_quota_type=QuotaType.TRIGGER,
_tenant_id="t1",
_feature_key="trigger_event",
_amount=1,
)
charge.commit()
mock_bs.quota_commit.assert_called_once_with(
tenant_id="t1",
feature_key="trigger_event",
reservation_id="rid-1",
actual_amount=1,
)
assert charge._committed is True
def test_commit_with_actual_amount(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.quota_commit.return_value = {}
charge = QuotaCharge(
success=True,
charge_id="rid-1",
_quota_type=QuotaType.TRIGGER,
_tenant_id="t1",
_feature_key="trigger_event",
_amount=10,
)
charge.commit(actual_amount=5)
call_kwargs = mock_bs.quota_commit.call_args[1]
assert call_kwargs["actual_amount"] == 5
def test_commit_idempotent(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.quota_commit.return_value = {}
charge = QuotaCharge(
success=True,
charge_id="rid-1",
_quota_type=QuotaType.TRIGGER,
_tenant_id="t1",
_feature_key="trigger_event",
_amount=1,
)
charge.commit()
charge.commit()
assert mock_bs.quota_commit.call_count == 1
def test_commit_no_charge_id_noop(self):
with patch("services.billing_service.BillingService") as mock_bs:
charge = QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.TRIGGER)
charge.commit()
mock_bs.quota_commit.assert_not_called()
def test_commit_no_tenant_id_noop(self):
with patch("services.billing_service.BillingService") as mock_bs:
charge = QuotaCharge(
success=True,
charge_id="rid-1",
_quota_type=QuotaType.TRIGGER,
_tenant_id=None,
_feature_key="trigger_event",
)
charge.commit()
mock_bs.quota_commit.assert_not_called()
def test_commit_exception_swallowed(self):
with patch("services.billing_service.BillingService") as mock_bs:
mock_bs.quota_commit.side_effect = RuntimeError("fail")
charge = QuotaCharge(
success=True,
charge_id="rid-1",
_quota_type=QuotaType.TRIGGER,
_tenant_id="t1",
_feature_key="trigger_event",
_amount=1,
)
charge.commit()
def test_refund_success(self):
with patch.object(QuotaService, "release") as mock_rel:
charge = QuotaCharge(
success=True,
charge_id="rid-1",
_quota_type=QuotaType.TRIGGER,
_tenant_id="t1",
_feature_key="trigger_event",
)
charge.refund()
mock_rel.assert_called_once_with(QuotaType.TRIGGER, "rid-1", "t1", "trigger_event")
def test_refund_no_charge_id_noop(self):
with patch.object(QuotaService, "release") as mock_rel:
charge = QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.TRIGGER)
charge.refund()
mock_rel.assert_not_called()
def test_refund_no_tenant_id_noop(self):
with patch.object(QuotaService, "release") as mock_rel:
charge = QuotaCharge(
success=True,
charge_id="rid-1",
_quota_type=QuotaType.TRIGGER,
_tenant_id=None,
)
charge.refund()
mock_rel.assert_not_called()
class TestUnlimited:
def test_unlimited_returns_success_with_no_charge_id(self):
charge = unlimited()
assert charge.success is True
assert charge.charge_id is None
assert charge._quota_type == QuotaType.UNLIMITED

View File

@ -711,6 +711,8 @@ class TestMessageAnnotation:
annotation = MessageAnnotation( annotation = MessageAnnotation(
app_id=app_id, app_id=app_id,
question="What is AI?", question="What is AI?",
conversation_id=None,
message_id=None,
content="AI stands for Artificial Intelligence.", content="AI stands for Artificial Intelligence.",
account_id=account_id, account_id=account_id,
) )
@ -728,6 +730,8 @@ class TestMessageAnnotation:
annotation = MessageAnnotation( annotation = MessageAnnotation(
app_id=str(uuid4()), app_id=str(uuid4()),
question="Test question", question="Test question",
conversation_id=None,
message_id=None,
content="Test content", content="Test content",
account_id=str(uuid4()), account_id=str(uuid4()),
) )
@ -1068,6 +1072,8 @@ class TestModelIntegration:
app_id=app_id, app_id=app_id,
question="What is AI?", question="What is AI?",
content="AI stands for Artificial Intelligence.", content="AI stands for Artificial Intelligence.",
conversation_id=None,
message_id=message_id,
account_id=account_id, account_id=account_id,
) )
annotation.id = annotation_id annotation.id = annotation_id

View File

@ -365,7 +365,6 @@ def _make_segment(
def _make_child_chunk() -> ChildChunk: def _make_child_chunk() -> ChildChunk:
return ChildChunk( return ChildChunk(
id="child-a",
tenant_id="tenant-1", tenant_id="tenant-1",
dataset_id="dataset-1", dataset_id="dataset-1",
document_id="doc-1", document_id="doc-1",

View File

@ -238,6 +238,8 @@ class TestAppAnnotationServiceUpInsert:
assert result == annotation_instance assert result == annotation_instance
mock_cls.assert_called_once_with( mock_cls.assert_called_once_with(
app_id=app.id, app_id=app.id,
conversation_id=None,
message_id=None,
content="hello", content="hello",
question="q1", question="q1",
account_id=current_user.id, account_id=current_user.id,

View File

@ -23,6 +23,7 @@ import pytest
import services.app_generate_service as ags_module import services.app_generate_service as ags_module
from core.app.entities.app_invoke_entities import InvokeFrom from core.app.entities.app_invoke_entities import InvokeFrom
from enums.quota_type import QuotaType
from models.model import AppMode from models.model import AppMode
from services.app_generate_service import AppGenerateService from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowIdFormatError, WorkflowNotFoundError from services.errors.app import WorkflowIdFormatError, WorkflowNotFoundError
@ -448,8 +449,8 @@ class TestGenerateBilling:
def test_billing_enabled_consumes_quota(self, mocker, monkeypatch): def test_billing_enabled_consumes_quota(self, mocker, monkeypatch):
monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True) monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True)
quota_charge = MagicMock() quota_charge = MagicMock()
consume_mock = mocker.patch( reserve_mock = mocker.patch(
"services.app_generate_service.QuotaType.WORKFLOW.consume", "services.app_generate_service.QuotaService.reserve",
return_value=quota_charge, return_value=quota_charge,
) )
mocker.patch( mocker.patch(
@ -468,7 +469,8 @@ class TestGenerateBilling:
invoke_from=InvokeFrom.SERVICE_API, invoke_from=InvokeFrom.SERVICE_API,
streaming=False, streaming=False,
) )
consume_mock.assert_called_once_with("tenant-id") reserve_mock.assert_called_once_with(QuotaType.WORKFLOW, "tenant-id")
quota_charge.commit.assert_called_once()
def test_billing_quota_exceeded_raises_rate_limit_error(self, mocker, monkeypatch): def test_billing_quota_exceeded_raises_rate_limit_error(self, mocker, monkeypatch):
from services.errors.app import QuotaExceededError from services.errors.app import QuotaExceededError
@ -476,7 +478,7 @@ class TestGenerateBilling:
monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True) monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True)
mocker.patch( mocker.patch(
"services.app_generate_service.QuotaType.WORKFLOW.consume", "services.app_generate_service.QuotaService.reserve",
side_effect=QuotaExceededError(feature="workflow", tenant_id="t", required=1), side_effect=QuotaExceededError(feature="workflow", tenant_id="t", required=1),
) )
@ -493,7 +495,7 @@ class TestGenerateBilling:
monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True) monkeypatch.setattr(ags_module.dify_config, "BILLING_ENABLED", True)
quota_charge = MagicMock() quota_charge = MagicMock()
mocker.patch( mocker.patch(
"services.app_generate_service.QuotaType.WORKFLOW.consume", "services.app_generate_service.QuotaService.reserve",
return_value=quota_charge, return_value=quota_charge,
) )
mocker.patch( mocker.patch(

View File

@ -57,7 +57,7 @@ class TestAsyncWorkflowService:
- repo: SQLAlchemyWorkflowTriggerLogRepository - repo: SQLAlchemyWorkflowTriggerLogRepository
- dispatcher_manager_class: QueueDispatcherManager class - dispatcher_manager_class: QueueDispatcherManager class
- dispatcher: dispatcher instance - dispatcher: dispatcher instance
- quota_workflow: QuotaType.WORKFLOW - quota_service: QuotaService mock
- get_workflow: AsyncWorkflowService._get_workflow method - get_workflow: AsyncWorkflowService._get_workflow method
- professional_task: execute_workflow_professional - professional_task: execute_workflow_professional
- team_task: execute_workflow_team - team_task: execute_workflow_team
@ -72,7 +72,7 @@ class TestAsyncWorkflowService:
mock_repo.create.side_effect = _create_side_effect mock_repo.create.side_effect = _create_side_effect
mock_dispatcher = MagicMock() mock_dispatcher = MagicMock()
quota_workflow = MagicMock() mock_quota_service = MagicMock()
with ( with (
patch.object( patch.object(
@ -88,8 +88,8 @@ class TestAsyncWorkflowService:
) as mock_get_workflow, ) as mock_get_workflow,
patch.object( patch.object(
async_workflow_service_module, async_workflow_service_module,
"QuotaType", "QuotaService",
new=SimpleNamespace(WORKFLOW=quota_workflow), new=mock_quota_service,
), ),
patch.object(async_workflow_service_module, "execute_workflow_professional") as mock_professional_task, patch.object(async_workflow_service_module, "execute_workflow_professional") as mock_professional_task,
patch.object(async_workflow_service_module, "execute_workflow_team") as mock_team_task, patch.object(async_workflow_service_module, "execute_workflow_team") as mock_team_task,
@ -102,7 +102,7 @@ class TestAsyncWorkflowService:
"repo": mock_repo, "repo": mock_repo,
"dispatcher_manager_class": mock_dispatcher_manager_class, "dispatcher_manager_class": mock_dispatcher_manager_class,
"dispatcher": mock_dispatcher, "dispatcher": mock_dispatcher,
"quota_workflow": quota_workflow, "quota_service": mock_quota_service,
"get_workflow": mock_get_workflow, "get_workflow": mock_get_workflow,
"professional_task": mock_professional_task, "professional_task": mock_professional_task,
"team_task": mock_team_task, "team_task": mock_team_task,
@ -141,6 +141,9 @@ class TestAsyncWorkflowService:
mocks["team_task"].delay.return_value = task_result mocks["team_task"].delay.return_value = task_result
mocks["sandbox_task"].delay.return_value = task_result mocks["sandbox_task"].delay.return_value = task_result
quota_charge_mock = MagicMock()
mocks["quota_service"].reserve.return_value = quota_charge_mock
class DummyAccount: class DummyAccount:
def __init__(self, user_id: str): def __init__(self, user_id: str):
self.id = user_id self.id = user_id
@ -158,8 +161,9 @@ class TestAsyncWorkflowService:
assert result.status == "queued" assert result.status == "queued"
assert result.queue == queue_name assert result.queue == queue_name
mocks["quota_workflow"].consume.assert_called_once_with("tenant-123") mocks["quota_service"].reserve.assert_called_once()
assert session.commit.call_count == 2 quota_charge_mock.commit.assert_called_once()
assert session.commit.call_count == 3
created_log = mocks["repo"].create.call_args[0][0] created_log = mocks["repo"].create.call_args[0][0]
assert created_log.status == WorkflowTriggerStatus.QUEUED assert created_log.status == WorkflowTriggerStatus.QUEUED
@ -245,7 +249,7 @@ class TestAsyncWorkflowService:
mocks = async_workflow_trigger_mocks mocks = async_workflow_trigger_mocks
mocks["dispatcher"].get_queue_name.return_value = QueuePriority.TEAM mocks["dispatcher"].get_queue_name.return_value = QueuePriority.TEAM
mocks["get_workflow"].return_value = workflow mocks["get_workflow"].return_value = workflow
mocks["quota_workflow"].consume.side_effect = QuotaExceededError( mocks["quota_service"].reserve.side_effect = QuotaExceededError(
feature="workflow", feature="workflow",
tenant_id="tenant-123", tenant_id="tenant-123",
required=1, required=1,
@ -262,7 +266,7 @@ class TestAsyncWorkflowService:
trigger_data=trigger_data, trigger_data=trigger_data,
) )
assert session.commit.call_count == 2 assert session.commit.call_count == 3
updated_log = mocks["repo"].update.call_args[0][0] updated_log = mocks["repo"].update.call_args[0][0]
assert updated_log.status == WorkflowTriggerStatus.RATE_LIMITED assert updated_log.status == WorkflowTriggerStatus.RATE_LIMITED
assert "Quota limit reached" in updated_log.error assert "Quota limit reached" in updated_log.error
@ -465,7 +469,7 @@ class TestAsyncWorkflowServiceGetWorkflow:
# Assert # Assert
assert result == workflow assert result == workflow
workflow_service.get_published_workflow_by_id.assert_called_once_with(app_model, "workflow-123") workflow_service.get_published_workflow_by_id.assert_called_once_with(app_model, "workflow-123", session=None)
workflow_service.get_published_workflow.assert_not_called() workflow_service.get_published_workflow.assert_not_called()
def test_should_raise_when_specific_workflow_id_not_found(self): def test_should_raise_when_specific_workflow_id_not_found(self):
@ -493,7 +497,7 @@ class TestAsyncWorkflowServiceGetWorkflow:
# Assert # Assert
assert result == workflow assert result == workflow
workflow_service.get_published_workflow.assert_called_once_with(app_model) workflow_service.get_published_workflow.assert_called_once_with(app_model, session=None)
workflow_service.get_published_workflow_by_id.assert_not_called() workflow_service.get_published_workflow_by_id.assert_not_called()
def test_should_raise_when_default_published_workflow_not_found(self): def test_should_raise_when_default_published_workflow_not_found(self):

View File

@ -425,7 +425,7 @@ class TestBillingServiceUsageCalculation:
yield mock yield mock
def test_get_tenant_feature_plan_usage_info(self, mock_send_request): def test_get_tenant_feature_plan_usage_info(self, mock_send_request):
"""Test retrieval of tenant feature plan usage information.""" """Test retrieval of tenant feature plan usage information (legacy endpoint)."""
# Arrange # Arrange
tenant_id = "tenant-123" tenant_id = "tenant-123"
expected_response = {"features": {"trigger": {"used": 50, "limit": 100}, "workflow": {"used": 20, "limit": 50}}} expected_response = {"features": {"trigger": {"used": 50, "limit": 100}, "workflow": {"used": 20, "limit": 50}}}
@ -438,6 +438,20 @@ class TestBillingServiceUsageCalculation:
assert result == expected_response assert result == expected_response
mock_send_request.assert_called_once_with("GET", "/tenant-feature-usage/info", params={"tenant_id": tenant_id}) mock_send_request.assert_called_once_with("GET", "/tenant-feature-usage/info", params={"tenant_id": tenant_id})
def test_get_quota_info(self, mock_send_request):
"""Test retrieval of quota info from new endpoint."""
# Arrange
tenant_id = "tenant-123"
expected_response = {"trigger_event": {"limit": 100, "usage": 30}, "api_rate_limit": {"limit": -1, "usage": 0}}
mock_send_request.return_value = expected_response
# Act
result = BillingService.get_quota_info(tenant_id)
# Assert
assert result == expected_response
mock_send_request.assert_called_once_with("GET", "/quota/info", params={"tenant_id": tenant_id})
def test_update_tenant_feature_plan_usage_positive_delta(self, mock_send_request): def test_update_tenant_feature_plan_usage_positive_delta(self, mock_send_request):
"""Test updating tenant feature usage with positive delta (adding credits).""" """Test updating tenant feature usage with positive delta (adding credits)."""
# Arrange # Arrange
@ -515,6 +529,150 @@ class TestBillingServiceUsageCalculation:
) )
class TestBillingServiceQuotaOperations:
"""Unit tests for quota reserve/commit/release operations."""
@pytest.fixture
def mock_send_request(self):
with patch.object(BillingService, "_send_request") as mock:
yield mock
def test_quota_reserve_success(self, mock_send_request):
expected = {"reservation_id": "rid-1", "available": 99, "reserved": 1}
mock_send_request.return_value = expected
result = BillingService.quota_reserve(tenant_id="t1", feature_key="trigger_event", request_id="req-1", amount=1)
assert result == expected
mock_send_request.assert_called_once_with(
"POST",
"/quota/reserve",
json={"tenant_id": "t1", "feature_key": "trigger_event", "request_id": "req-1", "amount": 1},
)
def test_quota_reserve_coerces_string_to_int(self, mock_send_request):
"""Test that TypeAdapter coerces string values to int."""
mock_send_request.return_value = {"reservation_id": "rid-str", "available": "99", "reserved": "1"}
result = BillingService.quota_reserve(tenant_id="t1", feature_key="trigger_event", request_id="req-s", amount=1)
assert result["available"] == 99
assert isinstance(result["available"], int)
assert result["reserved"] == 1
assert isinstance(result["reserved"], int)
def test_quota_reserve_with_meta(self, mock_send_request):
mock_send_request.return_value = {"reservation_id": "rid-2", "available": 98, "reserved": 1}
meta = {"source": "webhook"}
BillingService.quota_reserve(
tenant_id="t1", feature_key="trigger_event", request_id="req-2", amount=1, meta=meta
)
call_json = mock_send_request.call_args[1]["json"]
assert call_json["meta"] == {"source": "webhook"}
    def test_quota_commit_success(self, mock_send_request):
        expected = {"available": 98, "reserved": 0, "refunded": 0}
        mock_send_request.return_value = expected
        result = BillingService.quota_commit(
            tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1", actual_amount=1
        )
        assert result == expected
        mock_send_request.assert_called_once_with(
            "POST",
            "/quota/commit",
            json={
                "tenant_id": "t1",
                "feature_key": "trigger_event",
                "reservation_id": "rid-1",
                "actual_amount": 1,
            },
        )

    def test_quota_commit_coerces_string_to_int(self, mock_send_request):
        """Test that TypeAdapter coerces string values to int."""
        mock_send_request.return_value = {"available": "97", "reserved": "0", "refunded": "1"}
        result = BillingService.quota_commit(
            tenant_id="t1", feature_key="trigger_event", reservation_id="rid-s", actual_amount=1
        )
        assert result["available"] == 97
        assert isinstance(result["available"], int)
        assert result["refunded"] == 1
        assert isinstance(result["refunded"], int)

    def test_quota_commit_with_meta(self, mock_send_request):
        mock_send_request.return_value = {"available": 97, "reserved": 0, "refunded": 0}
        meta = {"reason": "partial"}
        BillingService.quota_commit(
            tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1", actual_amount=1, meta=meta
        )
        call_json = mock_send_request.call_args[1]["json"]
        assert call_json["meta"] == {"reason": "partial"}

    def test_quota_release_success(self, mock_send_request):
        expected = {"available": 100, "reserved": 0, "released": 1}
        mock_send_request.return_value = expected
        result = BillingService.quota_release(tenant_id="t1", feature_key="trigger_event", reservation_id="rid-1")
        assert result == expected
        mock_send_request.assert_called_once_with(
            "POST",
            "/quota/release",
            json={"tenant_id": "t1", "feature_key": "trigger_event", "reservation_id": "rid-1"},
        )

    def test_quota_release_coerces_string_to_int(self, mock_send_request):
        """Test that TypeAdapter coerces string values to int."""
        mock_send_request.return_value = {"available": "100", "reserved": "0", "released": "1"}
        result = BillingService.quota_release(tenant_id="t1", feature_key="trigger_event", reservation_id="rid-s")
        assert result["available"] == 100
        assert isinstance(result["available"], int)
        assert result["released"] == 1
        assert isinstance(result["released"], int)

    def test_get_quota_info_coerces_string_to_int(self, mock_send_request):
        """Test that TypeAdapter coerces string values to int for get_quota_info."""
        mock_send_request.return_value = {
            "trigger_event": {"usage": "42", "limit": "3000", "reset_date": "1700000000"},
            "api_rate_limit": {"usage": "10", "limit": "-1", "reset_date": "-1"},
        }
        result = BillingService.get_quota_info("t1")
        assert result["trigger_event"]["usage"] == 42
        assert isinstance(result["trigger_event"]["usage"], int)
        assert result["trigger_event"]["limit"] == 3000
        assert isinstance(result["trigger_event"]["limit"], int)
        assert result["trigger_event"]["reset_date"] == 1700000000
        assert isinstance(result["trigger_event"]["reset_date"], int)
        assert result["api_rate_limit"]["limit"] == -1
        assert isinstance(result["api_rate_limit"]["limit"], int)

    def test_get_quota_info_accepts_int_values(self, mock_send_request):
        """Test that get_quota_info works with native int values."""
        expected = {
            "trigger_event": {"usage": 42, "limit": 3000, "reset_date": 1700000000},
            "api_rate_limit": {"usage": 0, "limit": -1},
        }
        mock_send_request.return_value = expected
        result = BillingService.get_quota_info("t1")
        assert result["trigger_event"]["usage"] == 42
        assert result["trigger_event"]["limit"] == 3000
        assert result["api_rate_limit"]["limit"] == -1
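
The coercion exercised by the tests above can be reproduced with pydantic's TypeAdapter on its own. The sketch below is illustrative only: the dict[str, int] schema and the variable names are assumptions that mirror the mocked payloads, not the service's actual annotations.

from pydantic import TypeAdapter

# Assumed schema: every field of the billing quota response is an int.
quota_result_adapter = TypeAdapter(dict[str, int])

# String values, as a JSON API might return them, are coerced to int in lax mode.
coerced = quota_result_adapter.validate_python({"available": "97", "reserved": "0", "refunded": "1"})
assert coerced == {"available": 97, "reserved": 0, "refunded": 1}
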
class TestBillingServiceRateLimitEnforcement:
    """Unit tests for rate limit enforcement mechanisms.


@ -89,7 +89,6 @@ class TestSegmentServiceChildChunks:
document = _make_document() document = _make_document()
segment = _make_segment() segment = _make_segment()
existing_a = ChildChunk( existing_a = ChildChunk(
id="child-a",
tenant_id="tenant-1", tenant_id="tenant-1",
dataset_id="dataset-1", dataset_id="dataset-1",
document_id="doc-1", document_id="doc-1",
@ -100,7 +99,6 @@ class TestSegmentServiceChildChunks:
created_by="user-1", created_by="user-1",
) )
existing_b = ChildChunk( existing_b = ChildChunk(
id="child-b",
tenant_id="tenant-1", tenant_id="tenant-1",
dataset_id="dataset-1", dataset_id="dataset-1",
document_id="doc-1", document_id="doc-1",
@ -110,7 +108,8 @@ class TestSegmentServiceChildChunks:
word_count=9, word_count=9,
created_by="user-1", created_by="user-1",
) )
existing_a.id = "child-a"
existing_b.id = "child-b"
with ( with (
patch("services.dataset_service.db") as mock_db, patch("services.dataset_service.db") as mock_db,
patch("services.dataset_service.uuid.uuid4", return_value="node-new"), patch("services.dataset_service.uuid.uuid4", return_value="node-new"),


@ -0,0 +1,204 @@
from unittest.mock import MagicMock, patch

import pytest

import tasks.trigger_processing_tasks as trigger_processing_tasks_module
from services.errors.app import QuotaExceededError
from tasks.trigger_processing_tasks import dispatch_triggered_workflow


class TestDispatchTriggeredWorkflow:
    """Unit tests covering branch behaviours of ``dispatch_triggered_workflow``.

    The covered branches are:

    - workflow missing for ``plugin_trigger.app_id`` → log + ``continue``
    - ``QuotaService.reserve`` raising ``QuotaExceededError`` →
      ``mark_tenant_triggers_rate_limited`` + early ``return``
    - ``trigger_workflow_async`` succeeds →
      ``quota_charge.commit()`` + ``dispatched_count`` increments
    """

    @pytest.fixture
    def subscription(self):
        sub = MagicMock()
        sub.id = "subscription-123"
        sub.tenant_id = "tenant-123"
        sub.provider_id = "langgenius/test_plugin/test_plugin"
        sub.endpoint_id = "endpoint-123"
        sub.credentials = {}
        sub.credential_type = "api_key"
        return sub

    @pytest.fixture
    def plugin_trigger(self):
        trigger = MagicMock()
        trigger.id = "plugin-trigger-123"
        trigger.app_id = "app-123"
        trigger.node_id = "node-123"
        return trigger

    @pytest.fixture
    def provider_controller(self):
        controller = MagicMock()
        controller.plugin_unique_identifier = "langgenius/test_plugin:0.0.1"
        controller.entity.identity.name = "Test Plugin"
        controller.entity.identity.icon = "icon.svg"
        controller.entity.identity.icon_dark = "icon_dark.svg"
        return controller

    @pytest.fixture
    def dispatch_mocks(self, subscription, plugin_trigger, provider_controller):
        """Patch all external dependencies reached by ``dispatch_triggered_workflow``.

        Defaults are configured so the code flow can reach the final async
        trigger block (line ~385); each test overrides specific handles
        (``get_workflows``, ``reserve``, ``create_end_user_batch``, ...) to
        drive the path it targets.
        """
        session_cm = MagicMock()
        session_cm.__enter__.return_value = MagicMock()
        session_cm.__exit__.return_value = False
        invoke_response = MagicMock()
        invoke_response.cancelled = False
        invoke_response.variables = {}
        quota_charge = MagicMock()
        with (
            patch.object(
                trigger_processing_tasks_module.TriggerHttpRequestCachingService,
                "get_request",
                return_value=MagicMock(),
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerHttpRequestCachingService,
                "get_payload",
                return_value=MagicMock(),
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerSubscriptionOperatorService,
                "get_subscriber_triggers",
                return_value=[plugin_trigger],
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerManager,
                "get_trigger_provider",
                return_value=provider_controller,
            ),
            patch.object(
                trigger_processing_tasks_module.TriggerManager,
                "invoke_trigger_event",
                return_value=invoke_response,
            ) as invoke_trigger_event,
            patch.object(
                trigger_processing_tasks_module.TriggerEventNodeData,
                "model_validate",
                return_value=MagicMock(),
            ),
            patch.object(
                trigger_processing_tasks_module,
                "_get_latest_workflows_by_app_ids",
            ) as get_workflows,
            patch.object(
                trigger_processing_tasks_module.EndUserService,
                "create_end_user_batch",
                return_value={},
            ) as create_end_user_batch,
            patch.object(
                trigger_processing_tasks_module.session_factory,
                "create_session",
                return_value=session_cm,
            ),
            patch.object(
                trigger_processing_tasks_module.QuotaService,
                "reserve",
                return_value=quota_charge,
            ) as reserve,
            patch.object(
                trigger_processing_tasks_module.AppTriggerService,
                "mark_tenant_triggers_rate_limited",
            ) as mark_rate_limited,
            patch.object(
                trigger_processing_tasks_module.AsyncWorkflowService,
                "trigger_workflow_async",
            ) as trigger_workflow_async,
        ):
            yield {
                "get_workflows": get_workflows,
                "reserve": reserve,
                "quota_charge": quota_charge,
                "mark_rate_limited": mark_rate_limited,
                "invoke_trigger_event": invoke_trigger_event,
                "invoke_response": invoke_response,
                "create_end_user_batch": create_end_user_batch,
                "trigger_workflow_async": trigger_workflow_async,
            }

    def test_dispatch_skips_when_workflow_missing(self, subscription, dispatch_mocks):
        """Covers missing workflow → log + ``continue``."""
        dispatch_mocks["get_workflows"].return_value = {}
        dispatched = dispatch_triggered_workflow(
            user_id="user-123",
            subscription=subscription,
            event_name="test_event",
            request_id="request-123",
        )
        assert dispatched == 0
        dispatch_mocks["reserve"].assert_not_called()
        dispatch_mocks["invoke_trigger_event"].assert_not_called()
        dispatch_mocks["mark_rate_limited"].assert_not_called()

    def test_dispatch_marks_rate_limited_when_quota_exceeded(self, subscription, plugin_trigger, dispatch_mocks):
        """Covers QuotaExceededError → mark rate-limited + early return."""
        workflow_mock = MagicMock()
        workflow_mock.walk_nodes.return_value = iter(
            [(plugin_trigger.node_id, {"type": trigger_processing_tasks_module.TRIGGER_PLUGIN_NODE_TYPE})]
        )
        dispatch_mocks["get_workflows"].return_value = {plugin_trigger.app_id: workflow_mock}
        dispatch_mocks["reserve"].side_effect = QuotaExceededError(
            feature="trigger", tenant_id=subscription.tenant_id, required=1
        )
        dispatched = dispatch_triggered_workflow(
            user_id="user-123",
            subscription=subscription,
            event_name="test_event",
            request_id="request-123",
        )
        assert dispatched == 0
        dispatch_mocks["reserve"].assert_called_once()
        dispatch_mocks["mark_rate_limited"].assert_called_once_with(subscription.tenant_id)
        dispatch_mocks["invoke_trigger_event"].assert_not_called()

    def test_dispatch_commits_quota_and_counts_when_workflow_triggered(
        self, subscription, plugin_trigger, dispatch_mocks
    ):
        """Happy path: end user exists and async trigger succeeds."""
        workflow_mock = MagicMock()
        workflow_mock.id = "workflow-123"
        workflow_mock.walk_nodes.return_value = iter(
            [(plugin_trigger.node_id, {"type": trigger_processing_tasks_module.TRIGGER_PLUGIN_NODE_TYPE})]
        )
        dispatch_mocks["get_workflows"].return_value = {plugin_trigger.app_id: workflow_mock}
        end_user_mock = MagicMock()
        dispatch_mocks["create_end_user_batch"].return_value = {plugin_trigger.app_id: end_user_mock}
        dispatched = dispatch_triggered_workflow(
            user_id="user-123",
            subscription=subscription,
            event_name="test_event",
            request_id="request-123",
        )
        assert dispatched == 1
        dispatch_mocks["trigger_workflow_async"].assert_called_once()
        _, kwargs = dispatch_mocks["trigger_workflow_async"].call_args
        assert kwargs["user"] is end_user_mock
        dispatch_mocks["quota_charge"].commit.assert_called_once()
        dispatch_mocks["quota_charge"].refund.assert_not_called()
        dispatch_mocks["mark_rate_limited"].assert_not_called()

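Taken together, the three tests pin down the control flow of ``dispatch_triggered_workflow``. The skeleton below is only an inference from those tests, not the real implementation in tasks/trigger_processing_tasks.py; the function name, parameters, and the ``reserve``/``trigger_workflow_async`` signatures are hypothetical stand-ins for whatever the task module actually uses.

class QuotaExceededError(Exception):
    """Stand-in for services.errors.app.QuotaExceededError."""


def dispatch_sketch(subscription, triggers, workflows, quota_service, app_trigger_service, trigger_workflow_async):
    """Branch structure suggested by the tests: skip, rate-limit and stop, or commit and count."""
    dispatched = 0
    for trigger in triggers:
        workflow = workflows.get(trigger.app_id)
        if workflow is None:
            continue  # missing workflow: log and move on to the next trigger
        try:
            charge = quota_service.reserve(tenant_id=subscription.tenant_id, feature_key="trigger_event")
        except QuotaExceededError:
            app_trigger_service.mark_tenant_triggers_rate_limited(subscription.tenant_id)
            return dispatched  # early return: nothing further is dispatched for this tenant
        trigger_workflow_async(workflow=workflow, trigger=trigger)
        charge.commit()  # only reached when the async trigger did not raise
        dispatched += 1
    return dispatched
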
api/uv.lock generated

@ -604,29 +604,29 @@ wheels = [
[[package]] [[package]]
name = "boto3" name = "boto3"
version = "1.42.91" version = "1.42.96"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "botocore" }, { name = "botocore" },
{ name = "jmespath" }, { name = "jmespath" },
{ name = "s3transfer" }, { name = "s3transfer" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/a7/c0/98b8cec7ca22dde776df48c58940ae1abc425593959b7226e270760d726f/boto3-1.42.91.tar.gz", hash = "sha256:03d70532b17f7f84df37ca7e8c21553280454dea53ae12b15d1cfef9b16fcb8a", size = 113181, upload-time = "2026-04-17T19:31:06.251Z" } sdist = { url = "https://files.pythonhosted.org/packages/a6/2d/69fb3acd50bab83fb295c167d33c4b653faeb5fb0f42bfca4d9b69d6fb68/boto3-1.42.96.tar.gz", hash = "sha256:b38a9e4a3fbbee9017252576f1379780d0a5814768676c08df2f539d31fcdd68", size = 113203, upload-time = "2026-04-24T19:47:18.677Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/02/29/faba6521257c34085cc9b439ef98235b581772580f417fa3629728007270/boto3-1.42.91-py3-none-any.whl", hash = "sha256:04e72071cde022951ce7f81bd9933c90095ab8923e8ced61c8dacfe9edac0f5c", size = 140553, upload-time = "2026-04-17T19:31:02.57Z" }, { url = "https://files.pythonhosted.org/packages/2b/9d/b3f617d011c42eb804d993103b8fa9acdce153e181a3042f58bfe33d7cb4/boto3-1.42.96-py3-none-any.whl", hash = "sha256:2f4566da2c209a98bdbfc874d813ef231c84ad24e4f815e9bc91de5f63351a24", size = 140557, upload-time = "2026-04-24T19:47:15.824Z" },
] ]
[[package]] [[package]]
name = "boto3-stubs" name = "boto3-stubs"
version = "1.42.92" version = "1.42.96"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "botocore-stubs" }, { name = "botocore-stubs" },
{ name = "types-s3transfer" }, { name = "types-s3transfer" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/fa/b4/7f472d64a89f6aa6b8e8eeadc876667b7e4edfb526c6118efe2b2c98ba17/boto3_stubs-1.42.92.tar.gz", hash = "sha256:4bc934069c5e8c7b3cdd2442569dae14e8272fe207d445bd38aa578b8463638f", size = 102696, upload-time = "2026-04-20T19:55:19.858Z" } sdist = { url = "https://files.pythonhosted.org/packages/77/86/65f45f84621cccc2471871088bab8fe515b4346ba9e48d9001484ec440d6/boto3_stubs-1.42.96.tar.gz", hash = "sha256:1e7819c34d1eae8e5e3cfaf9d144fdcad65aad184b380488871de1d0b2851879", size = 102691, upload-time = "2026-04-24T20:25:13.984Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/ce/2fe2c6456f8dc0b8bb8d80e05e154c7975ec058991bedf54f3aeed634b79/boto3_stubs-1.42.92-py3-none-any.whl", hash = "sha256:b3994e60f0133b2dd3d9a88ceaeef48fa6367d9a9429426e919575768a1ad9c6", size = 70666, upload-time = "2026-04-20T19:55:16.398Z" }, { url = "https://files.pythonhosted.org/packages/a7/51/bdac1ff9fd4321091183776c5adffce5fc7b4d0fec7e38af9064e24a2497/boto3_stubs-1.42.96-py3-none-any.whl", hash = "sha256:2c112e257f40006147a53f6f62075804689154271973b2807f5656feaa804216", size = 70668, upload-time = "2026-04-24T20:25:09.736Z" },
] ]
[package.optional-dependencies] [package.optional-dependencies]
@ -636,16 +636,16 @@ bedrock-runtime = [
[[package]] [[package]]
name = "botocore" name = "botocore"
version = "1.42.91" version = "1.42.96"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "jmespath" }, { name = "jmespath" },
{ name = "python-dateutil" }, { name = "python-dateutil" },
{ name = "urllib3" }, { name = "urllib3" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/21/bc/a4b7c46471c2e789ad8c4c7acfd7f302fdb481d93ff870f441249b924ae6/botocore-1.42.91.tar.gz", hash = "sha256:d252e27bc454afdbf5ed3dc617aa423f2c855c081e98b7963093399483ecc698", size = 15213010, upload-time = "2026-04-17T19:30:50.793Z" } sdist = { url = "https://files.pythonhosted.org/packages/61/77/2c333622a1d47cf5bf73cdcab0cb6c92addafbef2ec05f81b9f75687d9e5/botocore-1.42.96.tar.gz", hash = "sha256:75b3b841ffacaa944f645196655a21ca777591dd8911e732bfb6614545af0250", size = 15263344, upload-time = "2026-04-24T19:47:05.283Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/b1/fc/24cc0a47c824f13933e210e9ad034b4fba22f7185b8d904c0fbf5a3b2be8/botocore-1.42.91-py3-none-any.whl", hash = "sha256:7a28c3cc6bfab5724ad18899d52402b776a0de7d87fa20c3c5270bcaaf199ce8", size = 14897344, upload-time = "2026-04-17T19:30:44.245Z" }, { url = "https://files.pythonhosted.org/packages/45/56/152c3a859ca1b9d77ed16deac3cf81682013677c68cf5715698781fc81bd/botocore-1.42.96-py3-none-any.whl", hash = "sha256:db2c3e2006628be6fde81a24124a6563c363d6982fb92728837cf174bad9d98a", size = 14945920, upload-time = "2026-04-24T19:47:00.323Z" },
] ]
[[package]] [[package]]
@ -1058,7 +1058,7 @@ wheels = [
[[package]] [[package]]
name = "cos-python-sdk-v5" name = "cos-python-sdk-v5"
version = "1.9.41" version = "1.9.42"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "crcmod" }, { name = "crcmod" },
@ -1067,9 +1067,9 @@ dependencies = [
{ name = "six" }, { name = "six" },
{ name = "xmltodict" }, { name = "xmltodict" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/0e/38/c0029f413f51238aa2319715f45d74bcae931768e36c7e4604b02f407c6c/cos_python_sdk_v5-1.9.41.tar.gz", hash = "sha256:68f4be7d8fe27a1d186b3159b93c622816e398effdc236eddd442b86db592b82", size = 102625, upload-time = "2026-01-06T07:00:11.692Z" } sdist = { url = "https://files.pythonhosted.org/packages/40/e3/b903b4acde334510f481d126a686bc4013710c00e2af34bff369511329ac/cos_python_sdk_v5-1.9.42.tar.gz", hash = "sha256:2a01d1868f50c5a70771f2b67da868f1dc6c6f3890f8009715313834404decc4", size = 102670, upload-time = "2026-04-23T11:08:27.949Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/aa/2f/ead3fb551509fdc94e4a42093b770e3de2827ff7227570165df5e35c2a3e/cos_python_sdk_v5-1.9.41-py3-none-any.whl", hash = "sha256:f465aae43a4ba3f1caa8caeaca838d0395932f6848e89d6dde2807725e3c88a0", size = 98285, upload-time = "2026-01-06T06:43:02.754Z" }, { url = "https://files.pythonhosted.org/packages/ee/bf/4ea660bb79d91fd41ba394605eccffd3d0943ed547b3fe2bdc6c7a52d2d1/cos_python_sdk_v5-1.9.42-py3-none-any.whl", hash = "sha256:02e583a1094e1794e6c0f56618d5190eb9eb7bfe75909f1dfac41bbee46e46c5", size = 98375, upload-time = "2026-04-23T11:05:14.519Z" },
] ]
[[package]] [[package]]
@ -1299,6 +1299,7 @@ dependencies = [
{ name = "celery" }, { name = "celery" },
{ name = "croniter" }, { name = "croniter" },
{ name = "fastopenapi", extra = ["flask"] }, { name = "fastopenapi", extra = ["flask"] },
{ name = "flask" },
{ name = "flask-compress" }, { name = "flask-compress" },
{ name = "flask-cors" }, { name = "flask-cors" },
{ name = "flask-login" }, { name = "flask-login" },
@ -1577,10 +1578,11 @@ requires-dist = [
{ name = "aliyun-log-python-sdk", specifier = ">=0.9.44,<1.0.0" }, { name = "aliyun-log-python-sdk", specifier = ">=0.9.44,<1.0.0" },
{ name = "azure-identity", specifier = ">=1.25.3,<2.0.0" }, { name = "azure-identity", specifier = ">=1.25.3,<2.0.0" },
{ name = "bleach", specifier = ">=6.3.0" }, { name = "bleach", specifier = ">=6.3.0" },
{ name = "boto3", specifier = ">=1.42.91" }, { name = "boto3", specifier = ">=1.42.96" },
{ name = "celery", specifier = ">=5.6.3" }, { name = "celery", specifier = ">=5.6.3" },
{ name = "croniter", specifier = ">=6.2.2" }, { name = "croniter", specifier = ">=6.2.2" },
{ name = "fastopenapi", extras = ["flask"], specifier = "~=0.7.0" }, { name = "fastopenapi", extras = ["flask"], specifier = "~=0.7.0" },
{ name = "flask", specifier = ">=3.1.3,<4.0.0" },
{ name = "flask-compress", specifier = ">=1.24,<2.0.0" }, { name = "flask-compress", specifier = ">=1.24,<2.0.0" },
{ name = "flask-cors", specifier = ">=6.0.2" }, { name = "flask-cors", specifier = ">=6.0.2" },
{ name = "flask-login", specifier = ">=0.6.3,<1.0.0" }, { name = "flask-login", specifier = ">=0.6.3,<1.0.0" },
@ -1597,15 +1599,15 @@ requires-dist = [
{ name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<1.0.0" }, { name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<1.0.0" },
{ name = "httpx-sse", specifier = "~=0.4.0" }, { name = "httpx-sse", specifier = "~=0.4.0" },
{ name = "json-repair", specifier = "~=0.59.4" }, { name = "json-repair", specifier = "~=0.59.4" },
{ name = "opentelemetry-distro", specifier = ">=0.62b0,<1.0.0" }, { name = "opentelemetry-distro", specifier = ">=0.62b1,<1.0.0" },
{ name = "opentelemetry-instrumentation-celery", specifier = ">=0.62b0,<1.0.0" }, { name = "opentelemetry-instrumentation-celery", specifier = ">=0.62b0,<1.0.0" },
{ name = "opentelemetry-instrumentation-flask", specifier = ">=0.62b0,<1.0.0" }, { name = "opentelemetry-instrumentation-flask", specifier = ">=0.62b0,<1.0.0" },
{ name = "opentelemetry-instrumentation-httpx", specifier = ">=0.62b0,<1.0.0" }, { name = "opentelemetry-instrumentation-httpx", specifier = ">=0.62b0,<1.0.0" },
{ name = "opentelemetry-instrumentation-redis", specifier = ">=0.62b0,<1.0.0" }, { name = "opentelemetry-instrumentation-redis", specifier = ">=0.62b0,<1.0.0" },
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = ">=0.62b0,<1.0.0" }, { name = "opentelemetry-instrumentation-sqlalchemy", specifier = ">=0.62b0,<1.0.0" },
{ name = "opentelemetry-propagator-b3", specifier = ">=1.41.0,<2.0.0" }, { name = "opentelemetry-propagator-b3", specifier = ">=1.41.1,<2.0.0" },
{ name = "psycogreen", specifier = ">=1.0.2" }, { name = "psycogreen", specifier = ">=1.0.2" },
{ name = "psycopg2-binary", specifier = ">=2.9.11" }, { name = "psycopg2-binary", specifier = ">=2.9.12" },
{ name = "python-socketio", specifier = ">=5.13.0" }, { name = "python-socketio", specifier = ">=5.13.0" },
{ name = "readabilipy", specifier = ">=0.3.0,<1.0.0" }, { name = "readabilipy", specifier = ">=0.3.0,<1.0.0" },
{ name = "redis", extras = ["hiredis"], specifier = ">=7.4.0" }, { name = "redis", extras = ["hiredis"], specifier = ">=7.4.0" },
@ -1617,15 +1619,15 @@ requires-dist = [
[package.metadata.requires-dev] [package.metadata.requires-dev]
dev = [ dev = [
{ name = "basedpyright", specifier = ">=1.39.3" }, { name = "basedpyright", specifier = ">=1.39.3" },
{ name = "boto3-stubs", specifier = ">=1.42.92" }, { name = "boto3-stubs", specifier = ">=1.42.96" },
{ name = "celery-types", specifier = ">=0.23.0" }, { name = "celery-types", specifier = ">=0.23.0" },
{ name = "coverage", specifier = ">=7.13.4" }, { name = "coverage", specifier = ">=7.13.4" },
{ name = "dotenv-linter", specifier = ">=0.7.0" }, { name = "dotenv-linter", specifier = ">=0.7.0" },
{ name = "faker", specifier = ">=40.15.0" }, { name = "faker", specifier = ">=40.15.0" },
{ name = "hypothesis", specifier = ">=6.152.1" }, { name = "hypothesis", specifier = ">=6.152.3" },
{ name = "import-linter", specifier = ">=2.3" }, { name = "import-linter", specifier = ">=2.3" },
{ name = "lxml-stubs", specifier = ">=0.5.1" }, { name = "lxml-stubs", specifier = ">=0.5.1" },
{ name = "mypy", specifier = ">=1.20.1" }, { name = "mypy", specifier = ">=1.20.2" },
{ name = "pandas-stubs", specifier = ">=3.0.0" }, { name = "pandas-stubs", specifier = ">=3.0.0" },
{ name = "pyrefly", specifier = ">=0.62.0" }, { name = "pyrefly", specifier = ">=0.62.0" },
{ name = "pytest", specifier = ">=9.0.3" }, { name = "pytest", specifier = ">=9.0.3" },
@ -1635,7 +1637,7 @@ dev = [
{ name = "pytest-mock", specifier = ">=3.15.1" }, { name = "pytest-mock", specifier = ">=3.15.1" },
{ name = "pytest-timeout", specifier = ">=2.4.0" }, { name = "pytest-timeout", specifier = ">=2.4.0" },
{ name = "pytest-xdist", specifier = ">=3.8.0" }, { name = "pytest-xdist", specifier = ">=3.8.0" },
{ name = "ruff", specifier = ">=0.15.11" }, { name = "ruff", specifier = ">=0.15.12" },
{ name = "scipy-stubs", specifier = ">=1.17.1.4" }, { name = "scipy-stubs", specifier = ">=1.17.1.4" },
{ name = "testcontainers", specifier = ">=4.14.2" }, { name = "testcontainers", specifier = ">=4.14.2" },
{ name = "types-aiofiles", specifier = ">=25.1.0" }, { name = "types-aiofiles", specifier = ">=25.1.0" },
@ -1660,7 +1662,7 @@ dev = [
{ name = "types-pexpect", specifier = ">=4.9.0" }, { name = "types-pexpect", specifier = ">=4.9.0" },
{ name = "types-protobuf", specifier = ">=7.34.1" }, { name = "types-protobuf", specifier = ">=7.34.1" },
{ name = "types-psutil", specifier = ">=7.2.2" }, { name = "types-psutil", specifier = ">=7.2.2" },
{ name = "types-psycopg2", specifier = ">=2.9.21" }, { name = "types-psycopg2", specifier = ">=2.9.21.20260422" },
{ name = "types-pygments", specifier = ">=2.20.0" }, { name = "types-pygments", specifier = ">=2.20.0" },
{ name = "types-pymysql", specifier = ">=1.1.0" }, { name = "types-pymysql", specifier = ">=1.1.0" },
{ name = "types-pyopenssl", specifier = ">=24.1.0" }, { name = "types-pyopenssl", specifier = ">=24.1.0" },
@ -1677,17 +1679,17 @@ dev = [
{ name = "types-tensorflow", specifier = ">=2.18.0.20260408" }, { name = "types-tensorflow", specifier = ">=2.18.0.20260408" },
{ name = "types-tqdm", specifier = ">=4.67.3.20260408" }, { name = "types-tqdm", specifier = ">=4.67.3.20260408" },
{ name = "types-ujson", specifier = ">=5.10.0" }, { name = "types-ujson", specifier = ">=5.10.0" },
{ name = "xinference-client", specifier = ">=2.5.0" }, { name = "xinference-client", specifier = ">=2.7.0" },
] ]
storage = [ storage = [
{ name = "azure-storage-blob", specifier = ">=12.28.0" }, { name = "azure-storage-blob", specifier = ">=12.28.0" },
{ name = "bce-python-sdk", specifier = ">=0.9.70" }, { name = "bce-python-sdk", specifier = ">=0.9.70" },
{ name = "cos-python-sdk-v5", specifier = ">=1.9.41" }, { name = "cos-python-sdk-v5", specifier = ">=1.9.42" },
{ name = "esdk-obs-python", specifier = ">=3.22.2" }, { name = "esdk-obs-python", specifier = ">=3.22.2" },
{ name = "google-cloud-storage", specifier = ">=3.10.1" }, { name = "google-cloud-storage", specifier = ">=3.10.1" },
{ name = "opendal", specifier = ">=0.46.0" }, { name = "opendal", specifier = ">=0.46.0" },
{ name = "oss2", specifier = ">=2.19.1" }, { name = "oss2", specifier = ">=2.19.1" },
{ name = "supabase", specifier = ">=2.28.3" }, { name = "supabase", specifier = ">=2.29.0" },
{ name = "tos", specifier = ">=2.9.0" }, { name = "tos", specifier = ">=2.9.0" },
] ]
tools = [ tools = [
@ -1774,7 +1776,7 @@ vdb-upstash = [{ name = "dify-vdb-upstash", editable = "providers/vdb/vdb-upstas
vdb-vastbase = [{ name = "dify-vdb-vastbase", editable = "providers/vdb/vdb-vastbase" }] vdb-vastbase = [{ name = "dify-vdb-vastbase", editable = "providers/vdb/vdb-vastbase" }]
vdb-vikingdb = [{ name = "dify-vdb-vikingdb", editable = "providers/vdb/vdb-vikingdb" }] vdb-vikingdb = [{ name = "dify-vdb-vikingdb", editable = "providers/vdb/vdb-vikingdb" }]
vdb-weaviate = [{ name = "dify-vdb-weaviate", editable = "providers/vdb/vdb-weaviate" }] vdb-weaviate = [{ name = "dify-vdb-weaviate", editable = "providers/vdb/vdb-weaviate" }]
vdb-xinference = [{ name = "xinference-client", specifier = ">=2.5.0" }] vdb-xinference = [{ name = "xinference-client", specifier = ">=2.7.0" }]
[[package]] [[package]]
name = "dify-trace-aliyun" name = "dify-trace-aliyun"
@ -2655,14 +2657,14 @@ wheels = [
[[package]] [[package]]
name = "gitpython" name = "gitpython"
version = "3.1.45" version = "3.1.47"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "gitdb" }, { name = "gitdb" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076, upload-time = "2025-07-24T03:45:54.871Z" } sdist = { url = "https://files.pythonhosted.org/packages/c1/bd/50db468e9b1310529a19fce651b3b0e753b5c07954d486cba31bbee9a5d5/gitpython-3.1.47.tar.gz", hash = "sha256:dba27f922bd2b42cb54c87a8ab3cb6beb6bf07f3d564e21ac848913a05a8a3cd", size = 216978, upload-time = "2026-04-22T02:44:44.059Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168, upload-time = "2025-07-24T03:45:52.517Z" }, { url = "https://files.pythonhosted.org/packages/f2/c5/a1bc0996af85757903cf2bf444a7824e68e0035ce63fb41d6f76f9def68b/gitpython-3.1.47-py3-none-any.whl", hash = "sha256:489f590edfd6d20571b2c0e72c6a6ac6915ee8b8cd04572330e3842207a78905", size = 209547, upload-time = "2026-04-22T02:44:41.271Z" },
] ]
[[package]] [[package]]
@ -3317,14 +3319,14 @@ wheels = [
[[package]] [[package]]
name = "hypothesis" name = "hypothesis"
version = "6.152.1" version = "6.152.3"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "sortedcontainers" }, { name = "sortedcontainers" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/64/b1/c32bcddb9aab9e3abc700f1f56faf14e7655c64a16ca47701a57362276ea/hypothesis-6.152.1.tar.gz", hash = "sha256:4f4ed934eee295dd84ee97592477d23e8dc03e9f12ae0ee30a4e7c9ef3fca3b0", size = 465029, upload-time = "2026-04-14T22:29:24.062Z" } sdist = { url = "https://files.pythonhosted.org/packages/70/90/fc0b263b6f2622e5f8d2aa93f2e95ba79718a5faa7d2a74bfab10d6b0905/hypothesis-6.152.3.tar.gz", hash = "sha256:c4e5300d3755b6c8a270a28fe5abff40153e927328e89d2bb0229c1384618998", size = 466478, upload-time = "2026-04-26T17:31:07.657Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/83/860fb3075e00b0fc19a22a2301bc3c96f00437558c3911bdd0a3573a4a53/hypothesis-6.152.1-py3-none-any.whl", hash = "sha256:40a3619d9e0cb97b018857c7986f75cf5de2e5ec0fa8a0b172d00747758f749e", size = 530752, upload-time = "2026-04-14T22:29:20.893Z" }, { url = "https://files.pythonhosted.org/packages/90/38/15475b91a4c12721d2be3349e9d6cf8649c76ed9bc1287e2de7c8d06c261/hypothesis-6.152.3-py3-none-any.whl", hash = "sha256:4b47f00916c858ed49cf870a2f08b04e5fff5afae0bb78f3b4a6d9c74fd6c7bc", size = 532154, upload-time = "2026-04-26T17:31:04.42Z" },
] ]
[[package]] [[package]]
@ -3945,7 +3947,7 @@ wheels = [
[[package]] [[package]]
name = "mypy" name = "mypy"
version = "1.20.1" version = "1.20.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "librt", marker = "platform_python_implementation != 'PyPy'" }, { name = "librt", marker = "platform_python_implementation != 'PyPy'" },
@ -3953,16 +3955,16 @@ dependencies = [
{ name = "pathspec" }, { name = "pathspec" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/0b/3d/5b373635b3146264eb7a68d09e5ca11c305bbb058dfffbb47c47daf4f632/mypy-1.20.1.tar.gz", hash = "sha256:6fc3f4ecd52de81648fed1945498bf42fa2993ddfad67c9056df36ae5757f804", size = 3815892, upload-time = "2026-04-13T02:46:51.474Z" } sdist = { url = "https://files.pythonhosted.org/packages/04/af/e3d4b3e9ec91a0ff9aabfdb38692952acf49bbb899c2e4c29acb3a6da3ae/mypy-1.20.2.tar.gz", hash = "sha256:e8222c26daaafd9e8626dec58ae36029f82585890589576f769a650dd20fd665", size = 3817349, upload-time = "2026-04-21T17:12:28.473Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/69/1b/75a7c825a02781ca10bc2f2f12fba2af5202f6d6005aad8d2d1f264d8d78/mypy-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:36ee2b9c6599c230fea89bbd79f401f9f9f8e9fcf0c777827789b19b7da90f51", size = 14494077, upload-time = "2026-04-13T02:45:55.085Z" }, { url = "https://files.pythonhosted.org/packages/71/4e/7560e4528db9e9b147e4c0f22660466bf30a0a1fe3d63d1b9d3b0fd354ee/mypy-1.20.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4dbfcf869f6b0517f70cf0030ba6ea1d6645e132337a7d5204a18d8d5636c02b", size = 14539393, upload-time = "2026-04-21T17:07:12.52Z" },
{ url = "https://files.pythonhosted.org/packages/b0/54/5e5a569ea5c2b4d48b729fb32aa936eeb4246e4fc3e6f5b3d36a2dfbefb9/mypy-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fba3fb0968a7b48806b0c90f38d39296f10766885a94c83bd21399de1e14eb28", size = 13319495, upload-time = "2026-04-13T02:45:29.674Z" }, { url = "https://files.pythonhosted.org/packages/32/d9/34a5efed8124f5a9234f55ac6a4ced4201e2c5b81e1109c49ad23190ec8c/mypy-1.20.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b6481b228d072315b053210b01ac320e1be243dc17f9e5887ef167f23f5fae4", size = 13361642, upload-time = "2026-04-21T17:06:53.742Z" },
{ url = "https://files.pythonhosted.org/packages/6f/a4/a1945b19f33e91721b59deee3abb484f2fa5922adc33bb166daf5325d76d/mypy-1.20.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef1415a637cd3627d6304dfbeddbadd21079dafc2a8a753c477ce4fc0c2af54f", size = 13696948, upload-time = "2026-04-13T02:46:15.006Z" }, { url = "https://files.pythonhosted.org/packages/d1/14/eb377acf78c03c92d566a1510cda8137348215b5335085ef662ab82ecd3a/mypy-1.20.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34397cdced6b90b836e38182076049fdb41424322e0b0728c946b0939ebdf9f6", size = 13740347, upload-time = "2026-04-21T17:12:04.73Z" },
{ url = "https://files.pythonhosted.org/packages/b2/c6/75e969781c2359b2f9c15b061f28ec6d67c8b61865ceda176e85c8e7f2de/mypy-1.20.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef3461b1ad5cd446e540016e90b5984657edda39f982f4cc45ca317b628f5a37", size = 14706744, upload-time = "2026-04-13T02:46:00.482Z" }, { url = "https://files.pythonhosted.org/packages/b9/94/7e4634a32b641aa1c112422eed1bbece61ee16205f674190e8b536f884de/mypy-1.20.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5da6976f20cae27059ea8d0c86e7cef3de720e04c4bb9ee18e3690fdb792066", size = 14734042, upload-time = "2026-04-21T17:07:43.16Z" },
{ url = "https://files.pythonhosted.org/packages/a8/6e/b221b1de981fc4262fe3e0bf9ec272d292dfe42394a689c2d49765c144c4/mypy-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:542dd63c9e1339b6092eb25bd515f3a32a1453aee8c9521d2ddb17dacd840237", size = 14949035, upload-time = "2026-04-13T02:45:06.021Z" }, { url = "https://files.pythonhosted.org/packages/7a/f3/f7e62395cb7f434541b4491a01149a4439e28ace4c0c632bbf5431e92d1f/mypy-1.20.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:56908d7e08318d39f85b1f0c6cfd47b0cac1a130da677630dac0de3e0623e102", size = 14964958, upload-time = "2026-04-21T17:11:00.665Z" },
{ url = "https://files.pythonhosted.org/packages/ca/4b/298ba2de0aafc0da3ff2288da06884aae7ba6489bc247c933f87847c41b3/mypy-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:1d55c7cd8ca22e31f93af2a01160a9e95465b5878de23dba7e48116052f20a8d", size = 10883216, upload-time = "2026-04-13T02:45:47.232Z" }, { url = "https://files.pythonhosted.org/packages/3e/0d/47e3c3a0ec2a876e35aeac365df3cac7776c36bbd4ed18cc521e1b9d255b/mypy-1.20.2-cp312-cp312-win_amd64.whl", hash = "sha256:d52ad8d78522da1d308789df651ee5379088e77c76cb1994858d40a426b343b9", size = 10911340, upload-time = "2026-04-21T17:10:49.179Z" },
{ url = "https://files.pythonhosted.org/packages/c7/f9/5e25b8f0b8cb92f080bfed9c21d3279b2a0b6a601cdca369a039ba84789d/mypy-1.20.1-cp312-cp312-win_arm64.whl", hash = "sha256:f5b84a79070586e0d353ee07b719d9d0a4aa7c8ee90c0ea97747e98cbe193019", size = 9814299, upload-time = "2026-04-13T02:45:21.934Z" }, { url = "https://files.pythonhosted.org/packages/d6/b2/6c852d72e0ea8b01f49da817fb52539993cde327e7d010e0103dc12d0dac/mypy-1.20.2-cp312-cp312-win_arm64.whl", hash = "sha256:785b08db19c9f214dc37d65f7c165d19a30fcecb48abfa30f31b01b5acaabb58", size = 9833947, upload-time = "2026-04-21T17:09:05.267Z" },
{ url = "https://files.pythonhosted.org/packages/d8/28/926bd972388e65a39ee98e188ccf67e81beb3aacfd5d6b310051772d974b/mypy-1.20.1-py3-none-any.whl", hash = "sha256:1aae28507f253fe82d883790d1c0a0d35798a810117c88184097fe8881052f06", size = 2636553, upload-time = "2026-04-13T02:46:30.45Z" }, { url = "https://files.pythonhosted.org/packages/28/9a/f23c163e25b11074188251b0b5a0342625fc1cdb6af604757174fa9acc9b/mypy-1.20.2-py3-none-any.whl", hash = "sha256:a94c5a76ab46c5e6257c7972b6c8cff0574201ca7dc05647e33e795d78680563", size = 2637314, upload-time = "2026-04-21T17:05:54.5Z" },
] ]
[[package]] [[package]]
@ -4233,29 +4235,29 @@ wheels = [
[[package]] [[package]]
name = "opentelemetry-api" name = "opentelemetry-api"
version = "1.41.0" version = "1.41.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "importlib-metadata" }, { name = "importlib-metadata" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/47/8e/3778a7e87801d994869a9396b9fc2a289e5f9be91ff54a27d41eace494b0/opentelemetry_api-1.41.0.tar.gz", hash = "sha256:9421d911326ec12dee8bc933f7839090cad7a3f13fcfb0f9e82f8174dc003c09", size = 71416, upload-time = "2026-04-09T14:38:34.544Z" } sdist = { url = "https://files.pythonhosted.org/packages/fa/fc/b7564cbef36601aef0d6c9bc01f7badb64be8e862c2e1c3c5c3b43b53e4f/opentelemetry_api-1.41.1.tar.gz", hash = "sha256:0ad1814d73b875f84494387dae86ce0b12c68556331ce6ce8fe789197c949621", size = 71416, upload-time = "2026-04-24T13:15:38.262Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/58/ee/99ab786653b3bda9c37ade7e24a7b607a1b1f696063172768417539d876d/opentelemetry_api-1.41.0-py3-none-any.whl", hash = "sha256:0e77c806e6a89c9e4f8d372034622f3e1418a11bdbe1c80a50b3d3397ad0fa4f", size = 69007, upload-time = "2026-04-09T14:38:11.833Z" }, { url = "https://files.pythonhosted.org/packages/29/59/3e7118ed140f76b0982ba4321bdaed1997a0473f9720de2d10788a577033/opentelemetry_api-1.41.1-py3-none-any.whl", hash = "sha256:a22df900e75c76dc08440710e51f52f1aa6b451b429298896023e60db5b3139f", size = 69007, upload-time = "2026-04-24T13:15:15.662Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-distro" name = "opentelemetry-distro"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-sdk" }, { name = "opentelemetry-sdk" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/72/c6/52b0dbcc8fbdecf179047921940516cbb8aaf05f6b737faa526ad76fec51/opentelemetry_distro-0.62b0.tar.gz", hash = "sha256:aa0308fbe50ad8f17d4446982dbf26870e20b8031ba38d8e1224ecf7aedd3184", size = 2611, upload-time = "2026-04-09T14:40:20.404Z" } sdist = { url = "https://files.pythonhosted.org/packages/45/f1/314e5015e353a001948e03f48a6935ca7ef00e99107b8e3e63871426b0f6/opentelemetry_distro-0.62b1.tar.gz", hash = "sha256:0169b128b9d6d5cab809ae4c4fb3d576bfc5d3f30b32d8a43b770b587f04f253", size = 2606, upload-time = "2026-04-24T13:22:29.403Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/7e/5858bba1c7ed880c7b0fe7d9a1ea40ab8affd18c9ebc1e16c2d69c501da1/opentelemetry_distro-0.62b0-py3-none-any.whl", hash = "sha256:23e9065a35cef12868ad5efb18ce9c88a9103800256b318dec4c9c850c6c78c1", size = 3348, upload-time = "2026-04-09T14:39:17.406Z" }, { url = "https://files.pythonhosted.org/packages/b9/19/c58c119a299298f03d0797fcb780f221880e8d725959c71bcfb4ae034738/opentelemetry_distro-0.62b1-py3-none-any.whl", hash = "sha256:fd938de6ca1d047ffd15a65fa09d89f4b4ca7dd97ef25601a12d6d10efd693a0", size = 3348, upload-time = "2026-04-24T13:21:27.389Z" },
] ]
[[package]] [[package]]
@ -4321,7 +4323,7 @@ wheels = [
[[package]] [[package]]
name = "opentelemetry-instrumentation" name = "opentelemetry-instrumentation"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
@ -4329,14 +4331,14 @@ dependencies = [
{ name = "packaging" }, { name = "packaging" },
{ name = "wrapt" }, { name = "wrapt" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/f9/fd/b8e90bb340957f059084376f94cff336b0e871a42feba7d3f7342365e987/opentelemetry_instrumentation-0.62b0.tar.gz", hash = "sha256:aa1b0b9ab2e1722c2a8a5384fb016fc28d30bba51826676c8036074790d2861e", size = 34042, upload-time = "2026-04-09T14:40:22.843Z" } sdist = { url = "https://files.pythonhosted.org/packages/52/cb/0523b92c112a6cc70be43724343dc45225d3af134419844d7879a07755d4/opentelemetry_instrumentation-0.62b1.tar.gz", hash = "sha256:90e92a905ba4f84db06ac3aec96701df6c079b2d66e9379f8739f0a1bdcc7f45", size = 34043, upload-time = "2026-04-24T13:22:31.997Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/00/b6/3356d2e335e3c449c5183e9b023f30f04f1b7073a6583c68745ea2e704b1/opentelemetry_instrumentation-0.62b0-py3-none-any.whl", hash = "sha256:30d4e76486eae64fb095264a70c2c809c4bed17b73373e53091470661f7d477c", size = 34158, upload-time = "2026-04-09T14:39:21.428Z" }, { url = "https://files.pythonhosted.org/packages/4d/0f/45adbaea1f81b847cffdcee4f4b5f89297e42facf7fac78c7aaac4c38e75/opentelemetry_instrumentation-0.62b1-py3-none-any.whl", hash = "sha256:976fc6e640f2006599e97429c949e622c108d0c17c2059347d1e6c93c707f257", size = 34163, upload-time = "2026-04-24T13:21:31.722Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-asgi" name = "opentelemetry-instrumentation-asgi"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "asgiref" }, { name = "asgiref" },
@ -4345,28 +4347,28 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" }, { name = "opentelemetry-util-http" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/f1/38/999bf777774878971c2716de4b7a03cd57a7decb4af25090e703b79fa0e5/opentelemetry_instrumentation_asgi-0.62b0.tar.gz", hash = "sha256:93cde8c62e5918a3c1ff9ba020518127300e5e0816b7e8b14baf46a26ba619fc", size = 26779, upload-time = "2026-04-09T14:40:26.566Z" } sdist = { url = "https://files.pythonhosted.org/packages/54/43/b2f0703ff46718ff7b17d7fbf8e9d7f20e26a23c7c325092dd762d09cf9d/opentelemetry_instrumentation_asgi-0.62b1.tar.gz", hash = "sha256:7cf5f5d5c493bbb1edd2bd6d51fa879d964e94048904017258a32ffa47329310", size = 26781, upload-time = "2026-04-24T13:22:37.158Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/25/cf/29df82f5870178143bdb5c9a7be044b9f78c71e1c5dcf995242e86d80158/opentelemetry_instrumentation_asgi-0.62b0-py3-none-any.whl", hash = "sha256:89b62a6f996b260b162f515c25e6d78e39286e4cbe2f935899e51b32f31027e2", size = 17011, upload-time = "2026-04-09T14:39:27.305Z" }, { url = "https://files.pythonhosted.org/packages/d0/41/968c1fe12fb90abffca6620e65d4af91451c02ecca8f74a17a62cac490de/opentelemetry_instrumentation_asgi-0.62b1-py3-none-any.whl", hash = "sha256:b7f89be48528512619bd54fa2459f72afb1695ba71d7024d382ad96d467e7fa8", size = 17011, upload-time = "2026-04-24T13:21:38.006Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-celery" name = "opentelemetry-instrumentation-celery"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-semantic-conventions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/01/b4/20a3c8c669dc45aa3703c0370041d67e8be613f1829523cdaf634a5f9626/opentelemetry_instrumentation_celery-0.62b0.tar.gz", hash = "sha256:55e8fa48e5b886bcca448fa32e28a6cc2165157745e8328de479a826d3903095", size = 14808, upload-time = "2026-04-09T14:40:31.603Z" } sdist = { url = "https://files.pythonhosted.org/packages/35/86/9e78c174b2f6ea92af3f99aa7488807b74290a5cd44a8e05bfbfd7b109be/opentelemetry_instrumentation_celery-0.62b1.tar.gz", hash = "sha256:f0035abd464a2989414a9c5ecdd79a25c87bd8c43f96c7f39e07000c6f25dfef", size = 14809, upload-time = "2026-04-24T13:22:45.656Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f6/60/cf951e6bd6ec62ec55bd2384e0ba9841ea38f2d128c773d85dc60da97172/opentelemetry_instrumentation_celery-0.62b0-py3-none-any.whl", hash = "sha256:cadfd3e65287a36099dce5ba7e05d98e4c5f9479a455241e01d140ecc5c10935", size = 13864, upload-time = "2026-04-09T14:39:35.009Z" }, { url = "https://files.pythonhosted.org/packages/24/51/f38a31ac8f8e3bd365f301f697661679addaf548d52a05cfdde4448a5493/opentelemetry_instrumentation_celery-0.62b1-py3-none-any.whl", hash = "sha256:50567a47b7adc4ea552d09709de4d73fea7b4ff24ab0e9d38739d03fcd3f95ef", size = 13864, upload-time = "2026-04-24T13:21:46.557Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-fastapi" name = "opentelemetry-instrumentation-fastapi"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
@ -4375,14 +4377,14 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" }, { name = "opentelemetry-util-http" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/37/09/92740c6d114d1bef392557a03ae6de64065c83c1b331dae9b57fe718497c/opentelemetry_instrumentation_fastapi-0.62b0.tar.gz", hash = "sha256:e4748e4e575077e08beaf2c5d2f369da63dd90882d89d73c4192a97356637dec", size = 25056, upload-time = "2026-04-09T14:40:36.438Z" } sdist = { url = "https://files.pythonhosted.org/packages/77/38/91780475a25370b6d483afbaed3e1e170459d6351c5f7c08d66b65e2172e/opentelemetry_instrumentation_fastapi-0.62b1.tar.gz", hash = "sha256:b377d4ba32868fb1ff0f64da3fcdd3aa154d698fc83d65f5d380ea21bf31ee19", size = 25054, upload-time = "2026-04-24T13:22:50.222Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/64/bb/186ffe0fde0ad33ceb50e1d3596cc849b732d3b825592a6a507a40c8c49b/opentelemetry_instrumentation_fastapi-0.62b0-py3-none-any.whl", hash = "sha256:06d3272ad15f9daea5a0a27c32831aff376110a4b0394197120256ef6d610e6e", size = 13482, upload-time = "2026-04-09T14:39:43.446Z" }, { url = "https://files.pythonhosted.org/packages/8c/6f/602e4081d3fe82731aff7e3e9c2f1662d85701841d6dc25f16a1874e11cd/opentelemetry_instrumentation_fastapi-0.62b1-py3-none-any.whl", hash = "sha256:93fa9cc4f315819aee5f4fceb6196c1e5b0fbd789c5520c631de228bd3e5285b", size = 13484, upload-time = "2026-04-24T13:21:54.538Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-flask" name = "opentelemetry-instrumentation-flask"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
@ -4392,14 +4394,14 @@ dependencies = [
{ name = "opentelemetry-util-http" }, { name = "opentelemetry-util-http" },
{ name = "packaging" }, { name = "packaging" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/8e/86/522294f6a80d59560d8f722da59513d2ed2d53c6178fa109789dacc5dd50/opentelemetry_instrumentation_flask-0.62b0.tar.gz", hash = "sha256:330e903c0e92b06aae32f9eb7b8a923599d7a29440f50841a59dbba34ec6dd9f", size = 24100, upload-time = "2026-04-09T14:40:37.111Z" } sdist = { url = "https://files.pythonhosted.org/packages/d3/08/e52e6eab550db1736c5657a7e38484c22a101009e77fc67eb00b272a96c1/opentelemetry_instrumentation_flask-0.62b1.tar.gz", hash = "sha256:37662ad159570dab1e3017a2a415193c014a5798fc32d33f3bdd254469e8c69a", size = 24100, upload-time = "2026-04-24T13:22:50.845Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/bc/c8/9f3bb38281bcb50c93c3d2358b303645f6917bf972c167484c09f9a97ff1/opentelemetry_instrumentation_flask-0.62b0-py3-none-any.whl", hash = "sha256:8c1f8986ec3887d08899d2eb654625252c929105174911b3b50dcf12b1001807", size = 16006, upload-time = "2026-04-09T14:39:44.401Z" }, { url = "https://files.pythonhosted.org/packages/2b/58/d0e5e82d225365987bd192576095b1125f6b172decc4db79963373c92b74/opentelemetry_instrumentation_flask-0.62b1-py3-none-any.whl", hash = "sha256:6df32684a7dd5dab5feb499c0748a4628b3fd139bffd8171326fb479aa525367", size = 16007, upload-time = "2026-04-24T13:21:55.462Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-httpx" name = "opentelemetry-instrumentation-httpx"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
@ -4408,14 +4410,14 @@ dependencies = [
{ name = "opentelemetry-util-http" }, { name = "opentelemetry-util-http" },
{ name = "wrapt" }, { name = "wrapt" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/77/a7/63e2c6325c8e99cd9b8e0229a8b61c37520ee537214a2c8d514e84486a94/opentelemetry_instrumentation_httpx-0.62b0.tar.gz", hash = "sha256:d865398db3f3c289ba226e355bf4d94460a4301c0c8916e3136caea55ae18000", size = 24182, upload-time = "2026-04-09T14:40:38.719Z" } sdist = { url = "https://files.pythonhosted.org/packages/33/cb/7a418e69c7dad281803529cb4f6de1b747d802cca44c38032668690b4836/opentelemetry_instrumentation_httpx-0.62b1.tar.gz", hash = "sha256:a1fac9bcc3a6ef5996a7990563f1af0798468b2c146de535fd598369383fba7e", size = 24181, upload-time = "2026-04-24T13:22:52.124Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/c0/5e/7d5fc28487637871b015128cd5dbb3c36f6d343a9098b893bd803d5a9cca/opentelemetry_instrumentation_httpx-0.62b0-py3-none-any.whl", hash = "sha256:c7660b939c12608fec67743126e9b4dc23dceef0ed631c415924966b0d1579e3", size = 17200, upload-time = "2026-04-09T14:39:46.618Z" }, { url = "https://files.pythonhosted.org/packages/c7/e0/eca824e9492ccec00e055bdd243aeda8eb7c5eda746d98af4d7a2d97ecf3/opentelemetry_instrumentation_httpx-0.62b1-py3-none-any.whl", hash = "sha256:88614015df451d61bc7e73f22524e6f223611f80b6caad2f6bdcbe05fa0df653", size = 17201, upload-time = "2026-04-24T13:21:58.072Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-redis" name = "opentelemetry-instrumentation-redis"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
@ -4423,14 +4425,14 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-semantic-conventions" },
{ name = "wrapt" }, { name = "wrapt" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/55/7d/5acdb4e4e36c522f9393cfa91f7a431ee089663c77855e524bc97f993020/opentelemetry_instrumentation_redis-0.62b0.tar.gz", hash = "sha256:513bc6679ee251436f0aff7be7ddab6186637dde09a795a8dc9659103f103bef", size = 14796, upload-time = "2026-04-09T14:40:48.391Z" } sdist = { url = "https://files.pythonhosted.org/packages/f5/ff/35414ad80409bd9e472c7959832524c5f2c8f63965af08c41c2b42d3a6a6/opentelemetry_instrumentation_redis-0.62b1.tar.gz", hash = "sha256:2d3c421d95e05ade075bee5becbe34e743b1cdf5bdee2085cb524f88c4f13dcb", size = 14796, upload-time = "2026-04-24T13:23:01.138Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/de/42/a13a7da074c972a51c14277e7f747e90037b9d815515c73b802e95897690/opentelemetry_instrumentation_redis-0.62b0-py3-none-any.whl", hash = "sha256:92ada3d7bdf395785f660549b0e6e8e5bac7cab80e7f1369a7d02228b27684c3", size = 15501, upload-time = "2026-04-09T14:40:00.69Z" }, { url = "https://files.pythonhosted.org/packages/31/37/bc2271f3472e3041eeade8b8da1cfd3b06badae76fe5d0ff135b6285e70c/opentelemetry_instrumentation_redis-0.62b1-py3-none-any.whl", hash = "sha256:9aedd02c1acf631251d1d676634db47da9da04e0a626cd0c7d83fe0eb791d165", size = 15501, upload-time = "2026-04-24T13:22:11.705Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-sqlalchemy" name = "opentelemetry-instrumentation-sqlalchemy"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
@ -4439,14 +4441,14 @@ dependencies = [
{ name = "packaging" }, { name = "packaging" },
{ name = "wrapt" }, { name = "wrapt" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/2a/3d/40adc8c38e5be017ceb230a28ca57ca81981d4dc0c4b902cc930c77fd14f/opentelemetry_instrumentation_sqlalchemy-0.62b0.tar.gz", hash = "sha256:d02f85b83f349e9ef70a34cb3f4c3a3481fa15b11747f09209818663e161cac4", size = 18539, upload-time = "2026-04-09T14:40:50.251Z" } sdist = { url = "https://files.pythonhosted.org/packages/1a/53/fa511ab998dd66b4eb66a36d8c262d0604cc5bad7a9c82e923be038dda97/opentelemetry_instrumentation_sqlalchemy-0.62b1.tar.gz", hash = "sha256:bdeac015351a1de057e8ea39f1fe26c9e60ea6bedbf1d5ad6a8262a516b3dc7d", size = 18539, upload-time = "2026-04-24T13:23:03.169Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/e0/77954ac593f34740dc32e28a15fe7170e90f6ba6398eaaa5c88b34c05ed1/opentelemetry_instrumentation_sqlalchemy-0.62b0-py3-none-any.whl", hash = "sha256:ec576e0660080d9d15ce4fa44d2a07fff8cb4b796a84344cb0f2c9e5d6e26f79", size = 15534, upload-time = "2026-04-09T14:40:03.957Z" }, { url = "https://files.pythonhosted.org/packages/2d/c5/aa2abcf8752a435536901636c5d540ba7a2c0ba2c4e98c7d119482e04262/opentelemetry_instrumentation_sqlalchemy-0.62b1-py3-none-any.whl", hash = "sha256:613542ecd52aabeec83d8813b5c287a3fb6c9ac3cd660694c94c0571f066e972", size = 15536, upload-time = "2026-04-24T13:22:14.767Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-instrumentation-wsgi" name = "opentelemetry-instrumentation-wsgi"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
@ -4454,22 +4456,22 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" }, { name = "opentelemetry-util-http" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/b7/5c/ed45ff053d76c94c59173f2bcde3d61052adb10214f70f028f760aa56625/opentelemetry_instrumentation_wsgi-0.62b0.tar.gz", hash = "sha256:d179f969ecce0c29a15ffd4d982580dfae57c8ff2fd4d9366e299a6d4815e668", size = 19922, upload-time = "2026-04-09T14:40:56.227Z" } sdist = { url = "https://files.pythonhosted.org/packages/36/db/19f1d66cead56e52291fccaa235b07ad45a5c24be1c740301a840c68235a/opentelemetry_instrumentation_wsgi-0.62b1.tar.gz", hash = "sha256:02a364fd9c940a46b19c825c5bfe386b007d5292ef91573894164836953fe831", size = 19919, upload-time = "2026-04-24T13:23:09.796Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f6/cb/753dbbe624df88594fa35a3ff26302fea22623385ed64462f6c8ee7c81eb/opentelemetry_instrumentation_wsgi-0.62b0-py3-none-any.whl", hash = "sha256:2714ab5ab2f35e67dc181ffa3a43fa15313c85c09b4d024c36d72cf1efa29c9a", size = 14628, upload-time = "2026-04-09T14:40:13.529Z" }, { url = "https://files.pythonhosted.org/packages/f7/0e/60fec0780e16929c821df7c55c4f0bea45d6ef562e662c5f27f47d0ff195/opentelemetry_instrumentation_wsgi-0.62b1-py3-none-any.whl", hash = "sha256:a2df11de0113f504043e2b0fa0288238a93ee49ff607bd5100cb2d3a75bc771f", size = 14629, upload-time = "2026-04-24T13:22:23.951Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-propagator-b3" name = "opentelemetry-propagator-b3"
version = "1.41.0" version = "1.41.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/ef/43/cea77e171c014324876104cf2a17c78f5e931408b977b9e64979f950912c/opentelemetry_propagator_b3-1.41.0.tar.gz", hash = "sha256:ef98b715b3a05e8b0b03ebaea1bf295b4ad61a0e306e2d1da81d32af7395e6ad", size = 9588, upload-time = "2026-04-09T14:38:43.328Z" } sdist = { url = "https://files.pythonhosted.org/packages/8a/ef/e2c1093e21fb9b5f8e44fa6cebacf2cbb60b47b4646d652805dcce48f3b8/opentelemetry_propagator_b3-1.41.1.tar.gz", hash = "sha256:e8563b588aa5f1f90740dcd678f04d5634de2d4e0077b7ca4a177c71a02f745d", size = 9587, upload-time = "2026-04-24T13:15:48.349Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/50/c1/11345c06774ec6ed6d89e3994dd1f62ad2ab41dfeb312eacd6b2a2323280/opentelemetry_propagator_b3-1.41.0-py3-none-any.whl", hash = "sha256:0b085c26ba59fcb66771226f967e91886bdeef998b3b5f2e9da6a604918c6f90", size = 8923, upload-time = "2026-04-09T14:38:26.865Z" }, { url = "https://files.pythonhosted.org/packages/c8/78/388ea1ae84fd3d2858c782f0410d73d936ffbd1a54711e45874490c576e7/opentelemetry_propagator_b3-1.41.1-py3-none-any.whl", hash = "sha256:f4b045d0aa4b5c17ac25a371bf3d08173a2f4b8f19a94357e57ae690c15415dc", size = 8921, upload-time = "2026-04-24T13:15:30.408Z" },
] ]
[[package]] [[package]]
@ -4486,38 +4488,38 @@ wheels = [
[[package]] [[package]]
name = "opentelemetry-sdk" name = "opentelemetry-sdk"
version = "1.41.0" version = "1.41.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
{ name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-semantic-conventions" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/f8/0e/a586df1186f9f56b5a0879d52653effc40357b8e88fc50fe300038c3c08b/opentelemetry_sdk-1.41.0.tar.gz", hash = "sha256:7bddf3961131b318fc2d158947971a8e37e38b1cd23470cfb72b624e7cc108bd", size = 230181, upload-time = "2026-04-09T14:38:47.225Z" } sdist = { url = "https://files.pythonhosted.org/packages/58/d0/54ee30dab82fb0acda23d144502771ff76ef8728459c83c3e89ef9fb1825/opentelemetry_sdk-1.41.1.tar.gz", hash = "sha256:724b615e1215b5aeacda0abb8a6a8922c9a1853068948bd0bd225a56d0c792e6", size = 230180, upload-time = "2026-04-24T13:15:50.991Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/13/a7825118208cb32e6a4edcd0a99f925cbef81e77b3b0aedfd9125583c543/opentelemetry_sdk-1.41.0-py3-none-any.whl", hash = "sha256:a596f5687964a3e0d7f8edfdcf5b79cbca9c93c7025ebf5fb00f398a9443b0bd", size = 180214, upload-time = "2026-04-09T14:38:30.657Z" }, { url = "https://files.pythonhosted.org/packages/b4/e7/a1420b698aad018e1cf60fdbaaccbe49021fb415e2a0d81c242f4c518f54/opentelemetry_sdk-1.41.1-py3-none-any.whl", hash = "sha256:edee379c126c1bce952b0c812b48fe8ff35b30df0eecf17e98afa4d598b7d85d", size = 180213, upload-time = "2026-04-24T13:15:33.767Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-semantic-conventions" name = "opentelemetry-semantic-conventions"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "opentelemetry-api" }, { name = "opentelemetry-api" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/a3/b0/c14f723e86c049b7bf8ff431160d982519b97a7be2857ed2247377397a24/opentelemetry_semantic_conventions-0.62b0.tar.gz", hash = "sha256:cbfb3c8fc259575cf68a6e1b94083cc35adc4a6b06e8cf431efa0d62606c0097", size = 145753, upload-time = "2026-04-09T14:38:48.274Z" } sdist = { url = "https://files.pythonhosted.org/packages/9e/de/911ac9e309052aca1b20b2d5549d3db45d1011e1a610e552c6ccdd1b64f8/opentelemetry_semantic_conventions-0.62b1.tar.gz", hash = "sha256:c5cc6e04a7f8c7cdd30be2ed81499fa4e75bfbd52c9cb70d40af1f9cd3619802", size = 145750, upload-time = "2026-04-24T13:15:52.236Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/58/6c/5e86fa1759a525ef91c2d8b79d668574760ff3f900d114297765eb8786cb/opentelemetry_semantic_conventions-0.62b0-py3-none-any.whl", hash = "sha256:0ddac1ce59eaf1a827d9987ab60d9315fb27aea23304144242d1fcad9e16b489", size = 231619, upload-time = "2026-04-09T14:38:32.394Z" }, { url = "https://files.pythonhosted.org/packages/eb/a6/83dc2ab6fa397ee66fba04fe2e74bdf7be3b3870005359ceb7689103c058/opentelemetry_semantic_conventions-0.62b1-py3-none-any.whl", hash = "sha256:cf506938103d331fbb78eded0d9788095f7fd59016f2bda813c3324e5a74a93c", size = 231620, upload-time = "2026-04-24T13:15:35.454Z" },
] ]
[[package]] [[package]]
name = "opentelemetry-util-http" name = "opentelemetry-util-http"
version = "0.62b0" version = "0.62b1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/830f7c57135158eb8a8efd3f94ab191a89e3b8a49bed314a35ee501da3f2/opentelemetry_util_http-0.62b0.tar.gz", hash = "sha256:a62e4b19b8a432c0de657f167dee3455516136bb9c6ed463ca8063019970d835", size = 11393, upload-time = "2026-04-09T14:40:59.442Z" } sdist = { url = "https://files.pythonhosted.org/packages/24/1b/aa71b63e18d30a8384036b9937f40f7618f8030a7aa213155fb54f6f2b47/opentelemetry_util_http-0.62b1.tar.gz", hash = "sha256:adf6facbb89aef8f8bc566e2f04624942ba08a7b678b3479a91051a8f4dc70a3", size = 11393, upload-time = "2026-04-24T13:23:12.994Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/3d/7f/5c1b7d4385852b9e5eacd4e7f9d8b565d3d351d17463b24916ad098adf1a/opentelemetry_util_http-0.62b0-py3-none-any.whl", hash = "sha256:c20462808d8cc95b69b0dc4a3e02a9d36beb663347e96c931f51ffd78bd318ad", size = 9294, upload-time = "2026-04-09T14:40:19.014Z" }, { url = "https://files.pythonhosted.org/packages/5d/85/a9d9d32161c1ced61346267db4c9702da54f81ec5dc88214bc65c23f4e9d/opentelemetry_util_http-0.62b1-py3-none-any.whl", hash = "sha256:c57e8a6c19fc422c288e6074e882f506f85030b69b7376182f74f9257b9261f0", size = 9295, upload-time = "2026-04-24T13:22:28.078Z" },
] ]
[[package]] [[package]]
@ -4808,7 +4810,7 @@ wheels = [
[[package]] [[package]]
name = "postgrest" name = "postgrest"
version = "2.28.3" version = "2.29.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "deprecation" }, { name = "deprecation" },
@ -4816,9 +4818,9 @@ dependencies = [
{ name = "pydantic" }, { name = "pydantic" },
{ name = "yarl" }, { name = "yarl" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/40/60/9378ddd6e21b6005b34aeb42dc7a9ed9985c673c97c9b6a1858f9c52ebbd/postgrest-2.28.3.tar.gz", hash = "sha256:56336e9304950a78315ec7d6c8eb307cdb964d0878a7bec6111392ddb6c16a45", size = 13758, upload-time = "2026-03-20T14:38:06.542Z" } sdist = { url = "https://files.pythonhosted.org/packages/52/98/f216b8b5c4d116ab6a2fb21339b5821da279ee773e163612418e1c56c012/postgrest-2.29.0.tar.gz", hash = "sha256:a87081858f627fcd57e8e7137004a1ef0adbdf0dbdfed1384e9ea1d7a9c525ec", size = 14217, upload-time = "2026-04-24T13:13:00.281Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/7f/5e/6eeb1d53d010d80e800204c1eee6b3d5419a6a2b985c364f56f36cf48cca/postgrest-2.28.3-py3-none-any.whl", hash = "sha256:5a44d6c6d509abdbe0f928c86f0dc31ef26bda36e0357129836ec54dfb50b083", size = 21865, upload-time = "2026-03-20T14:38:05.55Z" }, { url = "https://files.pythonhosted.org/packages/2c/0b/08b670a93a90d625c557b9e64b8a5fdeec80c3542d2d0265f0b4d6b16646/postgrest-2.29.0-py3-none-any.whl", hash = "sha256:3ee48e146f726272733d20e2b12de354cdb6cb9dd9cc3a61ed97ce69047aeb96", size = 22735, upload-time = "2026-04-24T13:12:58.405Z" },
] ]
[[package]] [[package]]
@ -4980,21 +4982,21 @@ wheels = [
[[package]] [[package]]
name = "psycopg2-binary" name = "psycopg2-binary"
version = "2.9.11" version = "2.9.12"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } sdist = { url = "https://files.pythonhosted.org/packages/2a/60/a3624f79acea344c16fbef3a94d28b89a8042ddfb8f3e4ca83f538671409/psycopg2_binary-2.9.12.tar.gz", hash = "sha256:5ac9444edc768c02a6b6a591f070b8aae28ff3a99be57560ac996001580f294c", size = 379686, upload-time = "2026-04-21T09:40:34.304Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, { url = "https://files.pythonhosted.org/packages/e2/9f/ef4ef3c8e15083df90ca35265cfd1a081a2f0cc07bb229c6314c6af817f4/psycopg2_binary-2.9.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5cdc05117180c5fa9c40eea8ea559ce64d73824c39d928b7da9fb5f6a9392433", size = 3712459, upload-time = "2026-04-20T23:34:30.549Z" },
{ url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, { url = "https://files.pythonhosted.org/packages/b5/01/3dd14e46ba48c1e1a6ec58ee599fa1b5efa00c246d5046cd903d0eeb1af1/psycopg2_binary-2.9.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d3227a3bc228c10d21011a99245edca923e4e8bf461857e869a507d9a41fe9f6", size = 3822936, upload-time = "2026-04-20T23:34:32.77Z" },
{ url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, { url = "https://files.pythonhosted.org/packages/a6/f7/0640e4901119d8a9f7a1784b927f494e2198e213ceb593753d1f2c8b1b30/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:995ce929eede89db6254b50827e2b7fd61e50d11f0b116b29fffe4a2e53c4580", size = 4578676, upload-time = "2026-04-20T23:34:35.18Z" },
{ url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, { url = "https://files.pythonhosted.org/packages/b0/55/44df3965b5f297c50cc0b1b594a31c67d6127a9d133045b8a66611b14dfb/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9fe06d93e72f1c048e731a2e3e7854a5bfaa58fc736068df90b352cefe66f03f", size = 4274917, upload-time = "2026-04-20T23:34:37.982Z" },
{ url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, { url = "https://files.pythonhosted.org/packages/b0/4b/74535248b1eac0c9336862e8617c765ac94dac76f9e25d7c4a79588c8907/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40e7b28b63aaf737cb3a1edc3a9bbc9a9f4ad3dcb7152e8c1130e4050eddcb7d", size = 5894843, upload-time = "2026-04-20T23:34:40.856Z" },
{ url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, { url = "https://files.pythonhosted.org/packages/f2/ba/f1bf8d2ae71868ad800b661099086ee52bc0f8d9f05be1acd8ebb06757cc/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:89d19a9f7899e8eb0656a2b3a08e0da04c720a06db6e0033eab5928aabe60fa9", size = 4110556, upload-time = "2026-04-20T23:34:44.016Z" },
{ url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, { url = "https://files.pythonhosted.org/packages/45/46/c15706c338403b7c420bcc0c2905aad116cc064545686d8bf85f1999ea00/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:612b965daee295ae2da8f8218ce1d274645dc76ef3f1abf6a0a94fd57eff876d", size = 3655714, upload-time = "2026-04-20T23:34:46.233Z" },
{ url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, { url = "https://files.pythonhosted.org/packages/b3/7c/a2d5dc09b64a4564db242a0fe418fde7d33f6f8259dd2c5b9d7def00fb5a/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b9a339b79d37c1b45f3235265f07cdeb0cb5ad7acd2ac7720a5920989c17c24e", size = 3301154, upload-time = "2026-04-20T23:34:49.528Z" },
{ url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, { url = "https://files.pythonhosted.org/packages/c0/e8/cc8c9a4ce71461f9ec548d38cadc41dc184b34c73e6455450775a9334ccd/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3471336e1acfd9c7fe507b8bad5af9317b6a89294f9eb37bd9a030bb7bebcdc6", size = 3048882, upload-time = "2026-04-20T23:34:51.86Z" },
{ url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, { url = "https://files.pythonhosted.org/packages/19/6a/31e2296bc0787c5ab75d3d118e40b239db8151b5192b90b77c72bc9256e9/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7af18183109e23502c8b2ae7f6926c0882766f35b5175a4cd737ad825e4d7a1b", size = 3351298, upload-time = "2026-04-20T23:34:54.124Z" },
{ url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, { url = "https://files.pythonhosted.org/packages/5f/a8/75f4e3e11203b590150abed2cf7794b9c9c9f7eceddae955191138b44dde/psycopg2_binary-2.9.12-cp312-cp312-win_amd64.whl", hash = "sha256:398fcd4db988c7d7d3713e2b8e18939776fd3fb447052daae4f24fa39daede4c", size = 2757230, upload-time = "2026-04-20T23:34:56.242Z" },
] ]
[[package]] [[package]]
@ -5721,16 +5723,16 @@ wheels = [
[[package]] [[package]]
name = "realtime" name = "realtime"
version = "2.28.3" version = "2.29.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "pydantic" }, { name = "pydantic" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
{ name = "websockets" }, { name = "websockets" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/9c/3d/ef6ed9221f98766f3a503e6e3ac68fa7ca25c117b383f1efc448294232ac/realtime-2.28.3.tar.gz", hash = "sha256:5cc83a6217874426799d8bf74e96d904ac6fa77c39fa8982fa99287947eb2cbf", size = 18723, upload-time = "2026-03-20T14:38:08.424Z" } sdist = { url = "https://files.pythonhosted.org/packages/6e/f1/08c42a42653942fadfbef495d5b0239356140e7186cc528704956c5f06d4/realtime-2.29.0.tar.gz", hash = "sha256:8efe4a1b3a548a5fda09de701bd041fa0970c5a2fe7d13db0b9861ce11828be2", size = 18715, upload-time = "2026-04-24T13:13:02.315Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/d5/659405f9d4c9b022b7ac02bd52986ccc081f211db081051440f46bf4f358/realtime-2.28.3-py3-none-any.whl", hash = "sha256:efe484d6d39024c7e00ef70f70be600142e9407e5d802de8c96e86e014ce3b36", size = 22378, upload-time = "2026-03-20T14:38:07.144Z" }, { url = "https://files.pythonhosted.org/packages/77/48/f6375c0a24923beb988f0c71c052604c96641cf43c2d22b91ec1df86afa0/realtime-2.29.0-py3-none-any.whl", hash = "sha256:1a4891e6c82e88ac9d96ac715e435e086f6f8c7665212a8717346de829cbb509", size = 22374, upload-time = "2026-04-24T13:13:01.103Z" },
] ]
[[package]] [[package]]
@ -5887,27 +5889,27 @@ wheels = [
[[package]] [[package]]
name = "ruff" name = "ruff"
version = "0.15.11" version = "0.15.12"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/8d/192f3d7103816158dfd5ea50d098ef2aec19194e6cbccd4b3485bdb2eb2d/ruff-0.15.11.tar.gz", hash = "sha256:f092b21708bf0e7437ce9ada249dfe688ff9a0954fc94abab05dcea7dcd29c33", size = 4637264, upload-time = "2026-04-16T18:46:26.58Z" } sdist = { url = "https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/02/1e/6aca3427f751295ab011828e15e9bf452200ac74484f1db4be0197b8170b/ruff-0.15.11-py3-none-linux_armv6l.whl", hash = "sha256:e927cfff503135c558eb581a0c9792264aae9507904eb27809cdcff2f2c847b7", size = 10607943, upload-time = "2026-04-16T18:46:05.967Z" }, { url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" },
{ url = "https://files.pythonhosted.org/packages/e7/26/1341c262e74f36d4e84f3d6f4df0ac68cd53331a66bfc5080daa17c84c0b/ruff-0.15.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7a1b5b2938d8f890b76084d4fa843604d787a912541eae85fd7e233398bbb73e", size = 10988592, upload-time = "2026-04-16T18:46:00.742Z" }, { url = "https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 11069267, upload-time = "2026-04-24T18:17:30.105Z" },
{ url = "https://files.pythonhosted.org/packages/03/71/850b1d6ffa9564fbb6740429bad53df1094082fe515c8c1e74b6d8d05f18/ruff-0.15.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d4176f3d194afbdaee6e41b9ccb1a2c287dba8700047df474abfbe773825d1cb", size = 10338501, upload-time = "2026-04-16T18:46:03.723Z" }, { url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" },
{ url = "https://files.pythonhosted.org/packages/f2/11/cc1284d3e298c45a817a6aadb6c3e1d70b45c9b36d8d9cce3387b495a03a/ruff-0.15.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b17c886fb88203ced3afe7f14e8d5ae96e9d2f4ccc0ee66aa19f2c2675a27e4", size = 10670693, upload-time = "2026-04-16T18:46:41.941Z" }, { url = "https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" },
{ url = "https://files.pythonhosted.org/packages/ce/9e/f8288b034ab72b371513c13f9a41d9ba3effac54e24bfb467b007daee2ca/ruff-0.15.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49fafa220220afe7758a487b048de4c8f9f767f37dfefad46b9dd06759d003eb", size = 10416177, upload-time = "2026-04-16T18:46:21.717Z" }, { url = "https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" },
{ url = "https://files.pythonhosted.org/packages/85/71/504d79abfd3d92532ba6bbe3d1c19fada03e494332a59e37c7c2dabae427/ruff-0.15.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2ab8427e74a00d93b8bda1307b1e60970d40f304af38bccb218e056c220120d", size = 11221886, upload-time = "2026-04-16T18:46:15.086Z" }, { url = "https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" },
{ url = "https://files.pythonhosted.org/packages/43/5a/947e6ab7a5ad603d65b474be15a4cbc6d29832db5d762cd142e4e3a74164/ruff-0.15.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:195072c0c8e1fc8f940652073df082e37a5d9cb43b4ab1e4d0566ab8977a13b7", size = 12075183, upload-time = "2026-04-16T18:46:07.944Z" }, { url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" },
{ url = "https://files.pythonhosted.org/packages/9f/a1/0b7bb6268775fdd3a0818aee8efd8f5b4e231d24dd4d528ced2534023182/ruff-0.15.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3a0996d486af3920dec930a2e7daed4847dfc12649b537a9335585ada163e9e", size = 11516575, upload-time = "2026-04-16T18:46:31.687Z" }, { url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = "2026-04-24T18:17:25.409Z" },
{ url = "https://files.pythonhosted.org/packages/30/c3/bb5168fc4d233cc06e95f482770d0f3c87945a0cd9f614b90ea8dc2f2833/ruff-0.15.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bef2cb556d509259f1fe440bb9cd33c756222cf0a7afe90d15edf0866702431", size = 11306537, upload-time = "2026-04-16T18:46:36.988Z" }, { url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" },
{ url = "https://files.pythonhosted.org/packages/e4/92/4cfae6441f3967317946f3b788136eecf093729b94d6561f963ed810c82e/ruff-0.15.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:030d921a836d7d4a12cf6e8d984a88b66094ccb0e0f17ddd55067c331191bf19", size = 11296813, upload-time = "2026-04-16T18:46:24.182Z" }, { url = "https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" },
{ url = "https://files.pythonhosted.org/packages/43/26/972784c5dde8313acde8ac71ba8ac65475b85db4a2352a76c9934361f9bc/ruff-0.15.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0e783b599b4577788dbbb66b9addcef87e9a8832f4ce0c19e34bf55543a2f890", size = 10633136, upload-time = "2026-04-16T18:46:39.802Z" }, { url = "https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" },
{ url = "https://files.pythonhosted.org/packages/5b/53/3985a4f185020c2f367f2e08a103032e12564829742a1b417980ce1514a0/ruff-0.15.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ae90592246625ba4a34349d68ec28d4400d75182b71baa196ddb9f82db025ef5", size = 10424701, upload-time = "2026-04-16T18:46:10.381Z" }, { url = "https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" },
{ url = "https://files.pythonhosted.org/packages/d3/57/bf0dfb32241b56c83bb663a826133da4bf17f682ba8c096973065f6e6a68/ruff-0.15.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1f111d62e3c983ed20e0ca2e800f8d77433a5b1161947df99a5c2a3fb60514f0", size = 10873887, upload-time = "2026-04-16T18:46:29.157Z" }, { url = "https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" },
{ url = "https://files.pythonhosted.org/packages/02/05/e48076b2a57dc33ee8c7a957296f97c744ca891a8ffb4ffb1aaa3b3f517d/ruff-0.15.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:06f483d6646f59eaffba9ae30956370d3a886625f511a3108994000480621d1c", size = 11404316, upload-time = "2026-04-16T18:46:19.462Z" }, { url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" },
{ url = "https://files.pythonhosted.org/packages/88/27/0195d15fe7a897cbcba0904792c4b7c9fdd958456c3a17d2ea6093716a9a/ruff-0.15.11-py3-none-win32.whl", hash = "sha256:476a2aa56b7da0b73a3ee80b6b2f0e19cce544245479adde7baa65466664d5f3", size = 10655535, upload-time = "2026-04-16T18:46:12.47Z" }, { url = "https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" },
{ url = "https://files.pythonhosted.org/packages/3a/5e/c927b325bd4c1d3620211a4b96f47864633199feed60fa936025ab27e090/ruff-0.15.11-py3-none-win_amd64.whl", hash = "sha256:8b6756d88d7e234fb0c98c91511aae3cd519d5e3ed271cae31b20f39cb2a12a3", size = 11779692, upload-time = "2026-04-16T18:46:17.268Z" }, { url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = "sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" },
{ url = "https://files.pythonhosted.org/packages/63/b6/aeadee5443e49baa2facd51131159fd6301cc4ccfc1541e4df7b021c37dd/ruff-0.15.11-py3-none-win_arm64.whl", hash = "sha256:063fed18cc1bbe0ee7393957284a6fe8b588c6a406a285af3ee3f46da2391ee4", size = 11032614, upload-time = "2026-04-16T18:46:34.487Z" }, { url = "https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = "sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" },
] ]
[[package]] [[package]]
@ -6212,7 +6214,7 @@ wheels = [
[[package]] [[package]]
name = "storage3" name = "storage3"
version = "2.28.3" version = "2.29.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "deprecation" }, { name = "deprecation" },
@ -6221,9 +6223,9 @@ dependencies = [
{ name = "pyiceberg" }, { name = "pyiceberg" },
{ name = "yarl" }, { name = "yarl" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/eb/b5/18df59ba92951d74774eb0265072bf236ead5e3cbc4b802d8bf1cf3581a0/storage3-2.28.3.tar.gz", hash = "sha256:2b3f843cbd44c4a3b483ec076a12c27de88c0ad5358a43067ed44ef08292353f", size = 20109, upload-time = "2026-03-20T14:38:11.467Z" } sdist = { url = "https://files.pythonhosted.org/packages/d7/be/771246434b5caf3c6187bfdc932eaede00bf5f2937b47475ab25209ede3e/storage3-2.29.0.tar.gz", hash = "sha256:b0cc2f6714655d725c998d2c5ae8c6fb4f56a513bd31e4f85770df557fe021e3", size = 20160, upload-time = "2026-04-24T13:13:04.626Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/ad/a5/2dbe216954e026a8c2e2dc7dfa5fd7b1a1ae0824d10972e62462f4f15aca/storage3-2.28.3-py3-none-any.whl", hash = "sha256:bac35c5087619174448fdef6a337db4e3dfebf3de69f685bd706de93ddcdad69", size = 28239, upload-time = "2026-03-20T14:38:10.423Z" }, { url = "https://files.pythonhosted.org/packages/fc/c3/790c31866f52c13b26f108b45759bf50dafae3a0bafb4511fadc98ba7c33/storage3-2.29.0-py3-none-any.whl", hash = "sha256:043ef7ff27cc8b9da12be403cf78ee4586180edfcf62b227ff61e1bd79594b06", size = 28284, upload-time = "2026-04-24T13:13:03.338Z" },
] ]
[[package]] [[package]]
@ -6249,7 +6251,7 @@ wheels = [
[[package]] [[package]]
name = "supabase" name = "supabase"
version = "2.28.3" version = "2.29.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "httpx" }, { name = "httpx" },
@ -6260,37 +6262,37 @@ dependencies = [
{ name = "supabase-functions" }, { name = "supabase-functions" },
{ name = "yarl" }, { name = "yarl" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/5a/98/2f1c95a2269ce995a34f275760b1c2ee71ee7a75649238ca0470afdfc2ef/supabase-2.28.3.tar.gz", hash = "sha256:1200961e46cdec17c7c280a1e09a159544643eada2759591ea69835303a2e1a4", size = 9687, upload-time = "2026-03-20T14:38:13.272Z" } sdist = { url = "https://files.pythonhosted.org/packages/51/a0/2407d616fdf68e8632bbbfb063d1685c38377ac0199e8ca11deaea1f3bf0/supabase-2.29.0.tar.gz", hash = "sha256:a88c4a4eb50fbb903e2e962fbc7c27733b00589140139f9e837bc9fe30dd3615", size = 9689, upload-time = "2026-04-24T13:13:06.728Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/de/96/1b48eb664153401c22087bbf77f6a428965e830cc8e0d0c6d68324a28342/supabase-2.28.3-py3-none-any.whl", hash = "sha256:52a7ce4a1d2d55fa6d657bf4760672935058143a5bedc64165851be25ce01dbd", size = 16634, upload-time = "2026-03-20T14:38:12.319Z" }, { url = "https://files.pythonhosted.org/packages/22/52/232f6bbf5326e04ae12e2ef04a24f011a0d7cab379a8b9698652bc8ff78f/supabase-2.29.0-py3-none-any.whl", hash = "sha256:16c3ec4b7094f6b92efc5cd3bb3f96826d3b6dd5d24fe15c89c81166efce88fe", size = 16633, upload-time = "2026-04-24T13:13:05.722Z" },
] ]
[[package]] [[package]]
name = "supabase-auth" name = "supabase-auth"
version = "2.28.3" version = "2.29.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "httpx", extra = ["http2"] }, { name = "httpx", extra = ["http2"] },
{ name = "pydantic" }, { name = "pydantic" },
{ name = "pyjwt", extra = ["crypto"] }, { name = "pyjwt", extra = ["crypto"] },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/cc/6f/1bf81293374ba71183b321bf5dfd7151c3db0c2e24715f35783bc1c56385/supabase_auth-2.28.3.tar.gz", hash = "sha256:41c049da82f9d7fc2f111808e57e984015f128d033f58caa67fd76f428472807", size = 39160, upload-time = "2026-03-20T14:38:15.128Z" } sdist = { url = "https://files.pythonhosted.org/packages/51/7f/7ceeb4c7a2caa188062e934897f0e08e1af0a0e47e376c7645c26b4c39d8/supabase_auth-2.29.0.tar.gz", hash = "sha256:46efc6a3455a23957b846dc974303a844ba0413718cfa899425477ac977f95b3", size = 39154, upload-time = "2026-04-24T13:13:08.509Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/c3/d3/e012315aa895b434fa77bc475e2dfeb87119e67918ecca4d88a25f96814d/supabase_auth-2.28.3-py3-none-any.whl", hash = "sha256:e47c5caec7bbf3c258964d027fbbe99f3cc4a956d3a635f898c962b4d22832dd", size = 48378, upload-time = "2026-03-20T14:38:14.169Z" }, { url = "https://files.pythonhosted.org/packages/f1/ac/3c35cf52281f940b9497cf17abfc5c2050ca49f342d60cfafe22dac3482b/supabase_auth-2.29.0-py3-none-any.whl", hash = "sha256:64de6ef8cae80f97d3aa8d5ca507d5427dda5c89885c0bcfe9f8b0263b6fb9a4", size = 48379, upload-time = "2026-04-24T13:13:07.417Z" },
] ]
[[package]] [[package]]
name = "supabase-functions" name = "supabase-functions"
version = "2.28.3" version = "2.29.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "httpx", extra = ["http2"] }, { name = "httpx", extra = ["http2"] },
{ name = "strenum" }, { name = "strenum" },
{ name = "yarl" }, { name = "yarl" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/19/ea/59bf327960e5384fcc9e69afbdf97260a2cf2684a25c0731968a8a393b9c/supabase_functions-2.28.3.tar.gz", hash = "sha256:5a6255d60a263d44251c5ca250fcdde2408a8483a8bf31f4ac80255de8f3fcae", size = 4679, upload-time = "2026-03-20T14:38:16.742Z" } sdist = { url = "https://files.pythonhosted.org/packages/e5/19/1a1d22749f38f2a6cbca93a6f5a35c9f816c2c3c06bfaa077fa336e90537/supabase_functions-2.29.0.tar.gz", hash = "sha256:0f8a14a2ea9f12b1c208f61dc6f55e2f4b1121f81bf01c08f9b487d22888744d", size = 4683, upload-time = "2026-04-24T13:13:10.432Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/a5/ca/1e720f1347a88519e3d52b6d801cd031c3a7a5df66640c5dc6e81d925057/supabase_functions-2.28.3-py3-none-any.whl", hash = "sha256:eb30578866103fed9322c54e95dd68c2f1a4b6b177e129d9369edd364637904e", size = 8801, upload-time = "2026-03-20T14:38:15.883Z" }, { url = "https://files.pythonhosted.org/packages/e0/10/6f8ef0b408ade76b5a439afab588ce5849e9604a23040ca73cfe0b90cb9e/supabase_functions-2.29.0-py3-none-any.whl", hash = "sha256:6f08de52eec5820eae53616868b85e849e181beffaa5d05b8ea1708ceae5e48e", size = 8799, upload-time = "2026-04-24T13:13:09.214Z" },
] ]
[[package]] [[package]]
@ -6780,11 +6782,11 @@ wheels = [
[[package]] [[package]]
name = "types-psycopg2" name = "types-psycopg2"
version = "2.9.21.20260408" version = "2.9.21.20260422"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cd/24/d8ae11a0c056535557aaabeb7d7838423abdfdcf1e5f8dfb2c04d316c65d/types_psycopg2-2.9.21.20260408.tar.gz", hash = "sha256:bb65cd12f53b6633077fd782607a33065e1f3bf585219c9f786b61ad2b72211c", size = 27078, upload-time = "2026-04-08T04:26:15.848Z" } sdist = { url = "https://files.pythonhosted.org/packages/b9/a2/ecb04604074a7f2e82231ab1f2d3b5a792589aa3c21a597cb3232a38ece3/types_psycopg2-2.9.21.20260422.tar.gz", hash = "sha256:ad7574fa8e25d9aa96ab96cd280c4dee20872725cd1fe6a6d3facc354f2644d4", size = 27123, upload-time = "2026-04-22T04:36:33.263Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/1a/fe/9aab9239640107b6e46afddcee578a916b8b98bfee36e03da5b0d2c95124/types_psycopg2-2.9.21.20260408-py3-none-any.whl", hash = "sha256:49b086bfc9e0ce901c6537403ead1c19c75275571040b037af0248a8e48c322f", size = 24921, upload-time = "2026-04-08T04:26:14.715Z" }, { url = "https://files.pythonhosted.org/packages/61/08/82f86c2d0a7ae4d335c6fe3c4ad193c4a57f0d6bfe1a676289cf63667275/types_psycopg2-2.9.21.20260422-py3-none-any.whl", hash = "sha256:e240684ac37946c5a2a058b04ea1f2fd0e4ee2655719b8c3ec9abf37f96da5ba", size = 24918, upload-time = "2026-04-22T04:36:32.108Z" },
] ]
[[package]] [[package]]
@ -7479,7 +7481,7 @@ wheels = [
[[package]] [[package]]
name = "xinference-client" name = "xinference-client"
version = "2.5.0" version = "2.7.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "aiohttp" }, { name = "aiohttp" },
@ -7487,9 +7489,9 @@ dependencies = [
{ name = "requests" }, { name = "requests" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/d0/8a/4d7c72510f3c462195c2e7aa63559cafcf20f7d1901132d533b7498bab1c/xinference_client-2.5.0.tar.gz", hash = "sha256:0680324e2f438b8b208ca80e8a7e1c22e9152fce54f8c024c75e2ce57bfa5639", size = 58430, upload-time = "2026-04-13T07:21:40.145Z" } sdist = { url = "https://files.pythonhosted.org/packages/99/86/89723d8a4f862bac49581ef99c9e52c014acf42355710335470062efabf1/xinference_client-2.7.0.tar.gz", hash = "sha256:51c174bc1704a505512550097d4b2025480a840d97bed8097dfbfaec2172ca9e", size = 58577, upload-time = "2026-04-25T14:37:37.345Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/5b/dd/4fd501b8092c01f0775142850e3b601d743edf733077b756defe4a01cc37/xinference_client-2.5.0-py3-none-any.whl", hash = "sha256:bb90f069a2c30ac6ea7453ab37a0fadd34c28b655afa51fe20c18e67a361c269", size = 40006, upload-time = "2026-04-13T07:21:38.851Z" }, { url = "https://files.pythonhosted.org/packages/1c/22/f9b92941be1cba5b2347211bb04c354a6ba2bad0e7b2da41510f77959327/xinference_client-2.7.0-py3-none-any.whl", hash = "sha256:76377804eb7fd2ece8a7d1e5c517d8aed8b5a511834066e43414ad74bcb34c09", size = 40154, upload-time = "2026-04-25T14:37:35.959Z" },
] ]
[[package]] [[package]]

depot.json Normal file
View File

@ -0,0 +1 @@
{"id":"smkxz53ddb"}

View File

@ -0,0 +1,19 @@
@apps @authenticated @core
Feature: Share app publicly
Scenario: Enable public share for a published workflow app
Given I am signed in as the default E2E admin
And a "workflow" app has been created via API
And a minimal runnable workflow draft has been synced
When I open the app from the app list
And I open the publish panel
And I publish the app
And I navigate to the app overview page
And I enable the Web App share
Then the Web App should be in service

@unauthenticated
Scenario: Access a shared workflow app without authentication
Given a workflow app has been published and shared via API
When I open the shared app URL
Then the shared app page should be accessible

View File

@ -0,0 +1,13 @@
@apps @authenticated @core @mode-matrix
Feature: Workflow run and publish
Scenario: Run and publish a minimal workflow app
Given I am signed in as the default E2E admin
And a "workflow" app has been created via API
And a minimal runnable workflow draft has been synced
When I open the app from the app list
And I run the workflow
Then the workflow run should succeed
When I open the publish panel
And I publish the app
Then the app should be marked as published

View File

@ -0,0 +1,39 @@
import type { DifyWorld } from '../../support/world'
import { Given, Then, When } from '@cucumber/cucumber'
import { expect } from '@playwright/test'
import { createTestApp, enableAppSiteAndGetURL, publishWorkflowApp, syncRunnableWorkflowDraft } from '../../../support/api'
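// On the app overview, find the Web App card labelled with the app name and 'Workflow' mode,
// then toggle its enable switch once it becomes interactable.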
When('I enable the Web App share', async function (this: DifyWorld) {
const page = this.getPage()
const appName = this.lastCreatedAppName
if (!appName)
throw new Error('No app name available. Run "a \\"workflow\\" app has been created via API" first.')
await page.locator('button').filter({ hasText: appName }).filter({ hasText: 'Workflow' }).click()
await expect(page.getByRole('switch').first()).toBeEnabled({ timeout: 15_000 })
await page.getByRole('switch').first().click()
})
Then('the Web App should be in service', async function (this: DifyWorld) {
await expect(this.getPage().getByText('In Service').first()).toBeVisible({ timeout: 10_000 })
})
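// Provision a shared workflow app entirely through the console API so the
// unauthenticated scenario has a public URL to visit.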
Given('a workflow app has been published and shared via API', async function (this: DifyWorld) {
const app = await createTestApp(`E2E Share ${Date.now()}`, 'workflow')
this.createdAppIds.push(app.id)
this.lastCreatedAppName = app.name
await syncRunnableWorkflowDraft(app.id)
await publishWorkflowApp(app.id)
this.shareURL = await enableAppSiteAndGetURL(app.id)
})
When('I open the shared app URL', async function (this: DifyWorld) {
if (!this.shareURL)
throw new Error('No share URL available. Run "a workflow app has been published and shared via API" first.')
await this.getPage().goto(this.shareURL, { timeout: 20_000 })
})
Then('the shared app page should be accessible', async function (this: DifyWorld) {
await expect(this.getPage()).toHaveURL(/\/(workflow|chat)\/[a-zA-Z0-9]+/, { timeout: 15_000 })
await expect(this.getPage().locator('body')).toBeVisible({ timeout: 10_000 })
})

View File

@ -0,0 +1,23 @@
import type { DifyWorld } from '../../support/world'
import { Given, Then, When } from '@cucumber/cucumber'
import { expect } from '@playwright/test'
import { syncRunnableWorkflowDraft } from '../../../support/api'
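// Seed the most recently created app with a runnable workflow draft via the console API helper.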
Given('a minimal runnable workflow draft has been synced', async function (this: DifyWorld) {
const appId = this.createdAppIds.at(-1)
if (!appId)
throw new Error('No app ID found. Run "a \\"workflow\\" app has been created via API" first.')
await syncRunnableWorkflowDraft(appId)
})
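// Trigger a test run from the canvas and wait for the 'Running' indicator to appear.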
When('I run the workflow', async function (this: DifyWorld) {
const page = this.getPage()
await page.getByText('Test Run').click()
await expect(page.getByText('Running').first()).toBeVisible({ timeout: 15_000 })
})
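// Open the run detail panel and wait for a SUCCESS status; the generous timeout covers asynchronous execution.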
Then('the workflow run should succeed', async function (this: DifyWorld) {
const page = this.getPage()
await page.getByText('DETAIL').click()
await expect(page.getByText('SUCCESS').first()).toBeVisible({ timeout: 55_000 })
})

View File

@ -15,6 +15,7 @@ export class DifyWorld extends World {
lastCreatedAppName: string | undefined lastCreatedAppName: string | undefined
createdAppIds: string[] = [] createdAppIds: string[] = []
capturedDownloads: Download[] = [] capturedDownloads: Download[] = []
shareURL: string | undefined
constructor(options: IWorldOptions) { constructor(options: IWorldOptions) {
super(options) super(options)
@ -27,6 +28,7 @@ export class DifyWorld extends World {
this.lastCreatedAppName = undefined this.lastCreatedAppName = undefined
this.createdAppIds = [] this.createdAppIds = []
this.capturedDownloads = [] this.capturedDownloads = []
this.shareURL = undefined
} }
async startSession(browser: Browser, authenticated: boolean) { async startSession(browser: Browser, authenticated: boolean) {

View File

@ -67,11 +67,20 @@ const main = async () => {
logFilePath: path.join(logDir, 'cucumber-api.log'), logFilePath: path.join(logDir, 'cucumber-api.log'),
}) })
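// Also start a Celery worker so queued workflow executions can be processed during the E2E run.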
const celeryProcess = await startLoggedProcess({
command: 'npx',
args: ['tsx', './scripts/setup.ts', 'celery'],
cwd: e2eDir,
label: 'celery worker',
logFilePath: path.join(logDir, 'cucumber-celery.log'),
})
let cleanupPromise: Promise<void> | undefined let cleanupPromise: Promise<void> | undefined
const cleanup = async () => { const cleanup = async () => {
if (!cleanupPromise) { if (!cleanupPromise) {
cleanupPromise = (async () => { cleanupPromise = (async () => {
await stopWebServer() await stopWebServer()
await stopManagedProcess(celeryProcess)
await stopManagedProcess(apiProcess) await stopManagedProcess(apiProcess)
if (startMiddlewareForRun) { if (startMiddlewareForRun) {

View File

@ -202,6 +202,32 @@ export const startApi = async () => {
}) })
} }
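// Run a Celery worker (solo pool) consuming the workflow_based_app_execution queue, reusing the API environment.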
export const startCelery = async () => {
const env = await getApiEnvironment()
await runForegroundProcess({
command: 'uv',
args: [
'run',
'--project',
'.',
'--no-sync',
'celery',
'-A',
'app.celery',
'worker',
'--pool',
'solo',
'--loglevel',
'INFO',
'-Q',
'workflow_based_app_execution',
],
cwd: apiDir,
env,
})
}
export const stopMiddleware = async () => { export const stopMiddleware = async () => {
await runCommandOrThrow({ await runCommandOrThrow({
command: 'docker', command: 'docker',
@ -308,7 +334,7 @@ export const startMiddleware = async () => {
} }
const printUsage = () => { const printUsage = () => {
console.log('Usage: tsx ./scripts/setup.ts <reset|middleware-up|middleware-down|api|web>') console.log('Usage: tsx ./scripts/setup.ts <reset|middleware-up|middleware-down|api|celery|web>')
} }
const main = async () => { const main = async () => {
@ -318,6 +344,9 @@ const main = async () => {
case 'api': case 'api':
await startApi() await startApi()
return return
case 'celery':
await startCelery()
return
case 'middleware-down': case 'middleware-down':
await stopMiddleware() await stopMiddleware()
return return

View File

@ -80,3 +80,83 @@ export async function deleteTestApp(id: string): Promise<void> {
await ctx.dispose() await ctx.dispose()
} }
} }
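// Sync a minimal two-node graph (start -> end) to the app's workflow draft so it can be run and published.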
export async function syncRunnableWorkflowDraft(appId: string): Promise<void> {
const ctx = await createApiContext()
try {
await ctx.post(`/console/api/apps/${appId}/workflows/draft`, {
data: {
graph: {
nodes: [
{
id: 'start',
type: 'custom',
position: { x: 80, y: 282 },
data: { id: 'start', type: 'start', title: 'Start', variables: [] },
},
{
id: 'end',
type: 'custom',
position: { x: 480, y: 282 },
data: {
id: 'end',
type: 'end',
title: 'End',
outputs: [{ variable: 'result', value_selector: ['sys', 'workflow_run_id'] }],
},
},
],
edges: [
{
id: 'start-end',
type: 'custom',
source: 'start',
target: 'end',
sourceHandle: 'source',
targetHandle: 'target',
},
],
viewport: { x: 0, y: 0, zoom: 1 },
},
features: {},
environment_variables: [],
conversation_variables: [],
},
})
}
finally {
await ctx.dispose()
}
}
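// Publish the current workflow draft via the console API, with empty release name and comment.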
export async function publishWorkflowApp(appId: string): Promise<void> {
const ctx = await createApiContext()
try {
await ctx.post(`/console/api/apps/${appId}/workflows/publish`, {
data: { marked_name: '', marked_comment: '' },
})
}
finally {
await ctx.dispose()
}
}
type AppDetailWithSite = {
site: { access_token: string, app_base_url: string, enable_site: boolean }
}
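// Enable the app's web site (public sharing) and build the shared URL from the site's base URL and access token.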
export async function enableAppSiteAndGetURL(appId: string): Promise<string> {
const ctx = await createApiContext()
try {
await ctx.post(`/console/api/apps/${appId}/site-enable`, {
data: { enable_site: true },
})
const res = await ctx.get(`/console/api/apps/${appId}`)
const body = (await res.json()) as AppDetailWithSite
const { app_base_url, access_token } = body.site
return `${app_base_url}/workflow/${access_token}`
}
finally {
await ctx.dispose()
}
}