diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 3dd00ee4db..c03f281858 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/devcontainers/python:3.12-bullseye +FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install libgmp-dev libmpfr-dev libmpc-dev diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 8246544061..ddec42e0ee 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -11,7 +11,7 @@ "nodeGypDependencies": true, "version": "lts" }, - "ghcr.io/devcontainers-contrib/features/npm-package:1": { + "ghcr.io/devcontainers-extra/features/npm-package:1": { "package": "typescript", "version": "latest" }, diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 116fc59ee8..37d351627b 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -39,25 +39,11 @@ jobs: - name: Install dependencies run: uv sync --project api --dev - - name: Run Unit tests - run: | - uv run --project api bash dev/pytest/pytest_unit_tests.sh - - name: Run pyrefly check run: | cd api uv add --dev pyrefly uv run pyrefly check || true - - name: Coverage Summary - run: | - set -x - # Extract coverage percentage and create a summary - TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])') - - # Create a detailed coverage summary - echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY - echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY - uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY - name: Run dify config tests run: uv run --project api dev/pytest/pytest_config_tests.py @@ -93,3 +79,19 @@ jobs: - name: Run TestContainers run: uv run --project api bash dev/pytest/pytest_testcontainers.sh + + - name: Run Unit tests + run: | + uv run --project api bash dev/pytest/pytest_unit_tests.sh + + - name: Coverage Summary + run: | + set -x + # Extract coverage percentage and create a summary + TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])') + + # Create a detailed coverage summary + echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY + echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY + uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY + diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml index ef69e08da9..0cae2ef552 100644 --- a/.github/workflows/autofix.yml +++ b/.github/workflows/autofix.yml @@ -30,6 +30,8 @@ jobs: run: | uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all + uvx --from ast-grep-cli sg -p '$A = db.Column($$$B)' -r '$A = mapped_column($$$B)' -l py --update-all + uvx --from ast-grep-cli sg -p '$A : $T = db.Column($$$B)' -r '$A : $T = mapped_column($$$B)' -l py --update-all # Convert Optional[T] to T | None (ignoring quoted types) cat > /tmp/optional-rule.yml << 'EOF' id: convert-optional-to-union diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 24a9da4400..f7f464a601 100644 --- 
a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -4,8 +4,7 @@ on: push: branches: - "main" - - "deploy/dev" - - "deploy/enterprise" + - "deploy/**" - "build/**" - "release/e-*" - "hotfix/**" diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml index de732c3134..cd1c86e668 100644 --- a/.github/workflows/deploy-dev.yml +++ b/.github/workflows/deploy-dev.yml @@ -18,7 +18,7 @@ jobs: - name: Deploy to server uses: appleboy/ssh-action@v0.1.8 with: - host: ${{ secrets.RAG_SSH_HOST }} + host: ${{ secrets.SSH_HOST }} username: ${{ secrets.SSH_USER }} key: ${{ secrets.SSH_PRIVATE_KEY }} script: | diff --git a/.github/workflows/deploy-rag-dev.yml b/.github/workflows/deploy-trigger-dev.yml similarity index 75% rename from .github/workflows/deploy-rag-dev.yml rename to .github/workflows/deploy-trigger-dev.yml index 86265aad6d..2d9a904fc5 100644 --- a/.github/workflows/deploy-rag-dev.yml +++ b/.github/workflows/deploy-trigger-dev.yml @@ -1,4 +1,4 @@ -name: Deploy RAG Dev +name: Deploy Trigger Dev permissions: contents: read @@ -7,7 +7,7 @@ on: workflow_run: workflows: ["Build and Push API & Web"] branches: - - "deploy/rag-dev" + - "deploy/trigger-dev" types: - completed @@ -16,12 +16,12 @@ jobs: runs-on: ubuntu-latest if: | github.event.workflow_run.conclusion == 'success' && - github.event.workflow_run.head_branch == 'deploy/rag-dev' + github.event.workflow_run.head_branch == 'deploy/trigger-dev' steps: - name: Deploy to server uses: appleboy/ssh-action@v0.1.8 with: - host: ${{ secrets.RAG_SSH_HOST }} + host: ${{ secrets.TRIGGER_SSH_HOST }} username: ${{ secrets.SSH_USER }} key: ${{ secrets.SSH_PRIVATE_KEY }} script: | diff --git a/.github/workflows/expose_service_ports.sh b/.github/workflows/expose_service_ports.sh index 01772ccf9f..e7d5f60288 100755 --- a/.github/workflows/expose_service_ports.sh +++ b/.github/workflows/expose_service_ports.sh @@ -1,6 +1,7 @@ #!/bin/bash yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml +yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml @@ -13,4 +14,4 @@ yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.ya yq eval '.services.oceanbase.ports += ["2881:2881"]' -i docker/docker-compose.yaml yq eval '.services.opengauss.ports += ["6600:6600"]' -i docker/docker-compose.yaml -echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss" +echo "Ports exposed for sandbox, weaviate (HTTP 8080, gRPC 50051), tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss" diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 06584c1b78..e652657705 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -103,6 +103,11 @@ jobs: run: | pnpm run lint + - name: Web type check + if: steps.changed-files.outputs.any_changed == 'true' + working-directory: ./web + run: pnpm run type-check + docker-compose-template: name: Docker Compose Template runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index 22a2c42566..76cfd7d9bf 100644 --- a/.gitignore +++ b/.gitignore @@ -97,6 +97,7 @@ __pypackages__/ # Celery stuff celerybeat-schedule 
+celerybeat-schedule.db celerybeat.pid # SageMath parsed files diff --git a/.vscode/launch.json.template b/.vscode/launch.json.template index f5a7f0893b..bf04d31998 100644 --- a/.vscode/launch.json.template +++ b/.vscode/launch.json.template @@ -40,7 +40,7 @@ "-c", "1", "-Q", - "dataset,generation,mail,ops_trace", + "dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline", "--loglevel", "INFO" ], diff --git a/AGENTS.md b/AGENTS.md index 5859cd1bd9..2ef7931efc 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -14,7 +14,7 @@ The codebase is split into: - Run backend CLI commands through `uv run --project api `. -- Backend QA gate requires passing `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before review. +- Before submission, all backend modifications must pass local checks: `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`. - Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks. diff --git a/Makefile b/Makefile index ea560c7157..19c398ec82 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,6 @@ prepare-web: @echo "🌐 Setting up web environment..." @cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists" @cd web && pnpm install - @cd web && pnpm build @echo "✅ Web environment prepared (not started)" # Step 3: Prepare API environment diff --git a/README.md b/README.md index 8159057f55..110d74b63d 100644 --- a/README.md +++ b/README.md @@ -40,18 +40,18 @@

 README in English
-繁體中文文件
-简体中文版自述文件
-日本語のREADME
-README en Español
-README en Français
-README tlhIngan Hol
-README in Korean
-README بالعربية
-Türkçe README
-README Tiếng Việt
-README in Deutsch
-README in বাংলা
+繁體中文文件
+简体中文文件
+日本語のREADME
+README en Español
+README en Français
+README tlhIngan Hol
+README in Korean
+README بالعربية
+Türkçe README
+README Tiếng Việt
+README in Deutsch
+README in বাংলা

 Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more—allowing you to quickly move from prototype to production.

@@ -63,7 +63,7 @@ Dify is an open-source platform for developing LLM applications. Its intuitive i
 > - CPU >= 2 Core
 > - RAM >= 4 GiB

-</br>
+<br/>

 The easiest way to start the Dify server is through [Docker Compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:

@@ -109,15 +109,15 @@ All of Dify's offerings come with corresponding APIs, so you could effortlessly

 ## Using Dify

-- **Cloud </br>**
+- **Cloud <br/>**
   We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.

-- **Self-hosting Dify Community Edition </br>**
+- **Self-hosting Dify Community Edition <br/>**
   Quickly get Dify running in your environment with this [starter guide](#quick-start). Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.

-- **Dify for enterprise / organizations </br>**
-  We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs.
+- **Dify for enterprise / organizations <br/>**
+  We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs.
 > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.

@@ -129,8 +129,18 @@ Star Dify on GitHub and be instantly notified of new releases.

 ## Advanced Setup

+### Custom configurations
+
 If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).

+### Metrics Monitoring with Grafana
+
+Import the dashboard into Grafana, using Dify's PostgreSQL database as the data source, to monitor metrics at the granularity of apps, tenants, messages, and more.
+
+- [Grafana Dashboard by @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
+
+### Deployment with Kubernetes
+
 If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.

 - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)

diff --git a/api/.env.example b/api/.env.example
index d53de3779b..c59d3ea16f 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -156,6 +156,9 @@ SUPABASE_URL=your-server-url
 # CORS configuration
 WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
 CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
+# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
+# Provide the registrable domain (e.g. example.com); leading dots are optional.
+COOKIE_DOMAIN=

 # Vector database configuration
 # Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
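The `COOKIE_DOMAIN` knob above pairs with the cookie-name constants added to `api/constants/__init__.py` later in this diff. The actual call site is not part of this excerpt, so the endpoint and flags below are illustrative assumptions; this is a minimal Flask sketch of how such a setting is typically threaded into `set_cookie` so the console and API subdomains can share auth cookies:

```python
# Illustrative sketch only: the real Dify call site is not shown in this diff.
from flask import Flask, make_response

app = Flask(__name__)

COOKIE_DOMAIN = "example.com"              # in Dify this would come from HttpConfig
COOKIE_NAME_ACCESS_TOKEN = "access_token"  # constant added in this diff


@app.post("/console/api/login")            # hypothetical endpoint
def login():
    resp = make_response({"result": "success"})
    resp.set_cookie(
        COOKIE_NAME_ACCESS_TOKEN,
        "jwt-goes-here",
        domain=COOKIE_DOMAIN or None,  # empty string falls back to a host-only cookie
        httponly=True,                 # keep the token out of reach of page scripts
        secure=True,                   # only send over HTTPS
        samesite="Lax",
    )
    return resp
```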
@@ -343,6 +346,15 @@ OCEANBASE_VECTOR_DATABASE=test OCEANBASE_MEMORY_LIMIT=6G OCEANBASE_ENABLE_HYBRID_SEARCH=false +# AlibabaCloud MySQL Vector configuration +ALIBABACLOUD_MYSQL_HOST=127.0.0.1 +ALIBABACLOUD_MYSQL_PORT=3306 +ALIBABACLOUD_MYSQL_USER=root +ALIBABACLOUD_MYSQL_PASSWORD=root +ALIBABACLOUD_MYSQL_DATABASE=dify +ALIBABACLOUD_MYSQL_MAX_CONNECTION=5 +ALIBABACLOUD_MYSQL_HNSW_M=6 + # openGauss configuration OPENGAUSS_HOST=127.0.0.1 OPENGAUSS_PORT=6600 @@ -425,10 +437,13 @@ CODE_EXECUTION_SSL_VERIFY=True CODE_EXECUTION_POOL_MAX_CONNECTIONS=100 CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20 CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0 +CODE_EXECUTION_CONNECT_TIMEOUT=10 +CODE_EXECUTION_READ_TIMEOUT=60 +CODE_EXECUTION_WRITE_TIMEOUT=10 CODE_MAX_NUMBER=9223372036854775807 CODE_MIN_NUMBER=-9223372036854775808 -CODE_MAX_STRING_LENGTH=80000 -TEMPLATE_TRANSFORM_MAX_LENGTH=80000 +CODE_MAX_STRING_LENGTH=400000 +TEMPLATE_TRANSFORM_MAX_LENGTH=400000 CODE_MAX_STRING_ARRAY_LENGTH=30 CODE_MAX_OBJECT_ARRAY_LENGTH=30 CODE_MAX_NUMBER_ARRAY_LENGTH=1000 diff --git a/api/.ruff.toml b/api/.ruff.toml index 643bc063a1..5a29e1d8fa 100644 --- a/api/.ruff.toml +++ b/api/.ruff.toml @@ -81,7 +81,6 @@ ignore = [ "SIM113", # enumerate-for-loop "SIM117", # multiple-with-statements "SIM210", # if-expr-with-true-false - "UP038", # deprecated and not recommended by Ruff, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/ ] [lint.per-file-ignores] diff --git a/api/.vscode/launch.json.example b/api/.vscode/launch.json.example index b9e32e2511..e97828f9d8 100644 --- a/api/.vscode/launch.json.example +++ b/api/.vscode/launch.json.example @@ -54,7 +54,7 @@ "--loglevel", "DEBUG", "-Q", - "dataset,generation,mail,ops_trace,app_deletion" + "dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline" ] } ] diff --git a/api/README.md b/api/README.md index 5ecf92a4f0..fc21158c7a 100644 --- a/api/README.md +++ b/api/README.md @@ -80,10 +80,10 @@ 1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service. 
```bash -uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation +uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline ``` -Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal: +Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service: ```bash uv run celery -A app.celery beat diff --git a/api/commands.py b/api/commands.py index 82efe34611..084fd576a1 100644 --- a/api/commands.py +++ b/api/commands.py @@ -321,6 +321,8 @@ def migrate_knowledge_vector_database(): ) datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False) + if not datasets.items: + break except SQLAlchemyError: raise @@ -1521,6 +1523,14 @@ def transform_datasource_credentials(): auth_count = 0 for firecrawl_tenant_credential in firecrawl_tenant_credentials: auth_count += 1 + if not firecrawl_tenant_credential.credentials: + click.echo( + click.style( + f"Skipping firecrawl credential for tenant {tenant_id} due to missing credentials.", + fg="yellow", + ) + ) + continue # get credential api key credentials_json = json.loads(firecrawl_tenant_credential.credentials) api_key = credentials_json.get("config", {}).get("api_key") @@ -1576,6 +1586,14 @@ def transform_datasource_credentials(): auth_count = 0 for jina_tenant_credential in jina_tenant_credentials: auth_count += 1 + if not jina_tenant_credential.credentials: + click.echo( + click.style( + f"Skipping jina credential for tenant {tenant_id} due to missing credentials.", + fg="yellow", + ) + ) + continue # get credential api key credentials_json = json.loads(jina_tenant_credential.credentials) api_key = credentials_json.get("config", {}).get("api_key") diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 363cf4e2b5..b2a2f8d0fd 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -150,7 +150,7 @@ class CodeExecutionSandboxConfig(BaseSettings): CODE_MAX_STRING_LENGTH: PositiveInt = Field( description="Maximum allowed length for strings in code execution", - default=80000, + default=400_000, ) CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field( @@ -189,6 +189,11 @@ class PluginConfig(BaseSettings): default="plugin-api-key", ) + PLUGIN_DAEMON_TIMEOUT: PositiveFloat | None = Field( + description="Timeout in seconds for requests to the plugin daemon (set to None to disable)", + default=300.0, + ) + INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key") PLUGIN_REMOTE_INSTALL_HOST: str = Field( @@ -332,6 +337,11 @@ class HttpConfig(BaseSettings): HTTP-related configurations for the application """ + COOKIE_DOMAIN: str = Field( + description="Explicit cookie domain for console/service cookies when sharing across subdomains", + default="", + ) + API_COMPRESSION_ENABLED: bool = Field( description="Enable or disable gzip compression for HTTP responses", default=False, @@ -362,11 +372,11 @@ class HttpConfig(BaseSettings): ) HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field( - ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60 + ge=1, description="Maximum read timeout in seconds for HTTP requests", default=600 ) HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field( - ge=1, 
description="Maximum write timeout in seconds for HTTP requests", default=20 + ge=1, description="Maximum write timeout in seconds for HTTP requests", default=600 ) HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field( @@ -543,7 +553,7 @@ class UpdateConfig(BaseSettings): class WorkflowVariableTruncationConfig(BaseSettings): WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE: PositiveInt = Field( - # 100KB + # 1000 KiB 1024_000, description="Maximum size for variable to trigger final truncation.", ) @@ -582,6 +592,11 @@ class WorkflowConfig(BaseSettings): default=200 * 1024, ) + TEMPLATE_TRANSFORM_MAX_LENGTH: PositiveInt = Field( + description="Maximum number of characters allowed in Template Transform node output", + default=400_000, + ) + # GraphEngine Worker Pool Configuration GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field( description="Minimum number of workers per GraphEngine instance", @@ -766,7 +781,7 @@ class MailConfig(BaseSettings): MAIL_TEMPLATING_TIMEOUT: int = Field( description=""" - Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates. + Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates. Only available in sandbox mode.""", default=3, ) diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 62b3cc9842..816d0e442f 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -18,6 +18,7 @@ from .storage.opendal_storage_config import OpenDALStorageConfig from .storage.supabase_storage_config import SupabaseStorageConfig from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig +from .vdb.alibabacloud_mysql_config import AlibabaCloudMySQLConfig from .vdb.analyticdb_config import AnalyticdbConfig from .vdb.baidu_vector_config import BaiduVectorDBConfig from .vdb.chroma_config import ChromaConfig @@ -144,7 +145,7 @@ class DatabaseConfig(BaseSettings): default="postgresql", ) - @computed_field # type: ignore[misc] + @computed_field # type: ignore[prop-decorator] @property def SQLALCHEMY_DATABASE_URI(self) -> str: db_extras = ( @@ -197,7 +198,7 @@ class DatabaseConfig(BaseSettings): default=os.cpu_count() or 1, ) - @computed_field # type: ignore[misc] + @computed_field # type: ignore[prop-decorator] @property def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]: # Parse DB_EXTRAS for 'options' @@ -330,6 +331,7 @@ class MiddlewareConfig( ClickzettaConfig, HuaweiCloudConfig, MilvusConfig, + AlibabaCloudMySQLConfig, MyScaleConfig, OpenSearchConfig, OracleConfig, diff --git a/api/configs/middleware/vdb/alibabacloud_mysql_config.py b/api/configs/middleware/vdb/alibabacloud_mysql_config.py new file mode 100644 index 0000000000..a76400ed1c --- /dev/null +++ b/api/configs/middleware/vdb/alibabacloud_mysql_config.py @@ -0,0 +1,54 @@ +from pydantic import Field, PositiveInt +from pydantic_settings import BaseSettings + + +class AlibabaCloudMySQLConfig(BaseSettings): + """ + Configuration settings for AlibabaCloud MySQL vector database + """ + + ALIBABACLOUD_MYSQL_HOST: str = Field( + description="Hostname or IP address of the AlibabaCloud MySQL server (e.g., 'localhost' or 'mysql.aliyun.com')", + default="localhost", + ) + + ALIBABACLOUD_MYSQL_PORT: PositiveInt = Field( + description="Port number on which the AlibabaCloud MySQL server is listening (default is 3306)", + default=3306, + ) + + ALIBABACLOUD_MYSQL_USER: str = Field( + description="Username for 
authenticating with AlibabaCloud MySQL (default is 'root')", + default="root", + ) + + ALIBABACLOUD_MYSQL_PASSWORD: str = Field( + description="Password for authenticating with AlibabaCloud MySQL (default is an empty string)", + default="", + ) + + ALIBABACLOUD_MYSQL_DATABASE: str = Field( + description="Name of the AlibabaCloud MySQL database to connect to (default is 'dify')", + default="dify", + ) + + ALIBABACLOUD_MYSQL_MAX_CONNECTION: PositiveInt = Field( + description="Maximum number of connections in the connection pool", + default=5, + ) + + ALIBABACLOUD_MYSQL_CHARSET: str = Field( + description="Character set for AlibabaCloud MySQL connection (default is 'utf8mb4')", + default="utf8mb4", + ) + + ALIBABACLOUD_MYSQL_DISTANCE_FUNCTION: str = Field( + description="Distance function used for vector similarity search in AlibabaCloud MySQL " + "(e.g., 'cosine', 'euclidean')", + default="cosine", + ) + + ALIBABACLOUD_MYSQL_HNSW_M: PositiveInt = Field( + description="Maximum number of connections per layer for HNSW vector index (default is 6, range: 3-200)", + default=6, + ) diff --git a/api/configs/middleware/vdb/opensearch_config.py b/api/configs/middleware/vdb/opensearch_config.py index ba015a6eb9..a7d712545e 100644 --- a/api/configs/middleware/vdb/opensearch_config.py +++ b/api/configs/middleware/vdb/opensearch_config.py @@ -1,23 +1,24 @@ -from enum import Enum +from enum import StrEnum from typing import Literal from pydantic import Field, PositiveInt from pydantic_settings import BaseSettings +class AuthMethod(StrEnum): + """ + Authentication method for OpenSearch + """ + + BASIC = "basic" + AWS_MANAGED_IAM = "aws_managed_iam" + + class OpenSearchConfig(BaseSettings): """ Configuration settings for OpenSearch """ - class AuthMethod(Enum): - """ - Authentication method for OpenSearch - """ - - BASIC = "basic" - AWS_MANAGED_IAM = "aws_managed_iam" - OPENSEARCH_HOST: str | None = Field( description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')", default=None, diff --git a/api/constants/__init__.py b/api/constants/__init__.py index fe8f4f8785..e441395afc 100644 --- a/api/constants/__init__.py +++ b/api/constants/__init__.py @@ -1,4 +1,5 @@ from configs import dify_config +from libs.collection_utils import convert_to_lower_and_upper_set HIDDEN_VALUE = "[__HIDDEN__]" UNKNOWN_VALUE = "[__UNKNOWN__]" @@ -6,24 +7,39 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000" DEFAULT_FILE_NUMBER_LIMITS = 3 -IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"] -IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS]) +IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"}) -VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"] -VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS]) +VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"}) -AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"] -AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS]) +AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"}) - -_doc_extensions: list[str] +_doc_extensions: set[str] if dify_config.ETL_TYPE == "Unstructured": - _doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"] - _doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub")) + _doc_extensions = { + "txt", + "markdown", + "md", + "mdx", + "pdf", + "html", + "htm", + "xlsx", + "xls", + "vtt", + 
"properties", + "doc", + "docx", + "csv", + "eml", + "msg", + "pptx", + "xml", + "epub", + } if dify_config.UNSTRUCTURED_API_URL: - _doc_extensions.append("ppt") + _doc_extensions.add("ppt") else: - _doc_extensions = [ + _doc_extensions = { "txt", "markdown", "md", @@ -37,5 +53,18 @@ else: "csv", "vtt", "properties", - ] -DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions] + } +DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions) + +# console +COOKIE_NAME_ACCESS_TOKEN = "access_token" +COOKIE_NAME_REFRESH_TOKEN = "refresh_token" +COOKIE_NAME_CSRF_TOKEN = "csrf_token" + +# webapp +COOKIE_NAME_WEBAPP_ACCESS_TOKEN = "webapp_access_token" +COOKIE_NAME_PASSPORT = "passport" + +HEADER_NAME_CSRF_TOKEN = "X-CSRF-Token" +HEADER_NAME_APP_CODE = "X-App-Code" +HEADER_NAME_PASSPORT = "X-App-Passport" diff --git a/api/constants/languages.py b/api/constants/languages.py index a509ddcf5d..0312a558c9 100644 --- a/api/constants/languages.py +++ b/api/constants/languages.py @@ -31,3 +31,9 @@ def supported_language(lang): error = f"{lang} is not a valid language." raise ValueError(error) + + +def get_valid_language(lang: str | None) -> str: + if lang and lang in languages: + return lang + return languages[0] diff --git a/api/controllers/common/helpers.py b/api/controllers/common/helpers.py index 6a5197635e..ef89e66980 100644 --- a/api/controllers/common/helpers.py +++ b/api/controllers/common/helpers.py @@ -24,7 +24,7 @@ except ImportError: ) else: warnings.warn("To use python-magic guess MIMETYPE, you need to install `libmagic`", stacklevel=2) - magic = None # type: ignore + magic = None # type: ignore[assignment] from pydantic import BaseModel diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index 93f242ad28..2c4d8709eb 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -15,6 +15,7 @@ from constants.languages import supported_language from controllers.console import api, console_ns from controllers.console.wraps import only_edition_cloud from extensions.ext_database import db +from libs.token import extract_access_token from models.model import App, InstalledApp, RecommendedApp @@ -24,19 +25,9 @@ def admin_required(view: Callable[P, R]): if not dify_config.ADMIN_API_KEY: raise Unauthorized("API key is invalid.") - auth_header = request.headers.get("Authorization") - if auth_header is None: + auth_token = extract_access_token(request) + if not auth_token: raise Unauthorized("Authorization header is missing.") - - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - - auth_scheme, auth_token = auth_header.split(None, 1) - auth_scheme = auth_scheme.lower() - - if auth_scheme != "bearer": - raise Unauthorized("Invalid Authorization header format. 
Expected 'Bearer ' format.") - if auth_token != dify_config.ADMIN_API_KEY: raise Unauthorized("API key is invalid.") @@ -70,15 +61,17 @@ class InsertExploreAppListApi(Resource): @only_edition_cloud @admin_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("app_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("desc", type=str, location="json") - parser.add_argument("copyright", type=str, location="json") - parser.add_argument("privacy_policy", type=str, location="json") - parser.add_argument("custom_disclaimer", type=str, location="json") - parser.add_argument("language", type=supported_language, required=True, nullable=False, location="json") - parser.add_argument("category", type=str, required=True, nullable=False, location="json") - parser.add_argument("position", type=int, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("app_id", type=str, required=True, nullable=False, location="json") + .add_argument("desc", type=str, location="json") + .add_argument("copyright", type=str, location="json") + .add_argument("privacy_policy", type=str, location="json") + .add_argument("custom_disclaimer", type=str, location="json") + .add_argument("language", type=supported_language, required=True, nullable=False, location="json") + .add_argument("category", type=str, required=True, nullable=False, location="json") + .add_argument("position", type=int, required=True, nullable=False, location="json") + ) args = parser.parse_args() app = db.session.execute(select(App).where(App.id == args["app_id"])).scalar_one_or_none() diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index fec527e4cb..4f04af7932 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -1,5 +1,4 @@ import flask_restx -from flask_login import current_user from flask_restx import Resource, fields, marshal_with from flask_restx._http import HTTPStatus from sqlalchemy import select @@ -8,12 +7,12 @@ from werkzeug.exceptions import Forbidden from extensions.ext_database import db from libs.helper import TimestampField -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.dataset import Dataset from models.model import ApiToken, App from . 
import api, console_ns -from .wraps import account_initialization_required, setup_required +from .wraps import account_initialization_required, edit_permission_required, setup_required api_key_fields = { "id": fields.String, @@ -57,7 +56,9 @@ class BaseApiKeyListResource(Resource): def get(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) + _, current_tenant_id = current_account_with_tenant() + + _get_resource(resource_id, current_tenant_id, self.resource_model) keys = db.session.scalars( select(ApiToken).where( ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id @@ -66,13 +67,12 @@ class BaseApiKeyListResource(Resource): return {"items": keys} @marshal_with(api_key_fields) + @edit_permission_required def post(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) - if not current_user.is_editor: - raise Forbidden() - + _, current_tenant_id = current_account_with_tenant() + _get_resource(resource_id, current_tenant_id, self.resource_model) current_key_count = ( db.session.query(ApiToken) .where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id) @@ -89,7 +89,7 @@ class BaseApiKeyListResource(Resource): key = ApiToken.generate_api_key(self.token_prefix or "", 24) api_token = ApiToken() setattr(api_token, self.resource_id_field, resource_id) - api_token.tenant_id = current_user.current_tenant_id + api_token.tenant_id = current_tenant_id api_token.token = key api_token.type = self.resource_type db.session.add(api_token) @@ -108,7 +108,8 @@ class BaseApiKeyResource(Resource): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) api_key_id = str(api_key_id) - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) + current_user, current_tenant_id = current_account_with_tenant() + _get_resource(resource_id, current_tenant_id, self.resource_model) # The role of the current user in the ta table must be admin or owner if not current_user.is_admin_or_owner: @@ -152,11 +153,6 @@ class AppApiKeyListResource(BaseApiKeyListResource): """Create a new API key for an app""" return super().post(resource_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "app" resource_model = App resource_id_field = "app_id" @@ -173,11 +169,6 @@ class AppApiKeyResource(BaseApiKeyResource): """Delete an API key for an app""" return super().delete(resource_id, api_key_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "app" resource_model = App resource_id_field = "app_id" @@ -202,11 +193,6 @@ class DatasetApiKeyListResource(BaseApiKeyListResource): """Create a new API key for a dataset""" return super().post(resource_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "dataset" resource_model = Dataset resource_id_field = "dataset_id" @@ -223,11 +209,6 @@ class 
DatasetApiKeyResource(BaseApiKeyResource): """Delete an API key for a dataset""" return super().delete(resource_id, api_key_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "dataset" resource_model = Dataset resource_id_field = "dataset_id" diff --git a/api/controllers/console/app/advanced_prompt_template.py b/api/controllers/console/app/advanced_prompt_template.py index 315825db79..5885d7b447 100644 --- a/api/controllers/console/app/advanced_prompt_template.py +++ b/api/controllers/console/app/advanced_prompt_template.py @@ -25,11 +25,13 @@ class AdvancedPromptTemplateList(Resource): @login_required @account_initialization_required def get(self): - parser = reqparse.RequestParser() - parser.add_argument("app_mode", type=str, required=True, location="args") - parser.add_argument("model_mode", type=str, required=True, location="args") - parser.add_argument("has_context", type=str, required=False, default="true", location="args") - parser.add_argument("model_name", type=str, required=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("app_mode", type=str, required=True, location="args") + .add_argument("model_mode", type=str, required=True, location="args") + .add_argument("has_context", type=str, required=False, default="true", location="args") + .add_argument("model_name", type=str, required=True, location="args") + ) args = parser.parse_args() return AdvancedPromptTemplateService.get_prompt(args) diff --git a/api/controllers/console/app/agent.py b/api/controllers/console/app/agent.py index c063f336c7..717263a74d 100644 --- a/api/controllers/console/app/agent.py +++ b/api/controllers/console/app/agent.py @@ -27,9 +27,11 @@ class AgentLogApi(Resource): @get_app_model(mode=[AppMode.AGENT_CHAT]) def get(self, app_model): """Get agent logs""" - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=uuid_value, required=True, location="args") - parser.add_argument("conversation_id", type=uuid_value, required=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=uuid_value, required=True, location="args") + .add_argument("conversation_id", type=uuid_value, required=True, location="args") + ) args = parser.parse_args() diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index d0ee11fe75..932214058a 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -1,15 +1,14 @@ from typing import Literal from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse -from werkzeug.exceptions import Forbidden from controllers.common.errors import NoFileUploadedError, TooManyFilesError from controllers.console import api, console_ns from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from extensions.ext_redis import redis_client @@ -42,15 +41,15 @@ class AnnotationReplyActionApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def post(self, app_id, action: Literal["enable", "disable"]): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) - parser = reqparse.RequestParser() - 
parser.add_argument("score_threshold", required=True, type=float, location="json") - parser.add_argument("embedding_provider_name", required=True, type=str, location="json") - parser.add_argument("embedding_model_name", required=True, type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("score_threshold", required=True, type=float, location="json") + .add_argument("embedding_provider_name", required=True, type=str, location="json") + .add_argument("embedding_model_name", required=True, type=str, location="json") + ) args = parser.parse_args() if action == "enable": result = AppAnnotationService.enable_app_annotation(args, app_id) @@ -69,10 +68,8 @@ class AppAnnotationSettingDetailApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) result = AppAnnotationService.get_app_annotation_setting_by_app_id(app_id) return result, 200 @@ -98,15 +95,12 @@ class AppAnnotationSettingUpdateApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, app_id, annotation_setting_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_setting_id = str(annotation_setting_id) - parser = reqparse.RequestParser() - parser.add_argument("score_threshold", required=True, type=float, location="json") + parser = reqparse.RequestParser().add_argument("score_threshold", required=True, type=float, location="json") args = parser.parse_args() result = AppAnnotationService.update_app_annotation_setting(app_id, annotation_setting_id, args) @@ -124,10 +118,8 @@ class AnnotationReplyActionStatusApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def get(self, app_id, job_id, action): - if not current_user.is_editor: - raise Forbidden() - job_id = str(job_id) app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}" cache_result = redis_client.get(app_annotation_job_key) @@ -159,10 +151,8 @@ class AnnotationApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) keyword = request.args.get("keyword", default="", type=str) @@ -198,14 +188,14 @@ class AnnotationApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("annotation") @marshal_with(annotation_fields) + @edit_permission_required def post(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) - parser = reqparse.RequestParser() - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("question", required=True, type=str, location="json") + .add_argument("answer", required=True, type=str, location="json") + ) args = parser.parse_args() annotation = AppAnnotationService.insert_app_annotation_directly(args, app_id) return annotation @@ -213,10 +203,8 @@ class AnnotationApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_id): - if not current_user.is_editor: - raise 
Forbidden() - app_id = str(app_id) # Use request.args.getlist to get annotation_ids array directly @@ -249,10 +237,8 @@ class AnnotationExportApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id) response = {"data": marshal(annotation_list, annotation_fields)} @@ -271,16 +257,16 @@ class AnnotationUpdateDeleteApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required @marshal_with(annotation_fields) def post(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_id = str(annotation_id) - parser = reqparse.RequestParser() - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("question", required=True, type=str, location="json") + .add_argument("answer", required=True, type=str, location="json") + ) args = parser.parse_args() annotation = AppAnnotationService.update_app_annotation_directly(args, app_id, annotation_id) return annotation @@ -288,10 +274,8 @@ class AnnotationUpdateDeleteApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_id = str(annotation_id) AppAnnotationService.delete_app_annotation(app_id, annotation_id) @@ -310,10 +294,8 @@ class AnnotationBatchImportApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def post(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) # check file if "file" not in request.files: @@ -341,10 +323,8 @@ class AnnotationBatchImportStatusApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def get(self, app_id, job_id): - if not current_user.is_editor: - raise Forbidden() - job_id = str(job_id) indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" cache_result = redis_client.get(indexing_cache_key) @@ -376,10 +356,8 @@ class AnnotationHitHistoryListApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) app_id = str(app_id) diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 2d2e4b448a..17505d69b2 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -1,7 +1,5 @@ import uuid -from typing import cast -from flask_login import current_user from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session @@ -12,14 +10,16 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, 
enterprise_license_required, setup_required, ) from core.ops.ops_trace_manager import OpsTraceManager from extensions.ext_database import db from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields -from libs.login import login_required -from models import Account, App +from libs.login import current_account_with_tenant, login_required +from libs.validators import validate_description_length +from models import App from services.app_dsl_service import AppDslService, ImportMode from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -28,12 +28,6 @@ from services.feature_service import FeatureService ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"] -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - @console_ns.route("/apps") class AppListApi(Resource): @api.doc("list_apps") @@ -61,6 +55,7 @@ class AppListApi(Resource): @enterprise_license_required def get(self): """Get app list""" + current_user, current_tenant_id = current_account_with_tenant() def uuid_list(value): try: @@ -68,34 +63,36 @@ class AppListApi(Resource): except ValueError: abort(400, message="Invalid UUID format in tag_ids.") - parser = reqparse.RequestParser() - parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") - parser.add_argument( - "mode", - type=str, - choices=[ - "completion", - "chat", - "advanced-chat", - "workflow", - "agent-chat", - "channel", - "all", - ], - default="all", - location="args", - required=False, + parser = ( + reqparse.RequestParser() + .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + .add_argument( + "mode", + type=str, + choices=[ + "completion", + "chat", + "advanced-chat", + "workflow", + "agent-chat", + "channel", + "all", + ], + default="all", + location="args", + required=False, + ) + .add_argument("name", type=str, location="args", required=False) + .add_argument("tag_ids", type=uuid_list, location="args", required=False) + .add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False) ) - parser.add_argument("name", type=str, location="args", required=False) - parser.add_argument("tag_ids", type=uuid_list, location="args", required=False) - parser.add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False) args = parser.parse_args() # get app list app_service = AppService() - app_pagination = app_service.get_paginate_apps(current_user.id, current_user.current_tenant_id, args) + app_pagination = app_service.get_paginate_apps(current_user.id, current_tenant_id, args) if not app_pagination: return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False} @@ -134,30 +131,26 @@ class AppListApi(Resource): @account_initialization_required @marshal_with(app_detail_fields) @cloud_edition_billing_resource_check("apps") + @edit_permission_required def post(self): """Create app""" - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") - parser.add_argument("description", type=_validate_description_length, 
location="json") - parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") + current_user, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, location="json") + .add_argument("description", type=validate_description_length, location="json") + .add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + ) args = parser.parse_args() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - if "mode" not in args or args["mode"] is None: raise BadRequest("mode is required") app_service = AppService() - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") - if current_user.current_tenant_id is None: - raise ValueError("current_user.current_tenant_id cannot be None") - app = app_service.create_app(current_user.current_tenant_id, args, current_user) + app = app_service.create_app(current_tenant_id, args, current_user) return app, 201 @@ -210,21 +203,20 @@ class AppApi(Resource): @login_required @account_initialization_required @get_app_model + @edit_permission_required @marshal_with(app_detail_fields_with_site) def put(self, app_model): """Update app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") - parser.add_argument("use_icon_as_answer_icon", type=bool, location="json") - parser.add_argument("max_active_requests", type=int, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("description", type=validate_description_length, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + .add_argument("use_icon_as_answer_icon", type=bool, location="json") + .add_argument("max_active_requests", type=int, location="json") + ) args = parser.parse_args() app_service = AppService() @@ -253,12 +245,9 @@ class AppApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_model): """Delete app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - app_service = AppService() app_service.delete_app(app_model) @@ -288,28 +277,29 @@ class AppCopyApi(Resource): @login_required @account_initialization_required @get_app_model + @edit_permission_required @marshal_with(app_detail_fields_with_site) def post(self, app_model): """Copy app""" # 
The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, location="json") + .add_argument("description", type=validate_description_length, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + ) args = parser.parse_args() with Session(db.engine) as session: import_service = AppDslService(session) yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True) - account = cast(Account, current_user) result = import_service.import_app( - account=account, - import_mode=ImportMode.YAML_CONTENT.value, + account=current_user, + import_mode=ImportMode.YAML_CONTENT, yaml_content=yaml_content, name=args.get("name"), description=args.get("description"), @@ -345,16 +335,15 @@ class AppExportApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_model): """Export app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - # Add include_secret params - parser = reqparse.RequestParser() - parser.add_argument("include_secret", type=inputs.boolean, default=False, location="args") - parser.add_argument("workflow_id", type=str, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("include_secret", type=inputs.boolean, default=False, location="args") + .add_argument("workflow_id", type=str, location="args") + ) args = parser.parse_args() return { @@ -376,13 +365,9 @@ class AppNameApi(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json") args = parser.parse_args() app_service = AppService() @@ -413,14 +398,13 @@ class AppIconApi(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + ) args = parser.parse_args() app_service = AppService() @@ -446,13 +430,9 @@ class AppSiteStatus(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + 
@edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("enable_site", type=bool, required=True, location="json") + parser = reqparse.RequestParser().add_argument("enable_site", type=bool, required=True, location="json") args = parser.parse_args() app_service = AppService() @@ -480,11 +460,11 @@ class AppApiStatus(Resource): @marshal_with(app_detail_fields) def post(self, app_model): # The role of the current user in the ta table must be admin or owner + current_user, _ = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("enable_api", type=bool, required=True, location="json") + parser = reqparse.RequestParser().add_argument("enable_api", type=bool, required=True, location="json") args = parser.parse_args() app_service = AppService() @@ -525,13 +505,14 @@ class AppTraceApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, app_id): # add app trace - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("enabled", type=bool, required=True, location="json") - parser.add_argument("tracing_provider", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("enabled", type=bool, required=True, location="json") + .add_argument("tracing_provider", type=str, required=True, location="json") + ) args = parser.parse_args() OpsTraceManager.update_app_tracing_config( diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index c14f597c25..d902c129ad 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -1,20 +1,16 @@ -from typing import cast - -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from sqlalchemy.orm import Session -from werkzeug.exceptions import Forbidden from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from extensions.ext_database import db from fields.app_fields import app_import_check_dependencies_fields, app_import_fields -from libs.login import login_required -from models import Account +from libs.login import current_account_with_tenant, login_required from models.model import App from services.app_dsl_service import AppDslService, ImportStatus from services.enterprise.enterprise_service import EnterpriseService @@ -30,28 +26,29 @@ class AppImportApi(Resource): @account_initialization_required @marshal_with(app_import_fields) @cloud_edition_billing_resource_check("apps") + @edit_permission_required def post(self): # Check user role first - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("mode", type=str, required=True, location="json") - parser.add_argument("yaml_content", type=str, location="json") - parser.add_argument("yaml_url", type=str, location="json") - parser.add_argument("name", type=str, location="json") - parser.add_argument("description", type=str, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, 
location="json") - parser.add_argument("icon_background", type=str, location="json") - parser.add_argument("app_id", type=str, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("mode", type=str, required=True, location="json") + .add_argument("yaml_content", type=str, location="json") + .add_argument("yaml_url", type=str, location="json") + .add_argument("name", type=str, location="json") + .add_argument("description", type=str, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + .add_argument("app_id", type=str, location="json") + ) args = parser.parse_args() # Create service with session with Session(db.engine) as session: import_service = AppDslService(session) # Import app - account = cast(Account, current_user) + account = current_user result = import_service.import_app( account=account, import_mode=args["mode"], @@ -70,9 +67,9 @@ class AppImportApi(Resource): EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private") # Return appropriate status code based on result status = result.status - if status == ImportStatus.FAILED.value: + if status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 - elif status == ImportStatus.PENDING.value: + elif status == ImportStatus.PENDING: return result.model_dump(mode="json"), 202 return result.model_dump(mode="json"), 200 @@ -83,21 +80,21 @@ class AppImportConfirmApi(Resource): @login_required @account_initialization_required @marshal_with(app_import_fields) + @edit_permission_required def post(self, import_id): # Check user role first - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() # Create service with session with Session(db.engine) as session: import_service = AppDslService(session) # Confirm import - account = cast(Account, current_user) + account = current_user result = import_service.confirm_import(import_id=import_id, account=account) session.commit() # Return appropriate status code based on result - if result.status == ImportStatus.FAILED.value: + if result.status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 return result.model_dump(mode="json"), 200 @@ -109,10 +106,8 @@ class AppImportCheckDependenciesApi(Resource): @get_app_model @account_initialization_required @marshal_with(app_import_check_dependencies_fields) + @edit_permission_required def get(self, app_model: App): - if not current_user.is_editor: - raise Forbidden() - with Session(db.engine) as session: import_service = AppDslService(session) result = import_service.check_dependencies(app_model=app_model) diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 7d659dae0d..8170ba271a 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -111,11 +111,13 @@ class ChatMessageTextApi(Resource): @account_initialization_required def post(self, app_model: App): try: - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=str, location="json") - parser.add_argument("text", type=str, location="json") - parser.add_argument("voice", type=str, location="json") - parser.add_argument("streaming", type=bool, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=str, location="json") + .add_argument("text", type=str, location="json") + 
.add_argument("voice", type=str, location="json") + .add_argument("streaming", type=bool, location="json") + ) args = parser.parse_args() message_id = args.get("message_id", None) @@ -166,8 +168,7 @@ class TextModesApi(Resource): @account_initialization_required def get(self, app_model): try: - parser = reqparse.RequestParser() - parser.add_argument("language", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("language", type=str, required=True, location="args") args = parser.parse_args() response = AudioService.transcript_tts_voices( diff --git a/api/controllers/console/app/completion.py b/api/controllers/console/app/completion.py index 2f7b90e7fb..d7bc3cc20d 100644 --- a/api/controllers/console/app/completion.py +++ b/api/controllers/console/app/completion.py @@ -2,7 +2,7 @@ import logging from flask import request from flask_restx import Resource, fields, reqparse -from werkzeug.exceptions import Forbidden, InternalServerError, NotFound +from werkzeug.exceptions import InternalServerError, NotFound import services from controllers.console import api, console_ns @@ -15,7 +15,7 @@ from controllers.console.app.error import ( ProviderQuotaExceededError, ) from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom @@ -64,13 +64,15 @@ class CompletionMessageApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) def post(self, app_model): - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, location="json", default="") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("model_config", type=dict, required=True, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, location="json", default="") + .add_argument("files", type=list, required=False, location="json") + .add_argument("model_config", type=dict, required=True, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json") + ) args = parser.parse_args() streaming = args["response_mode"] != "blocking" @@ -151,22 +153,19 @@ class ChatMessageApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT]) + @edit_permission_required def post(self, app_model): - if not isinstance(current_user, Account): - raise Forbidden() - - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, required=True, location="json") - parser.add_argument("files", type=list, required=False, 
location="json") - parser.add_argument("model_config", type=dict, required=True, location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, required=True, location="json") + .add_argument("files", type=list, required=False, location="json") + .add_argument("model_config", type=dict, required=True, location="json") + .add_argument("conversation_id", type=uuid_value, location="json") + .add_argument("parent_message_id", type=uuid_value, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json") + ) args = parser.parse_args() streaming = args["response_mode"] != "blocking" diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index f104ab5dee..d5fa70d678 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -1,17 +1,16 @@ from datetime import datetime -import pytz # pip install pytz +import pytz import sqlalchemy as sa -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy import func, or_ from sqlalchemy.orm import joinedload -from werkzeug.exceptions import Forbidden, NotFound +from werkzeug.exceptions import NotFound from controllers.console import api, console_ns from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from fields.conversation_fields import ( @@ -22,8 +21,8 @@ from fields.conversation_fields import ( ) from libs.datetime_utils import naive_utc_now from libs.helper import DatetimeString -from libs.login import login_required -from models import Account, Conversation, EndUser, Message, MessageAnnotation +from libs.login import current_account_with_tenant, login_required +from models import Conversation, EndUser, Message, MessageAnnotation from models.model import AppMode from services.conversation_service import ConversationService from services.errors.conversation import ConversationNotExistsError @@ -57,18 +56,24 @@ class CompletionConversationApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) @marshal_with(conversation_pagination_fields) + @edit_permission_required def get(self, app_model): - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("keyword", type=str, location="args") - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument( - "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", 
location="args" + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument( + "annotation_status", + type=str, + choices=["annotated", "not_annotated", "all"], + default="all", + location="args", + ) + .add_argument("page", type=int_range(1, 99999), default=1, location="args") + .add_argument("limit", type=int_range(1, 100), default=20, location="args") ) - parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") - parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() query = sa.select(Conversation).where( @@ -84,6 +89,7 @@ class CompletionConversationApi(Resource): ) account = current_user + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -137,9 +143,8 @@ class CompletionConversationDetailApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) @marshal_with(conversation_message_detail_fields) + @edit_permission_required def get(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() conversation_id = str(conversation_id) return _get_conversation(app_model, conversation_id) @@ -154,14 +159,12 @@ class CompletionConversationDetailApi(Resource): @login_required @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) + @edit_permission_required def delete(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() conversation_id = str(conversation_id) try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") ConversationService.delete(app_model, conversation_id, current_user) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @@ -206,26 +209,32 @@ class ChatConversationApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(conversation_with_summary_pagination_fields) + @edit_permission_required def get(self, app_model): - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("keyword", type=str, location="args") - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument( - "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args" - ) - parser.add_argument("message_count_gte", type=int_range(1, 99999), required=False, location="args") - parser.add_argument("page", type=int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - parser.add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + 
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument( + "annotation_status", + type=str, + choices=["annotated", "not_annotated", "all"], + default="all", + location="args", + ) + .add_argument("message_count_gte", type=int_range(1, 99999), required=False, location="args") + .add_argument("page", type=int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + .add_argument( + "sort_by", + type=str, + choices=["created_at", "-created_at", "updated_at", "-updated_at"], + required=False, + default="-updated_at", + location="args", + ) ) args = parser.parse_args() @@ -260,6 +269,7 @@ class ChatConversationApi(Resource): ) account = current_user + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -309,7 +319,7 @@ class ChatConversationApi(Resource): ) if app_model.mode == AppMode.ADVANCED_CHAT: - query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER.value) + query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER) match args["sort_by"]: case "created_at": @@ -341,9 +351,8 @@ class ChatConversationDetailApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(conversation_detail_fields) + @edit_permission_required def get(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() conversation_id = str(conversation_id) return _get_conversation(app_model, conversation_id) @@ -358,14 +367,12 @@ class ChatConversationDetailApi(Resource): @login_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @account_initialization_required + @edit_permission_required def delete(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() conversation_id = str(conversation_id) try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") ConversationService.delete(app_model, conversation_id, current_user) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @@ -374,6 +381,7 @@ class ChatConversationDetailApi(Resource): def _get_conversation(app_model, conversation_id): + current_user, _ = current_account_with_tenant() conversation = ( db.session.query(Conversation) .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py index 8a65a89963..d4c0b5697f 100644 --- a/api/controllers/console/app/conversation_variables.py +++ b/api/controllers/console/app/conversation_variables.py @@ -29,8 +29,7 @@ class ConversationVariablesApi(Resource): @get_app_model(mode=AppMode.ADVANCED_CHAT) @marshal_with(paginated_conversation_variable_fields) def get(self, app_model): - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", type=str, location="args") + parser = reqparse.RequestParser().add_argument("conversation_id", type=str, location="args") args = parser.parse_args() stmt = ( diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 230ccdca15..b6ca97ab4f 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -1,6 +1,5 @@ from collections.abc import 
Sequence -from flask_login import current_user from flask_restx import Resource, fields, reqparse from controllers.console import api, console_ns @@ -17,7 +16,7 @@ from core.helper.code_executor.python3.python3_code_provider import Python3CodeP from core.llm_generator.llm_generator import LLMGenerator from core.model_runtime.errors.invoke import InvokeError from extensions.ext_database import db -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import App from services.workflow_service import WorkflowService @@ -43,16 +42,18 @@ class RuleGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") - parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, nullable=False, location="json") + .add_argument("no_variable", type=bool, required=True, default=False, location="json") + ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: rules = LLMGenerator.generate_rule_config( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=args["no_variable"], @@ -93,17 +94,19 @@ class RuleCodeGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") - parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") - parser.add_argument("code_language", type=str, required=False, default="javascript", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, nullable=False, location="json") + .add_argument("no_variable", type=bool, required=True, default=False, location="json") + .add_argument("code_language", type=str, required=False, default="javascript", location="json") + ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: code_result = LLMGenerator.generate_code( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], code_language=args["code_language"], @@ -140,15 +143,17 @@ class RuleStructuredOutputGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, nullable=False, location="json") + ) args = 
parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: structured_output = LLMGenerator.generate_structured_output( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], ) @@ -189,15 +194,18 @@ class InstructionGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("flow_id", type=str, required=True, default="", location="json") - parser.add_argument("node_id", type=str, required=False, default="", location="json") - parser.add_argument("current", type=str, required=False, default="", location="json") - parser.add_argument("language", type=str, required=False, default="javascript", location="json") - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") - parser.add_argument("ideal_output", type=str, required=False, default="", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("flow_id", type=str, required=True, default="", location="json") + .add_argument("node_id", type=str, required=False, default="", location="json") + .add_argument("current", type=str, required=False, default="", location="json") + .add_argument("language", type=str, required=False, default="javascript", location="json") + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, nullable=False, location="json") + .add_argument("ideal_output", type=str, required=False, default="", location="json") + ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() code_template = ( Python3CodeProvider.get_default_code() if args["language"] == "python" @@ -222,21 +230,21 @@ class InstructionGenerateApi(Resource): match node_type: case "llm": return LLMGenerator.generate_rule_config( - current_user.current_tenant_id, + current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=True, ) case "agent": return LLMGenerator.generate_rule_config( - current_user.current_tenant_id, + current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=True, ) case "code": return LLMGenerator.generate_code( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], code_language=args["language"], @@ -245,7 +253,7 @@ class InstructionGenerateApi(Resource): return {"error": f"invalid node type: {node_type}"} if args["node_id"] == "" and args["current"] != "": # For legacy app without a workflow return LLMGenerator.instruction_modify_legacy( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, flow_id=args["flow_id"], current=args["current"], instruction=args["instruction"], @@ -254,7 +262,7 @@ class InstructionGenerateApi(Resource): ) if args["node_id"] != "" and args["current"] != "": # For workflow node return LLMGenerator.instruction_modify_workflow( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, flow_id=args["flow_id"], node_id=args["node_id"], current=args["current"], @@ -293,8 +301,7 @@ class InstructionGenerationTemplateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - 
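
The handlers above now unpack current_account_with_tenant() instead of reading flask_login.current_user directly. Its implementation is not part of this diff; the sketch below is only an assumed shape, inferred from call sites that unpack an (account, tenant_id) pair and pass the id straight to tenant-scoped services:

# Assumed shape only; the real helper lives in libs.login and may differ.
from flask_login import current_user
from werkzeug.exceptions import Unauthorized

from models.account import Account


def current_account_with_tenant_sketch() -> tuple[Account, str]:
    # current_user is a proxy; isinstance works because the proxy forwards __class__.
    if not isinstance(current_user, Account):
        raise Unauthorized()
    tenant_id = current_user.current_tenant_id
    if tenant_id is None:
        raise Unauthorized("no active workspace")
    return current_user, tenant_id
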
parser.add_argument("type", type=str, required=True, default=False, location="json") + parser = reqparse.RequestParser().add_argument("type", type=str, required=True, default=False, location="json") args = parser.parse_args() match args["type"]: case "prompt": diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index b9a383ee61..3700c6b1d0 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -1,16 +1,15 @@ import json from enum import StrEnum -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import NotFound from controllers.console import api, console_ns from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from extensions.ext_database import db from fields.app_fields import app_server_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.model import AppMCPServer @@ -25,9 +24,9 @@ class AppMCPServerController(Resource): @api.doc(description="Get MCP server configuration for an application") @api.doc(params={"app_id": "Application ID"}) @api.response(200, "MCP server configuration retrieved successfully", app_server_fields) - @setup_required @login_required @account_initialization_required + @setup_required @get_app_model @marshal_with(app_server_fields) def get(self, app_model): @@ -48,17 +47,19 @@ class AppMCPServerController(Resource): ) @api.response(201, "MCP server configuration created successfully", app_server_fields) @api.response(403, "Insufficient permissions") - @setup_required - @login_required @account_initialization_required @get_app_model + @login_required + @setup_required @marshal_with(app_server_fields) + @edit_permission_required def post(self, app_model): - if not current_user.is_editor: - raise NotFound() - parser = reqparse.RequestParser() - parser.add_argument("description", type=str, required=False, location="json") - parser.add_argument("parameters", type=dict, required=True, location="json") + _, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("description", type=str, required=False, location="json") + .add_argument("parameters", type=dict, required=True, location="json") + ) args = parser.parse_args() description = args.get("description") @@ -71,7 +72,7 @@ class AppMCPServerController(Resource): parameters=json.dumps(args["parameters"], ensure_ascii=False), status=AppMCPServerStatus.ACTIVE, app_id=app_model.id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, server_code=AppMCPServer.generate_server_code(16), ) db.session.add(server) @@ -95,19 +96,20 @@ class AppMCPServerController(Resource): @api.response(200, "MCP server configuration updated successfully", app_server_fields) @api.response(403, "Insufficient permissions") @api.response(404, "Server not found") - @setup_required - @login_required - @account_initialization_required @get_app_model + @login_required + @setup_required + @account_initialization_required @marshal_with(app_server_fields) + @edit_permission_required def put(self, app_model): - if not current_user.is_editor: - raise NotFound() - parser = reqparse.RequestParser() - parser.add_argument("id", type=str, 
required=True, location="json") - parser.add_argument("description", type=str, required=False, location="json") - parser.add_argument("parameters", type=dict, required=True, location="json") - parser.add_argument("status", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("id", type=str, required=True, location="json") + .add_argument("description", type=str, required=False, location="json") + .add_argument("parameters", type=dict, required=True, location="json") + .add_argument("status", type=str, required=False, location="json") + ) args = parser.parse_args() server = db.session.query(AppMCPServer).where(AppMCPServer.id == args["id"]).first() if not server: @@ -142,13 +144,13 @@ class AppMCPServerRefreshController(Resource): @login_required @account_initialization_required @marshal_with(app_server_fields) + @edit_permission_required def get(self, server_id): - if not current_user.is_editor: - raise NotFound() + _, current_tenant_id = current_account_with_tenant() server = ( db.session.query(AppMCPServer) .where(AppMCPServer.id == server_id) - .where(AppMCPServer.tenant_id == current_user.current_tenant_id) + .where(AppMCPServer.tenant_id == current_tenant_id) .first() ) if not server: diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 46523feccc..7e0ae370ef 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -3,7 +3,7 @@ import logging from flask_restx import Resource, fields, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy import exists, select -from werkzeug.exceptions import Forbidden, InternalServerError, NotFound +from werkzeug.exceptions import InternalServerError, NotFound from controllers.console import api, console_ns from controllers.console.app.error import ( @@ -17,6 +17,7 @@ from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDi from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from core.app.entities.app_invoke_entities import InvokeFrom @@ -26,8 +27,7 @@ from extensions.ext_database import db from fields.conversation_fields import annotation_fields, message_detail_fields from libs.helper import uuid_value from libs.infinite_scroll_pagination import InfiniteScrollPagination -from libs.login import current_user, login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback from services.annotation_service import AppAnnotationService from services.errors.conversation import ConversationNotExistsError @@ -56,19 +56,19 @@ class ChatMessageListApi(Resource): ) @api.response(200, "Success", message_infinite_scroll_pagination_fields) @api.response(404, "Conversation not found") - @setup_required @login_required - @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @account_initialization_required + @setup_required + @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(message_infinite_scroll_pagination_fields) + @edit_permission_required def get(self, app_model): - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", required=True, 
type=uuid_value, location="args") - parser.add_argument("first_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("conversation_id", required=True, type=uuid_value, location="args") + .add_argument("first_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() conversation = ( @@ -154,12 +154,13 @@ class MessageFeedbackApi(Resource): @login_required @account_initialization_required def post(self, app_model): - if current_user is None: - raise Forbidden() + current_user, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("message_id", required=True, type=uuid_value, location="json") - parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", required=True, type=uuid_value, location="json") + .add_argument("rating", type=str, choices=["like", "dislike", None], location="json") + ) args = parser.parse_args() message_id = str(args["message_id"]) @@ -211,23 +212,21 @@ class MessageAnnotationApi(Resource): ) @api.response(200, "Annotation created successfully", annotation_fields) @api.response(403, "Insufficient permissions") + @marshal_with(annotation_fields) + @get_app_model @setup_required @login_required - @account_initialization_required @cloud_edition_billing_resource_check("annotation") - @get_app_model - @marshal_with(annotation_fields) + @account_initialization_required + @edit_permission_required def post(self, app_model): - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("message_id", required=False, type=uuid_value, location="json") - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") - parser.add_argument("annotation_reply", required=False, type=dict, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", required=False, type=uuid_value, location="json") + .add_argument("question", required=True, type=str, location="json") + .add_argument("answer", required=True, type=str, location="json") + .add_argument("annotation_reply", required=False, type=dict, location="json") + ) args = parser.parse_args() annotation = AppAnnotationService.up_insert_app_annotation_from_message(args, app_model.id) @@ -270,6 +269,7 @@ class MessageSuggestedQuestionApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) def get(self, app_model, message_id): + current_user, _ = current_account_with_tenant() message_id = str(message_id) try: @@ -304,12 +304,12 @@ class MessageApi(Resource): @api.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) @api.response(200, "Message retrieved successfully", message_detail_fields) @api.response(404, "Message not found") + @get_app_model @setup_required @login_required @account_initialization_required - @get_app_model @marshal_with(message_detail_fields) - def get(self, app_model, message_id): + def get(self, app_model, message_id: str): message_id = str(message_id) message = db.session.query(Message).where(Message.id == message_id, 
Message.app_id == app_model.id).first() diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py index 11df511840..72ce8a7ddf 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -2,7 +2,6 @@ import json from typing import cast from flask import request -from flask_login import current_user from flask_restx import Resource, fields from werkzeug.exceptions import Forbidden @@ -14,8 +13,8 @@ from core.tools.tool_manager import ToolManager from core.tools.utils.configuration import ToolParameterConfigurationManager from events.app_event import app_model_config_was_updated from extensions.ext_database import db -from libs.login import login_required -from models.account import Account +from libs.datetime_utils import naive_utc_now +from libs.login import current_account_with_tenant, login_required from models.model import AppMode, AppModelConfig from services.app_model_config_service import AppModelConfigService @@ -53,16 +52,14 @@ class ModelConfigResource(Resource): @get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION]) def post(self, app_model): """Modify app model config""" - if not isinstance(current_user, Account): - raise Forbidden() + current_user, current_tenant_id = current_account_with_tenant() if not current_user.has_edit_permission: raise Forbidden() - assert current_user.current_tenant_id is not None, "The tenant information should be loaded." # validate config model_configuration = AppModelConfigService.validate_configuration( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, config=cast(dict, request.json), app_mode=AppMode.value_of(app_model.mode), ) @@ -90,16 +87,16 @@ class ModelConfigResource(Resource): if not isinstance(tool, dict) or len(tool.keys()) <= 3: continue - agent_tool_entity = AgentToolEntity(**tool) + agent_tool_entity = AgentToolEntity.model_validate(tool) # get tool try: tool_runtime = ToolManager.get_agent_tool_runtime( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, app_id=app_model.id, agent_tool=agent_tool_entity, ) manager = ToolParameterConfigurationManager( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, tool_runtime=tool_runtime, provider_name=agent_tool_entity.provider_id, provider_type=agent_tool_entity.provider_type, @@ -124,7 +121,7 @@ class ModelConfigResource(Resource): # encrypt agent tool parameters if it's secret-input agent_mode = new_app_model_config.agent_mode_dict for tool in agent_mode.get("tools") or []: - agent_tool_entity = AgentToolEntity(**tool) + agent_tool_entity = AgentToolEntity.model_validate(tool) # get tool key = f"{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}" @@ -133,7 +130,7 @@ class ModelConfigResource(Resource): else: try: tool_runtime = ToolManager.get_agent_tool_runtime( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, app_id=app_model.id, agent_tool=agent_tool_entity, ) @@ -141,7 +138,7 @@ class ModelConfigResource(Resource): continue manager = ToolParameterConfigurationManager( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, tool_runtime=tool_runtime, provider_name=agent_tool_entity.provider_id, provider_type=agent_tool_entity.provider_type, @@ -172,6 +169,8 @@ class ModelConfigResource(Resource): db.session.flush() app_model.app_model_config_id = new_app_model_config.id + app_model.updated_by = current_user.id + 
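
AgentToolEntity.model_validate(tool), used in the model_config changes above, is the Pydantic v2 idiom for the AgentToolEntity(**tool) it replaces: the mapping goes through validation whole instead of being keyword-unpacked. A self-contained sketch with a hypothetical stand-in model:

from pydantic import BaseModel


class AgentToolEntityDemo(BaseModel):  # hypothetical stand-in for AgentToolEntity
    provider_id: str
    provider_type: str
    tool_name: str


tool = {"provider_id": "dalle", "provider_type": "builtin", "tool_name": "draw"}
entity = AgentToolEntityDemo.model_validate(tool)  # validates and coerces fields
assert entity.tool_name == "draw"
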
app_model.updated_at = naive_utc_now() db.session.commit() app_model_config_was_updated.send(app_model, app_model_config=new_app_model_config) diff --git a/api/controllers/console/app/ops_trace.py b/api/controllers/console/app/ops_trace.py index 981974e842..1d80314774 100644 --- a/api/controllers/console/app/ops_trace.py +++ b/api/controllers/console/app/ops_trace.py @@ -30,8 +30,7 @@ class TraceAppConfigApi(Resource): @login_required @account_initialization_required def get(self, app_id): - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("tracing_provider", type=str, required=True, location="args") args = parser.parse_args() try: @@ -63,9 +62,11 @@ class TraceAppConfigApi(Resource): @account_initialization_required def post(self, app_id): """Create a new trace app configuration""" - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", type=str, required=True, location="json") - parser.add_argument("tracing_config", type=dict, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tracing_provider", type=str, required=True, location="json") + .add_argument("tracing_config", type=dict, required=True, location="json") + ) args = parser.parse_args() try: @@ -99,9 +100,11 @@ class TraceAppConfigApi(Resource): @account_initialization_required def patch(self, app_id): """Update an existing trace app configuration""" - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", type=str, required=True, location="json") - parser.add_argument("tracing_config", type=dict, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tracing_provider", type=str, required=True, location="json") + .add_argument("tracing_config", type=dict, required=True, location="json") + ) args = parser.parse_args() try: @@ -129,8 +132,7 @@ class TraceAppConfigApi(Resource): @account_initialization_required def delete(self, app_id): """Delete an existing trace app configuration""" - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("tracing_provider", type=str, required=True, location="args") args = parser.parse_args() try: diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index 95befc5df9..c4d640bf0e 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -1,4 +1,3 @@ -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import Forbidden, NotFound @@ -9,30 +8,36 @@ from controllers.console.wraps import account_initialization_required, setup_req from extensions.ext_database import db from fields.app_fields import app_site_fields from libs.datetime_utils import naive_utc_now -from libs.login import login_required -from models import Account, Site +from libs.login import current_account_with_tenant, login_required +from models import Site def parse_app_site_args(): - parser = reqparse.RequestParser() - parser.add_argument("title", type=str, required=False, location="json") - parser.add_argument("icon_type", type=str, required=False, location="json") - parser.add_argument("icon", type=str, required=False, location="json") - parser.add_argument("icon_background", type=str, required=False, location="json") - parser.add_argument("description", 
type=str, required=False, location="json") - parser.add_argument("default_language", type=supported_language, required=False, location="json") - parser.add_argument("chat_color_theme", type=str, required=False, location="json") - parser.add_argument("chat_color_theme_inverted", type=bool, required=False, location="json") - parser.add_argument("customize_domain", type=str, required=False, location="json") - parser.add_argument("copyright", type=str, required=False, location="json") - parser.add_argument("privacy_policy", type=str, required=False, location="json") - parser.add_argument("custom_disclaimer", type=str, required=False, location="json") - parser.add_argument( - "customize_token_strategy", type=str, choices=["must", "allow", "not_allow"], required=False, location="json" + parser = ( + reqparse.RequestParser() + .add_argument("title", type=str, required=False, location="json") + .add_argument("icon_type", type=str, required=False, location="json") + .add_argument("icon", type=str, required=False, location="json") + .add_argument("icon_background", type=str, required=False, location="json") + .add_argument("description", type=str, required=False, location="json") + .add_argument("default_language", type=supported_language, required=False, location="json") + .add_argument("chat_color_theme", type=str, required=False, location="json") + .add_argument("chat_color_theme_inverted", type=bool, required=False, location="json") + .add_argument("customize_domain", type=str, required=False, location="json") + .add_argument("copyright", type=str, required=False, location="json") + .add_argument("privacy_policy", type=str, required=False, location="json") + .add_argument("custom_disclaimer", type=str, required=False, location="json") + .add_argument( + "customize_token_strategy", + type=str, + choices=["must", "allow", "not_allow"], + required=False, + location="json", + ) + .add_argument("prompt_public", type=bool, required=False, location="json") + .add_argument("show_workflow_steps", type=bool, required=False, location="json") + .add_argument("use_icon_as_answer_icon", type=bool, required=False, location="json") ) - parser.add_argument("prompt_public", type=bool, required=False, location="json") - parser.add_argument("show_workflow_steps", type=bool, required=False, location="json") - parser.add_argument("use_icon_as_answer_icon", type=bool, required=False, location="json") return parser.parse_args() @@ -76,9 +81,10 @@ class AppSite(Resource): @marshal_with(app_site_fields) def post(self, app_model): args = parse_app_site_args() + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be editor, admin, or owner - if not current_user.is_editor: + if not current_user.has_edit_permission: raise Forbidden() site = db.session.query(Site).where(Site.app_id == app_model.id).first() @@ -107,8 +113,6 @@ class AppSite(Resource): if value is not None: setattr(site, attr_name, value) - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") site.updated_by = current_user.id site.updated_at = naive_utc_now() db.session.commit() @@ -131,6 +135,8 @@ class AppSiteAccessTokenReset(Resource): @marshal_with(app_site_fields) def post(self, app_model): # The role of the current user in the ta table must be admin or owner + current_user, _ = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() @@ -140,8 +146,6 @@ class AppSiteAccessTokenReset(Resource): raise NotFound site.code = 
Site.generate_code(16) - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") site.updated_by = current_user.id site.updated_at = naive_utc_now() db.session.commit() diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index 6471b843c6..0917a6e53c 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -4,7 +4,6 @@ from decimal import Decimal import pytz import sqlalchemy as sa from flask import jsonify -from flask_login import current_user from flask_restx import Resource, fields, reqparse from controllers.console import api, console_ns @@ -13,7 +12,7 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from libs.helper import DatetimeString -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import AppMode, Message @@ -37,11 +36,13 @@ class DailyMessageStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -52,7 +53,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -109,13 +111,15 @@ class DailyConversationStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -127,7 +131,7 @@ class DailyConversationStatistic(Resource): sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"), ) .select_from(Message) - .where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER.value) + .where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER) ) if args["start"]: @@ -175,11 +179,13 @@ class DailyTerminalsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d 
%H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -190,8 +196,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -247,11 +253,13 @@ class DailyTokenCostStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -263,8 +271,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -322,11 +330,13 @@ class AverageSessionInteractionStatistic(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -345,8 +355,8 @@ FROM WHERE c.app_id = :app_id AND m.invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -413,11 +423,13 @@ class UserSatisfactionRateStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d 
%H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -432,8 +444,8 @@ LEFT JOIN WHERE m.app_id = :app_id AND m.invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -494,11 +506,13 @@ class AverageResponseTimeStatistic(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -509,8 +523,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -566,11 +580,13 @@ class TokensPerSecondStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -584,8 +600,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 1f5cbbeca5..56771ed420 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -12,7 +12,7 @@ import services from controllers.console import api, console_ns from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError from core.app.app_config.features.file_upload.manager import FileUploadConfigManager from core.app.apps.base_app_queue_manager import AppQueueManager @@ -25,10 
+25,10 @@ from factories import file_factory, variable_factory from fields.workflow_fields import workflow_fields, workflow_pagination_fields from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper +from libs.datetime_utils import naive_utc_now from libs.helper import TimestampField, uuid_value -from libs.login import current_user, login_required +from libs.login import current_account_with_tenant, login_required from models import App -from models.account import Account from models.model import AppMode from models.workflow import Workflow from services.app_generate_service import AppGenerateService @@ -69,15 +69,11 @@ class DraftWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def get(self, app_model: App): """ Get draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() - # fetch draft workflow by app_model workflow_service = WorkflowService() workflow = workflow_service.get_draft_workflow(app_model=app_model) @@ -109,24 +105,24 @@ class DraftWorkflowApi(Resource): @api.response(200, "Draft workflow synced successfully", workflow_fields) @api.response(400, "Invalid workflow configuration") @api.response(403, "Permission denied") + @edit_permission_required def post(self, app_model: App): """ Sync draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() content_type = request.headers.get("Content-Type", "") if "application/json" in content_type: - parser = reqparse.RequestParser() - parser.add_argument("graph", type=dict, required=True, nullable=False, location="json") - parser.add_argument("features", type=dict, required=True, nullable=False, location="json") - parser.add_argument("hash", type=str, required=False, location="json") - parser.add_argument("environment_variables", type=list, required=True, location="json") - parser.add_argument("conversation_variables", type=list, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("graph", type=dict, required=True, nullable=False, location="json") + .add_argument("features", type=dict, required=True, nullable=False, location="json") + .add_argument("hash", type=str, required=False, location="json") + .add_argument("environment_variables", type=list, required=True, location="json") + .add_argument("conversation_variables", type=list, required=False, location="json") + ) args = parser.parse_args() elif "text/plain" in content_type: try: @@ -148,10 +144,6 @@ class DraftWorkflowApi(Resource): return {"message": "Invalid JSON data"}, 400 else: abort(415) - - if not isinstance(current_user, Account): - raise Forbidden() - workflow_service = WorkflowService() try: @@ -205,24 +197,21 @@ class AdvancedChatDraftWorkflowRunApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App): """ Run draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() + 
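
The fluent parser style adopted across this diff works because flask_restx's RequestParser.add_argument returns the parser itself, so the chained and imperative forms below register identical arguments. A quick standalone check:

from flask_restx import reqparse

chained = (
    reqparse.RequestParser()
    .add_argument("inputs", type=dict, location="json")
    .add_argument("query", type=str, required=True, location="json", default="")
)

imperative = reqparse.RequestParser()
imperative.add_argument("inputs", type=dict, location="json")
imperative.add_argument("query", type=str, required=True, location="json", default="")

# Both parsers carry the same registered argument names.
assert [a.name for a in chained.args] == [a.name for a in imperative.args]
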
current_user, _ = current_account_with_tenant() - if not isinstance(current_user, Account): - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") - parser.add_argument("query", type=str, required=True, location="json", default="") - parser.add_argument("files", type=list, location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, location="json") + .add_argument("query", type=str, required=True, location="json", default="") + .add_argument("files", type=list, location="json") + .add_argument("conversation_id", type=uuid_value, location="json") + .add_argument("parent_message_id", type=uuid_value, required=False, location="json") + ) args = parser.parse_args() @@ -270,18 +259,13 @@ class AdvancedChatDraftRunIterationNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow iteration node """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -322,18 +306,13 @@ class WorkflowDraftRunIterationNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow iteration node """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -374,19 +353,13 @@ class AdvancedChatDraftRunLoopNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow loop node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -427,19 +400,13 @@ class WorkflowDraftRunLoopNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow loop node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the 
current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -479,20 +446,17 @@ class DraftWorkflowRunApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App): """ Run draft workflow """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("files", type=list, required=False, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("files", type=list, required=False, location="json") + ) args = parser.parse_args() external_trace_id = get_external_trace_id(request) @@ -525,17 +489,11 @@ class WorkflowTaskStopApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, task_id: str): """ Stop workflow task """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # Stop using both mechanisms for backward compatibility # Legacy stop flag mechanism (without user check) AppQueueManager.set_stop_flag_no_user_check(task_id) @@ -567,21 +525,18 @@ class DraftWorkflowNodeRunApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_run_node_execution_fields) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("query", type=str, required=False, location="json", default="") - parser.add_argument("files", type=list, location="json", default=[]) + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("query", type=str, required=False, location="json", default="") + .add_argument("files", type=list, location="json", default=[]) + ) args = parser.parse_args() user_inputs = args.get("inputs") @@ -621,17 +576,11 @@ class PublishedWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def get(self, app_model: App): """ Get published workflow """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The 
role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # fetch published workflow by app_model workflow_service = WorkflowService() workflow = workflow_service.get_published_workflow(app_model=app_model) @@ -643,19 +592,17 @@ class PublishedWorkflowApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App): """ Publish workflow """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("marked_name", type=str, required=False, default="", location="json") - parser.add_argument("marked_comment", type=str, required=False, default="", location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("marked_name", type=str, required=False, default="", location="json") + .add_argument("marked_comment", type=str, required=False, default="", location="json") + ) args = parser.parse_args() # Validate name and comment length @@ -674,8 +621,12 @@ class PublishedWorkflowApi(Resource): marked_comment=args.marked_comment or "", ) - app_model.workflow_id = workflow.id - db.session.commit() # NOTE: this is necessary for update app_model.workflow_id + # Update app_model within the same session to ensure atomicity + app_model_in_session = session.get(App, app_model.id) + if app_model_in_session: + app_model_in_session.workflow_id = workflow.id + app_model_in_session.updated_by = current_user.id + app_model_in_session.updated_at = naive_utc_now() workflow_created_at = TimestampField().format(workflow.created_at) @@ -697,17 +648,11 @@ class DefaultBlockConfigsApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def get(self, app_model: App): """ Get default block config """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # Get default block configs workflow_service = WorkflowService() return workflow_service.get_default_block_configs() @@ -724,18 +669,12 @@ class DefaultBlockConfigApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def get(self, app_model: App, block_type: str): """ Get default block config """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("q", type=str, location="args") + parser = reqparse.RequestParser().add_argument("q", type=str, location="args") args = parser.parse_args() q = args.get("q") @@ -764,24 +703,23 @@ class ConvertToWorkflowApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.COMPLETION]) + @edit_permission_required def post(self, app_model: App): """ Convert basic mode of chatbot app to workflow mode Convert expert mode of chatbot app to workflow mode Convert Completion 
App to Workflow App """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() if request.data: - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=False, nullable=True, location="json") - parser.add_argument("icon_type", type=str, required=False, nullable=True, location="json") - parser.add_argument("icon", type=str, required=False, nullable=True, location="json") - parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, nullable=True, location="json") + .add_argument("icon_type", type=str, required=False, nullable=True, location="json") + .add_argument("icon", type=str, required=False, nullable=True, location="json") + .add_argument("icon_background", type=str, required=False, nullable=True, location="json") + ) args = parser.parse_args() else: args = {} @@ -807,21 +745,20 @@ class PublishedAllWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_pagination_fields) + @edit_permission_required def get(self, app_model: App): """ Get published workflows """ + current_user, _ = current_account_with_tenant() - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") - parser.add_argument("user_id", type=str, required=False, location="args") - parser.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + .add_argument("user_id", type=str, required=False, location="args") + .add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args") + ) args = parser.parse_args() page = int(args.get("page", 1)) limit = int(args.get("limit", 10)) @@ -874,19 +811,17 @@ class WorkflowByIdApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def patch(self, app_model: App, workflow_id: str): """ Update workflow attributes """ - if not isinstance(current_user, Account): - raise Forbidden() - # Check permission - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("marked_name", type=str, required=False, location="json") - parser.add_argument("marked_comment", type=str, required=False, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("marked_name", type=str, required=False, location="json") + .add_argument("marked_comment", type=str, required=False, location="json") + ) args = parser.parse_args() # Validate name and comment length @@ -929,16 +864,11 @@ class WorkflowByIdApi(Resource): 
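# The hunks in this file swap every inline "admin/owner/editor" check for the
# @edit_permission_required decorator imported from controllers.console.wraps.
# Its real implementation is not shown in this patch; the following is only a
# minimal sketch of the contract the call sites rely on, built from the same
# helpers the patch itself imports (the function name here is hypothetical).
from collections.abc import Callable
from functools import wraps

from werkzeug.exceptions import Forbidden

from libs.login import current_account_with_tenant


def edit_permission_required_sketch(view: Callable) -> Callable:
    """Reject the request unless the current account may edit the workspace."""

    @wraps(view)
    def decorated_view(*args, **kwargs):
        account, _ = current_account_with_tenant()
        # Same property the removed inline checks consulted.
        if not account.has_edit_permission:
            raise Forbidden()
        return view(*args, **kwargs)

    return decorated_view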
@login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def delete(self, app_model: App, workflow_id: str): """ Delete workflow """ - if not isinstance(current_user, Account): - raise Forbidden() - # Check permission - if not current_user.has_edit_permission: - raise Forbidden() - workflow_service = WorkflowService() # Create a session and manage the transaction diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index 8e24be4fa7..cbf4e84ff0 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -42,33 +42,35 @@ class WorkflowAppLogApi(Resource): """ Get workflow app logs """ - parser = reqparse.RequestParser() - parser.add_argument("keyword", type=str, location="args") - parser.add_argument( - "status", type=str, choices=["succeeded", "failed", "stopped", "partial-succeeded"], location="args" + parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument( + "status", type=str, choices=["succeeded", "failed", "stopped", "partial-succeeded"], location="args" + ) + .add_argument( + "created_at__before", type=str, location="args", help="Filter logs created before this timestamp" + ) + .add_argument( + "created_at__after", type=str, location="args", help="Filter logs created after this timestamp" + ) + .add_argument( + "created_by_end_user_session_id", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument( + "created_by_account", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument("page", type=int_range(1, 99999), default=1, location="args") + .add_argument("limit", type=int_range(1, 100), default=20, location="args") ) - parser.add_argument( - "created_at__before", type=str, location="args", help="Filter logs created before this timestamp" - ) - parser.add_argument( - "created_at__after", type=str, location="args", help="Filter logs created after this timestamp" - ) - parser.add_argument( - "created_by_end_user_session_id", - type=str, - location="args", - required=False, - default=None, - ) - parser.add_argument( - "created_by_account", - type=str, - location="args", - required=False, - default=None, - ) - parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") - parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() args.status = WorkflowExecutionStatus(args.status) if args.status else None diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py index da6b56d026..0722eb40d2 100644 --- a/api/controllers/console/app/workflow_draft_variable.py +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -22,8 +22,7 @@ from extensions.ext_database import db from factories.file_factory import build_from_mapping, build_from_mappings from factories.variable_factory import build_segment_with_type from libs.login import current_user, login_required -from models import App, AppMode -from models.account import Account +from models import Account, App, AppMode from models.workflow import WorkflowDraftVariable from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService from services.workflow_service import WorkflowService @@ -58,16 +57,18 @@ def _serialize_var_value(variable: 
WorkflowDraftVariable): def _create_pagination_parser(): - parser = reqparse.RequestParser() - parser.add_argument( - "page", - type=inputs.int_range(1, 100_000), - required=False, - default=1, - location="args", - help="the page of data requested", + parser = ( + reqparse.RequestParser() + .add_argument( + "page", + type=inputs.int_range(1, 100_000), + required=False, + default=1, + location="args", + help="the page of data requested", + ) + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") ) - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") return parser @@ -320,10 +321,11 @@ class VariableApi(Resource): # "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4" # } - parser = reqparse.RequestParser() - parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") - # Parse 'value' field as-is to maintain its original data structure - parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") + .add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json") + ) draft_var_srv = WorkflowDraftVariableService( session=db.session(), diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 23ba63845c..311aa81279 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -1,6 +1,5 @@ from typing import cast -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range @@ -9,15 +8,81 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from fields.workflow_run_fields import ( advanced_chat_workflow_run_pagination_fields, + workflow_run_count_fields, workflow_run_detail_fields, workflow_run_node_execution_list_fields, workflow_run_pagination_fields, ) +from libs.custom_inputs import time_duration from libs.helper import uuid_value -from libs.login import login_required -from models import Account, App, AppMode, EndUser +from libs.login import current_user, login_required +from models import Account, App, AppMode, EndUser, WorkflowRunTriggeredFrom from services.workflow_run_service import WorkflowRunService +# Workflow run status choices for filtering +WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"] + + +def _parse_workflow_run_list_args(): + """ + Parse common arguments for workflow run list endpoints. 
+ + Returns: + Parsed arguments containing last_id, limit, status, and triggered_from filters + """ + parser = reqparse.RequestParser() + parser.add_argument("last_id", type=uuid_value, location="args") + parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser.add_argument( + "status", + type=str, + choices=WORKFLOW_RUN_STATUS_CHOICES, + location="args", + required=False, + ) + parser.add_argument( + "triggered_from", + type=str, + choices=["debugging", "app-run"], + location="args", + required=False, + help="Filter by trigger source: debugging or app-run", + ) + return parser.parse_args() + + +def _parse_workflow_run_count_args(): + """ + Parse common arguments for workflow run count endpoints. + + Returns: + Parsed arguments containing status, time_range, and triggered_from filters + """ + parser = reqparse.RequestParser() + parser.add_argument( + "status", + type=str, + choices=WORKFLOW_RUN_STATUS_CHOICES, + location="args", + required=False, + ) + parser.add_argument( + "time_range", + type=time_duration, + location="args", + required=False, + help="Time range filter (e.g., 7d, 4h, 30m, 30s)", + ) + parser.add_argument( + "triggered_from", + type=str, + choices=["debugging", "app-run"], + location="args", + required=False, + help="Filter by trigger source: debugging or app-run", + ) + return parser.parse_args() + @console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs") class AdvancedChatAppWorkflowRunListApi(Resource): @@ -25,6 +90,8 @@ class AdvancedChatAppWorkflowRunListApi(Resource): @api.doc(description="Get advanced chat workflow run list") @api.doc(params={"app_id": "Application ID"}) @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"}) @api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields) @setup_required @login_required @@ -35,13 +102,64 @@ class AdvancedChatAppWorkflowRunListApi(Resource): """ Get advanced chat app workflow run list """ - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - args = parser.parse_args() + args = _parse_workflow_run_list_args() + + # Default to DEBUGGING if not specified + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) workflow_run_service = WorkflowRunService() - result = workflow_run_service.get_paginate_advanced_chat_workflow_runs(app_model=app_model, args=args) + result = workflow_run_service.get_paginate_advanced_chat_workflow_runs( + app_model=app_model, args=args, triggered_from=triggered_from + ) + + return result + + +@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs/count") class AdvancedChatAppWorkflowRunCountApi(Resource): + @api.doc("get_advanced_chat_workflow_runs_count") + @api.doc(description="Get advanced chat workflow runs count statistics") + @api.doc(params={"app_id": "Application ID"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc( + params={ + "time_range": ( + "Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), " + "30m (30 minutes), 30s (30 seconds). Filters by created_at field." + ) + } + ) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}) + @api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @marshal_with(workflow_run_count_fields) + def get(self, app_model: App): + """ + Get advanced chat workflow runs count statistics + """ + args = _parse_workflow_run_count_args() + + # Default to DEBUGGING if not specified + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) + + workflow_run_service = WorkflowRunService() + result = workflow_run_service.get_workflow_runs_count( + app_model=app_model, + status=args.get("status"), + time_range=args.get("time_range"), + triggered_from=triggered_from, + ) return result @@ -52,6 +170,8 @@ class WorkflowRunListApi(Resource): @api.doc(description="Get workflow run list") @api.doc(params={"app_id": "Application ID"}) @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"}) @api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields) @setup_required @login_required @@ -62,13 +182,64 @@ class WorkflowRunListApi(Resource): """ Get workflow run list """ - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - args = parser.parse_args() + args = _parse_workflow_run_list_args() + + # Default to DEBUGGING for workflow if not specified (backward compatibility) + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) workflow_run_service = WorkflowRunService() - result = workflow_run_service.get_paginate_workflow_runs(app_model=app_model, args=args) + result = workflow_run_service.get_paginate_workflow_runs( + app_model=app_model, args=args, triggered_from=triggered_from + ) + + return result + + +@console_ns.route("/apps/<uuid:app_id>/workflow-runs/count") +class WorkflowRunCountApi(Resource): + @api.doc("get_workflow_runs_count") + @api.doc(description="Get workflow runs count statistics") + @api.doc(params={"app_id": "Application ID"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc( + params={ + "time_range": ( + "Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), " + "30m (30 minutes), 30s (30 seconds). Filters by created_at field." + ) + } + ) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}) + @api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @marshal_with(workflow_run_count_fields) + def get(self, app_model: App): + """ + Get workflow runs count statistics + """ + args = _parse_workflow_run_count_args() + + # Default to DEBUGGING for workflow if not specified (backward compatibility) + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) + + workflow_run_service = WorkflowRunService() + result = workflow_run_service.get_workflow_runs_count( + app_model=app_model, + status=args.get("status"), + time_range=args.get("time_range"), + triggered_from=triggered_from, + ) return result diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index 535e7cadd6..c246b3ffd5 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -1,24 +1,28 @@ from datetime import datetime -from decimal import Decimal import pytz -import sqlalchemy as sa from flask import jsonify -from flask_login import current_user from flask_restx import Resource, reqparse +from sqlalchemy.orm import sessionmaker from controllers.console import api, console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from libs.helper import DatetimeString -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.enums import WorkflowRunTriggeredFrom from models.model import AppMode 
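# The new count endpoints above accept a time_range filter parsed by
# libs.custom_inputs.time_duration, whose source is not part of this patch.
# A minimal sketch of a parser for the documented "7d / 4h / 30m / 30s"
# format, assuming it returns a timedelta (the name and the ValueError
# behaviour here are assumptions):
import re
from datetime import timedelta

_DURATION_UNITS = {"d": "days", "h": "hours", "m": "minutes", "s": "seconds"}


def time_duration_sketch(value: str) -> timedelta:
    match = re.fullmatch(r"(\d+)([dhms])", value.strip())
    if not match:
        raise ValueError(f"invalid time range {value!r}, expected e.g. 7d, 4h, 30m, 30s")
    amount, unit = match.groups()
    return timedelta(**{_DURATION_UNITS[unit]: int(amount)})


# e.g. time_duration_sketch("7d") == timedelta(days=7)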
+from repositories.factory import DifyAPIRepositoryFactory @console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations") class WorkflowDailyRunsStatistic(Resource): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + @api.doc("get_workflow_daily_runs_statistic") @api.doc(description="Get workflow daily runs statistics") @api.doc(params={"app_id": "Application ID"}) @@ -29,64 +33,53 @@ class WorkflowDailyRunsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, - COUNT(id) AS runs -FROM - workflow_runs -WHERE - app_id = :app_id - AND triggered_from = :triggered_from""" - arg_dict = { - "tz": account.timezone, - "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, - } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc + start_date = None + end_date = None + if args["start"]: start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M") start_datetime = start_datetime.replace(second=0) - start_datetime_timezone = timezone.localize(start_datetime) - start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone) - - sql_query += " AND created_at >= :start" - arg_dict["start"] = start_datetime_utc + start_date = start_datetime_timezone.astimezone(utc_timezone) if args["end"]: end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M") end_datetime = end_datetime.replace(second=0) - end_datetime_timezone = timezone.localize(end_datetime) - end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone) + end_date = end_datetime_timezone.astimezone(utc_timezone) - sql_query += " AND created_at < :end" - arg_dict["end"] = end_datetime_utc - - sql_query += " GROUP BY date ORDER BY date" - - response_data = [] - - with db.engine.begin() as conn: - rs = conn.execute(sa.text(sql_query), arg_dict) - for i in rs: - response_data.append({"date": str(i.date), "runs": i.runs}) + response_data = self._workflow_run_repo.get_daily_runs_statistics( + tenant_id=app_model.tenant_id, + app_id=app_model.id, + triggered_from=WorkflowRunTriggeredFrom.APP_RUN, + start_date=start_date, + end_date=end_date, + timezone=account.timezone, + ) return jsonify({"data": response_data}) @console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-terminals") class WorkflowDailyTerminalsStatistic(Resource): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + @api.doc("get_workflow_daily_terminals_statistic") @api.doc(description="Get workflow daily terminals statistics") 
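# Each statistics Resource in this file now builds its repository once, in
# __init__, instead of issuing raw SQL per request. A condensed restatement of
# the pattern the hunks introduce (all names are taken from the patch itself);
# expire_on_commit=False keeps the returned objects readable after the
# repository's session commits.
from sqlalchemy.orm import sessionmaker

from extensions.ext_database import db
from repositories.factory import DifyAPIRepositoryFactory


class _StatisticResourceSketch:
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
        self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)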
@api.doc(params={"app_id": "Application ID"}) @@ -97,64 +90,53 @@ class WorkflowDailyTerminalsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, - COUNT(DISTINCT workflow_runs.created_by) AS terminal_count -FROM - workflow_runs -WHERE - app_id = :app_id - AND triggered_from = :triggered_from""" - arg_dict = { - "tz": account.timezone, - "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, - } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc + start_date = None + end_date = None + if args["start"]: start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M") start_datetime = start_datetime.replace(second=0) - start_datetime_timezone = timezone.localize(start_datetime) - start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone) - - sql_query += " AND created_at >= :start" - arg_dict["start"] = start_datetime_utc + start_date = start_datetime_timezone.astimezone(utc_timezone) if args["end"]: end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M") end_datetime = end_datetime.replace(second=0) - end_datetime_timezone = timezone.localize(end_datetime) - end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone) + end_date = end_datetime_timezone.astimezone(utc_timezone) - sql_query += " AND created_at < :end" - arg_dict["end"] = end_datetime_utc - - sql_query += " GROUP BY date ORDER BY date" - - response_data = [] - - with db.engine.begin() as conn: - rs = conn.execute(sa.text(sql_query), arg_dict) - for i in rs: - response_data.append({"date": str(i.date), "terminal_count": i.terminal_count}) + response_data = self._workflow_run_repo.get_daily_terminals_statistics( + tenant_id=app_model.tenant_id, + app_id=app_model.id, + triggered_from=WorkflowRunTriggeredFrom.APP_RUN, + start_date=start_date, + end_date=end_date, + timezone=account.timezone, + ) return jsonify({"data": response_data}) @console_ns.route("/apps//workflow/statistics/token-costs") class WorkflowDailyTokenCostStatistic(Resource): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + @api.doc("get_workflow_daily_token_cost_statistic") @api.doc(description="Get workflow daily token cost statistics") @api.doc(params={"app_id": "Application ID"}) @@ -165,69 +147,53 @@ class WorkflowDailyTokenCostStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser 
= ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, - SUM(workflow_runs.total_tokens) AS token_count -FROM - workflow_runs -WHERE - app_id = :app_id - AND triggered_from = :triggered_from""" - arg_dict = { - "tz": account.timezone, - "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, - } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc + start_date = None + end_date = None + if args["start"]: start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M") start_datetime = start_datetime.replace(second=0) - start_datetime_timezone = timezone.localize(start_datetime) - start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone) - - sql_query += " AND created_at >= :start" - arg_dict["start"] = start_datetime_utc + start_date = start_datetime_timezone.astimezone(utc_timezone) if args["end"]: end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M") end_datetime = end_datetime.replace(second=0) - end_datetime_timezone = timezone.localize(end_datetime) - end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone) + end_date = end_datetime_timezone.astimezone(utc_timezone) - sql_query += " AND created_at < :end" - arg_dict["end"] = end_datetime_utc - - sql_query += " GROUP BY date ORDER BY date" - - response_data = [] - - with db.engine.begin() as conn: - rs = conn.execute(sa.text(sql_query), arg_dict) - for i in rs: - response_data.append( - { - "date": str(i.date), - "token_count": i.token_count, - } - ) + response_data = self._workflow_run_repo.get_daily_token_cost_statistics( + tenant_id=app_model.tenant_id, + app_id=app_model.id, + triggered_from=WorkflowRunTriggeredFrom.APP_RUN, + start_date=start_date, + end_date=end_date, + timezone=account.timezone, + ) return jsonify({"data": response_data}) @console_ns.route("/apps/<uuid:app_id>/workflow/statistics/average-app-interactions") class WorkflowAverageAppInteractionStatistic(Resource): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + @api.doc("get_workflow_average_app_interaction_statistic") @api.doc(description="Get workflow average app interaction statistics") @api.doc(params={"app_id": "Application ID"}) @@ -238,74 +204,41 @@ class WorkflowAverageAppInteractionStatistic(Resource): @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() - sql_query = """SELECT - AVG(sub.interactions) AS interactions, - sub.date -FROM - ( - SELECT - DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, - 
c.created_by, - COUNT(c.id) AS interactions - FROM - workflow_runs c - WHERE - c.app_id = :app_id - AND c.triggered_from = :triggered_from - {{start}} - {{end}} - GROUP BY - date, c.created_by - ) sub -GROUP BY - sub.date""" - arg_dict = { - "tz": account.timezone, - "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, - } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc + start_date = None + end_date = None + if args["start"]: start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M") start_datetime = start_datetime.replace(second=0) - start_datetime_timezone = timezone.localize(start_datetime) - start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone) - - sql_query = sql_query.replace("{{start}}", " AND c.created_at >= :start") - arg_dict["start"] = start_datetime_utc - else: - sql_query = sql_query.replace("{{start}}", "") + start_date = start_datetime_timezone.astimezone(utc_timezone) if args["end"]: end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M") end_datetime = end_datetime.replace(second=0) - end_datetime_timezone = timezone.localize(end_datetime) - end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone) + end_date = end_datetime_timezone.astimezone(utc_timezone) - sql_query = sql_query.replace("{{end}}", " AND c.created_at < :end") - arg_dict["end"] = end_datetime_utc - else: - sql_query = sql_query.replace("{{end}}", "") - - response_data = [] - - with db.engine.begin() as conn: - rs = conn.execute(sa.text(sql_query), arg_dict) - for i in rs: - response_data.append( - {"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))} - ) + response_data = self._workflow_run_repo.get_average_app_interaction_statistics( + tenant_id=app_model.tenant_id, + app_id=app_model.id, + triggered_from=WorkflowRunTriggeredFrom.APP_RUN, + start_date=start_date, + end_date=end_date, + timezone=account.timezone, + ) return jsonify({"data": response_data}) diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 44aba01820..9bb2718f89 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -4,28 +4,29 @@ from typing import ParamSpec, TypeVar, Union from controllers.console.app.error import AppNotFoundError from extensions.ext_database import db -from libs.login import current_user +from libs.login import current_account_with_tenant from models import App, AppMode -from models.account import Account P = ParamSpec("P") R = TypeVar("R") +P1 = ParamSpec("P1") +R1 = TypeVar("R1") def _load_app_model(app_id: str) -> App | None: - assert isinstance(current_user, Account) + _, current_tenant_id = current_account_with_tenant() app_model = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) return app_model def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None): - def decorator(view_func: Callable[P, R]): + def decorator(view_func: Callable[P1, R1]): @wraps(view_func) - def decorated_view(*args: P.args, **kwargs: P.kwargs): + def decorated_view(*args: P1.args, **kwargs: P1.kwargs): if not kwargs.get("app_id"): raise ValueError("missing app_id in path parameters") diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py 
index 8cdadfb03c..2eeef079a1 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -7,18 +7,14 @@ from controllers.console.error import AlreadyActivateError from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from libs.helper import StrLen, email, extract_remote_ip, timezone -from models.account import AccountStatus +from models import AccountStatus from services.account_service import AccountService, RegisterService -active_check_parser = reqparse.RequestParser() -active_check_parser.add_argument( - "workspace_id", type=str, required=False, nullable=True, location="args", help="Workspace ID" -) -active_check_parser.add_argument( - "email", type=email, required=False, nullable=True, location="args", help="Email address" -) -active_check_parser.add_argument( - "token", type=str, required=True, nullable=False, location="args", help="Activation token" +active_check_parser = ( + reqparse.RequestParser() + .add_argument("workspace_id", type=str, required=False, nullable=True, location="args", help="Workspace ID") + .add_argument("email", type=email, required=False, nullable=True, location="args", help="Email address") + .add_argument("token", type=str, required=True, nullable=False, location="args", help="Activation token") ) @@ -60,15 +56,15 @@ class ActivateCheckApi(Resource): return {"is_valid": False} -active_parser = reqparse.RequestParser() -active_parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="json") -active_parser.add_argument("email", type=email, required=False, nullable=True, location="json") -active_parser.add_argument("token", type=str, required=True, nullable=False, location="json") -active_parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") -active_parser.add_argument( - "interface_language", type=supported_language, required=True, nullable=False, location="json" +active_parser = ( + reqparse.RequestParser() + .add_argument("workspace_id", type=str, required=False, nullable=True, location="json") + .add_argument("email", type=email, required=False, nullable=True, location="json") + .add_argument("token", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") + .add_argument("interface_language", type=supported_language, required=True, nullable=False, location="json") + .add_argument("timezone", type=timezone, required=True, nullable=False, location="json") ) -active_parser.add_argument("timezone", type=timezone, required=True, nullable=False, location="json") @console_ns.route("/activate") @@ -103,7 +99,7 @@ class ActivateApi(Resource): account.interface_language = args["interface_language"] account.timezone = args["timezone"] account.interface_theme = "light" - account.status = AccountStatus.ACTIVE.value + account.status = AccountStatus.ACTIVE account.initialized_at = naive_utc_now() db.session.commit() diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py index 207303b212..a06435267b 100644 --- a/api/controllers/console/auth/data_source_bearer_auth.py +++ b/api/controllers/console/auth/data_source_bearer_auth.py @@ -1,10 +1,9 @@ -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import console_ns from controllers.console.auth.error import ApiKeyAuthFailedError 
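# Throughout these auth controllers the flask_login current_user proxy gives
# way to libs.login.current_account_with_tenant, which returns an
# (account, tenant_id) pair and so removes both the isinstance(Account)
# guards and the current_user.current_tenant_id lookups. The call-site shapes
# used across this patch:
from libs.login import current_account_with_tenant

account, tenant_id = current_account_with_tenant()  # both values needed
_, tenant_id = current_account_with_tenant()        # only the tenant scope
account, _ = current_account_with_tenant()          # only the account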
-from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.auth.api_key_auth_service import ApiKeyAuthService from ..wraps import account_initialization_required, setup_required @@ -16,7 +15,8 @@ class ApiKeyAuthDataSource(Resource): @login_required @account_initialization_required def get(self): - data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_tenant_id) if data_source_api_key_bindings: return { "sources": [ @@ -41,16 +41,20 @@ class ApiKeyAuthDataSourceBinding(Resource): @account_initialization_required def post(self): # The role of the current user in the table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("category", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("category", type=str, required=True, nullable=False, location="json") + .add_argument("provider", type=str, required=True, nullable=False, location="json") + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + ) args = parser.parse_args() ApiKeyAuthService.validate_api_key_auth_args(args) try: - ApiKeyAuthService.create_provider_auth(current_user.current_tenant_id, args) + ApiKeyAuthService.create_provider_auth(current_tenant_id, args) except Exception as e: raise ApiKeyAuthFailedError(str(e)) return {"result": "success"}, 200 @@ -63,9 +67,11 @@ class ApiKeyAuthDataSourceBindingDelete(Resource): @account_initialization_required def delete(self, binding_id): # The role of the current user in the table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id) + ApiKeyAuthService.delete_provider_auth(current_tenant_id, binding_id) return {"result": "success"}, 204 diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index 6f1fd2f11a..0fd433d718 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -2,13 +2,12 @@ import logging import httpx from flask import current_app, redirect, request -from flask_login import current_user from flask_restx import Resource, fields from werkzeug.exceptions import Forbidden from configs import dify_config from controllers.console import api, console_ns -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from libs.oauth_data_source import NotionOAuth from ..wraps import account_initialization_required, setup_required @@ -45,6 +44,7 @@ class OAuthDataSource(Resource): @api.response(403, "Admin privileges required") def get(self, provider: str): # The role of the current user in the table must be admin or owner + current_user, _ = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() OAUTH_DATASOURCE_PROVIDERS = 
get_oauth_providers() diff --git a/api/controllers/console/auth/email_register.py b/api/controllers/console/auth/email_register.py index d3613d9183..fe2bb54e0b 100644 --- a/api/controllers/console/auth/email_register.py +++ b/api/controllers/console/auth/email_register.py @@ -19,7 +19,7 @@ from controllers.console.wraps import email_password_login_enabled, email_regist from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import valid_password -from models.account import Account +from models import Account from services.account_service import AccountService from services.billing_service import BillingService from services.errors.account import AccountNotFoundError, AccountRegisterError @@ -31,9 +31,11 @@ class EmailRegisterSendEmailApi(Resource): @email_password_login_enabled @email_register_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() ip_address = extract_remote_ip(request) @@ -59,10 +61,12 @@ class EmailRegisterCheckApi(Resource): @email_password_login_enabled @email_register_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, location="json") - parser.add_argument("token", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() user_email = args["email"] @@ -100,10 +104,12 @@ class EmailRegisterResetApi(Resource): @email_password_login_enabled @email_register_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("token", type=str, required=True, nullable=False, location="json") - parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") - parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("token", type=str, required=True, nullable=False, location="json") + .add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") + .add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + ) args = parser.parse_args() # Validate passwords match diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 704bcf8fb8..6be6ad51fe 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -20,7 +20,7 @@ from events.tenant_event import tenant_was_created from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password -from models.account import Account +from models import Account from services.account_service import AccountService, TenantService from services.feature_service import FeatureService @@ -54,9 
diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py
index 704bcf8fb8..6be6ad51fe 100644
--- a/api/controllers/console/auth/forgot_password.py
+++ b/api/controllers/console/auth/forgot_password.py
@@ -20,7 +20,7 @@ from events.tenant_event import tenant_was_created
 from extensions.ext_database import db
 from libs.helper import email, extract_remote_ip
 from libs.password import hash_password, valid_password
-from models.account import Account
+from models import Account
 from services.account_service import AccountService, TenantService
 from services.feature_service import FeatureService
@@ -54,9 +54,11 @@ class ForgotPasswordSendEmailApi(Resource):
     @setup_required
     @email_password_login_enabled
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("language", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("language", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         ip_address = extract_remote_ip(request)
@@ -111,10 +113,12 @@ class ForgotPasswordCheckApi(Resource):
     @setup_required
     @email_password_login_enabled
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=str, required=True, location="json")
-        parser.add_argument("code", type=str, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=str, required=True, location="json")
+            .add_argument("code", type=str, required=True, location="json")
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
 
         user_email = args["email"]
@@ -169,10 +173,12 @@ class ForgotPasswordResetApi(Resource):
     @setup_required
     @email_password_login_enabled
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
-        parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json")
-        parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+            .add_argument("new_password", type=valid_password, required=True, nullable=False, location="json")
+            .add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
 
         # Validate passwords match
diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py
index ba614aa828..77ecd5a5e4 100644
--- a/api/controllers/console/auth/login.py
+++ b/api/controllers/console/auth/login.py
@@ -1,12 +1,10 @@
-from typing import cast
-
 import flask_login
-from flask import request
+from flask import make_response, request
 from flask_restx import Resource, reqparse
 
 import services
 from configs import dify_config
-from constants.languages import languages
+from constants.languages import get_valid_language
 from controllers.console import console_ns
 from controllers.console.auth.error import (
     AuthenticationFailedError,
@@ -26,7 +24,16 @@ from controllers.console.error import (
 from controllers.console.wraps import email_password_login_enabled, setup_required
 from events.tenant_event import tenant_was_created
 from libs.helper import email, extract_remote_ip
-from models.account import Account
+from libs.login import current_account_with_tenant
+from libs.token import (
+    clear_access_token_from_cookie,
+    clear_csrf_token_from_cookie,
+    clear_refresh_token_from_cookie,
+    extract_refresh_token,
+    set_access_token_to_cookie,
+    set_csrf_token_to_cookie,
+    set_refresh_token_to_cookie,
+)
 from services.account_service import AccountService, RegisterService, TenantService
 from services.billing_service import BillingService
 from services.errors.account import AccountRegisterError
@@ -42,11 +49,13 @@ class LoginApi(Resource):
     @email_password_login_enabled
     def post(self):
         """Authenticate user and login."""
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("password", type=str, required=True, location="json")
-        parser.add_argument("remember_me", type=bool, required=False, default=False, location="json")
-        parser.add_argument("invite_token", type=str, required=False, default=None, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("password", type=str, required=True, location="json")
+            .add_argument("remember_me", type=bool, required=False, default=False, location="json")
+            .add_argument("invite_token", type=str, required=False, default=None, location="json")
+        )
         args = parser.parse_args()
 
         if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]):
@@ -89,19 +98,36 @@ class LoginApi(Resource):
         token_pair = AccountService.login(account=account, ip_address=extract_remote_ip(request))
         AccountService.reset_login_error_rate_limit(args["email"])
-        return {"result": "success", "data": token_pair.model_dump()}
+
+        # Create response with cookies instead of returning tokens in body
+        response = make_response({"result": "success"})
+
+        set_access_token_to_cookie(request, response, token_pair.access_token)
+        set_refresh_token_to_cookie(request, response, token_pair.refresh_token)
+        set_csrf_token_to_cookie(request, response, token_pair.csrf_token)
+
+        return response
 
 
 @console_ns.route("/logout")
 class LogoutApi(Resource):
     @setup_required
-    def get(self):
-        account = cast(Account, flask_login.current_user)
+    def post(self):
+        current_user, _ = current_account_with_tenant()
+        account = current_user
         if isinstance(account, flask_login.AnonymousUserMixin):
-            return {"result": "success"}
-        AccountService.logout(account=account)
-        flask_login.logout_user()
-        return {"result": "success"}
+            response = make_response({"result": "success"})
+        else:
+            AccountService.logout(account=account)
+            flask_login.logout_user()
+            response = make_response({"result": "success"})
+
+        # Clear cookies on logout
+        clear_access_token_from_cookie(response)
+        clear_refresh_token_from_cookie(response)
+        clear_csrf_token_from_cookie(response)
+
+        return response
 
 
 @console_ns.route("/reset-password")
@@ -109,9 +135,11 @@ class ResetPasswordSendEmailApi(Resource):
     @setup_required
     @email_password_login_enabled
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("language", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("language", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         if args["language"] is not None and args["language"] == "zh-Hans":
@@ -137,9 +165,11 @@ class ResetPasswordSendEmailApi(Resource):
 class EmailCodeLoginSendEmailApi(Resource):
     @setup_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("language", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("language", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         ip_address = extract_remote_ip(request)
@@ -170,13 +200,17 @@ class EmailCodeLoginSendEmailApi(Resource):
 class EmailCodeLoginApi(Resource):
     @setup_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=str, required=True, location="json")
-        parser.add_argument("code", type=str, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=str, required=True, location="json")
+            .add_argument("code", type=str, required=True, location="json")
+            .add_argument("token", type=str, required=True, location="json")
+            .add_argument("language", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         user_email = args["email"]
+        language = args["language"]
 
         token_data = AccountService.get_email_code_login_data(args["token"])
         if token_data is None:
@@ -210,7 +244,9 @@ class EmailCodeLoginApi(Resource):
         if account is None:
             try:
                 account = AccountService.create_account_and_tenant(
-                    email=user_email, name=user_email, interface_language=languages[0]
+                    email=user_email,
+                    name=user_email,
+                    interface_language=get_valid_language(language),
                 )
             except WorkSpaceNotAllowedCreateError:
                 raise NotAllowedCreateWorkspace()
@@ -220,18 +256,36 @@
                 raise WorkspacesLimitExceeded()
         token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
         AccountService.reset_login_error_rate_limit(args["email"])
-        return {"result": "success", "data": token_pair.model_dump()}
+
+        # Create response with cookies instead of returning tokens in body
+        response = make_response({"result": "success"})
+
+        set_csrf_token_to_cookie(request, response, token_pair.csrf_token)
+        # Set HTTP-only secure cookies for tokens
+        set_access_token_to_cookie(request, response, token_pair.access_token)
+        set_refresh_token_to_cookie(request, response, token_pair.refresh_token)
+        return response
 
 
 @console_ns.route("/refresh-token")
 class RefreshTokenApi(Resource):
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("refresh_token", type=str, required=True, location="json")
-        args = parser.parse_args()
+        # Get refresh token from cookie instead of request body
+        refresh_token = extract_refresh_token(request)
+
+        if not refresh_token:
+            return {"result": "fail", "message": "No refresh token provided"}, 401
 
         try:
-            new_token_pair = AccountService.refresh_token(args["refresh_token"])
-            return {"result": "success", "data": new_token_pair.model_dump()}
+            new_token_pair = AccountService.refresh_token(refresh_token)
+
+            # Create response with new cookies
+            response = make_response({"result": "success"})
+
+            # Update cookies with new tokens
+            set_csrf_token_to_cookie(request, response, new_token_pair.csrf_token)
+            set_access_token_to_cookie(request, response, new_token_pair.access_token)
+            set_refresh_token_to_cookie(request, response, new_token_pair.refresh_token)
+            return response
         except Exception as e:
-            return {"result": "fail", "data": str(e)}, 401
+            return {"result": "fail", "message": str(e)}, 401
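Login, logout, email-code login, and refresh-token now move tokens out of JSON bodies and into cookies via helpers from libs.token. That module isn't part of this diff, so the following is only a plausible sketch of such helpers built on Flask's standard cookie API; the cookie name and flags are assumptions:

```python
from flask import Request, Response

ACCESS_TOKEN_COOKIE = "access_token"  # cookie name is an assumption


def set_access_token_to_cookie(request: Request, response: Response, token: str) -> None:
    # httponly keeps the token out of reach of page JavaScript;
    # secure and samesite guard transport and cross-site sending.
    response.set_cookie(
        ACCESS_TOKEN_COOKIE,
        token,
        httponly=True,
        secure=request.is_secure,
        samesite="Lax",
    )


def clear_access_token_from_cookie(response: Response) -> None:
    # Expiring the cookie immediately is the standard way to delete it.
    response.delete_cookie(ACCESS_TOKEN_COOKIE)
```

The CSRF token is the one value the frontend must be able to read and echo back in a request header, so its cookie would typically be set without `httponly`, unlike the access and refresh tokens.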
diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py
index 5528dc0569..29653b32ec 100644
--- a/api/controllers/console/auth/oauth.py
+++ b/api/controllers/console/auth/oauth.py
@@ -14,8 +14,12 @@ from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
 from libs.helper import extract_remote_ip
 from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
-from models import Account
-from models.account import AccountStatus
+from libs.token import (
+    set_access_token_to_cookie,
+    set_csrf_token_to_cookie,
+    set_refresh_token_to_cookie,
+)
+from models import Account, AccountStatus
 from services.account_service import AccountService, RegisterService, TenantService
 from services.billing_service import BillingService
 from services.errors.account import AccountNotFoundError, AccountRegisterError
@@ -130,11 +134,11 @@ class OAuthCallback(Resource):
             return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}")
 
         # Check account status
-        if account.status == AccountStatus.BANNED.value:
+        if account.status == AccountStatus.BANNED:
             return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account is banned.")
 
-        if account.status == AccountStatus.PENDING.value:
-            account.status = AccountStatus.ACTIVE.value
+        if account.status == AccountStatus.PENDING:
+            account.status = AccountStatus.ACTIVE
             account.initialized_at = naive_utc_now()
             db.session.commit()
 
@@ -153,9 +157,12 @@ class OAuthCallback(Resource):
             ip_address=extract_remote_ip(request),
         )
 
-        return redirect(
-            f"{dify_config.CONSOLE_WEB_URL}?access_token={token_pair.access_token}&refresh_token={token_pair.refresh_token}"
-        )
+        response = redirect(f"{dify_config.CONSOLE_WEB_URL}")
+
+        set_access_token_to_cookie(request, response, token_pair.access_token)
+        set_refresh_token_to_cookie(request, response, token_pair.refresh_token)
+        set_csrf_token_to_cookie(request, response, token_pair.csrf_token)
+        return response
 
 
 def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Account | None:
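Dropping `.value` from the AccountStatus comparisons is safe when the enum's members are themselves strings, as with a `StrEnum`. A toy check illustrating why; the member list here is trimmed, and the real enum in models/account.py may define more members:

```python
from enum import StrEnum


class AccountStatus(StrEnum):
    # Mirrors the statuses touched in this diff.
    PENDING = "pending"
    ACTIVE = "active"
    BANNED = "banned"


status_from_db = "banned"  # what a plain VARCHAR column would yield

# StrEnum members *are* strings, so both comparisons hold and the
# ".value" suffix was pure noise:
assert status_from_db == AccountStatus.BANNED
assert status_from_db == AccountStatus.BANNED.value
```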
location="json") parsed_args = parser.parse_args() redirect_uri = parsed_args.get("redirect_uri") @@ -116,7 +113,8 @@ class OAuthServerUserAuthorizeApi(Resource): @account_initialization_required @oauth_server_client_id_required def post(self, oauth_provider_app: OAuthProviderApp): - account = cast(Account, flask_login.current_user) + current_user, _ = current_account_with_tenant() + account = current_user user_account_id = account.id code = OAuthServerService.sign_oauth_authorization_code(oauth_provider_app.client_id, user_account_id) @@ -132,12 +130,14 @@ class OAuthServerUserTokenApi(Resource): @setup_required @oauth_server_client_id_required def post(self, oauth_provider_app: OAuthProviderApp): - parser = reqparse.RequestParser() - parser.add_argument("grant_type", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=False, location="json") - parser.add_argument("client_secret", type=str, required=False, location="json") - parser.add_argument("redirect_uri", type=str, required=False, location="json") - parser.add_argument("refresh_token", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("grant_type", type=str, required=True, location="json") + .add_argument("code", type=str, required=False, location="json") + .add_argument("client_secret", type=str, required=False, location="json") + .add_argument("redirect_uri", type=str, required=False, location="json") + .add_argument("refresh_token", type=str, required=False, location="json") + ) parsed_args = parser.parse_args() try: diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index fa89f45122..705f5970dd 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -2,8 +2,7 @@ from flask_restx import Resource, reqparse from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required -from libs.login import current_user, login_required -from models.model import Account +from libs.login import current_account_with_tenant, login_required from services.billing_service import BillingService @@ -14,17 +13,15 @@ class Subscription(Resource): @account_initialization_required @only_edition_cloud def get(self): - parser = reqparse.RequestParser() - parser.add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"]) - parser.add_argument("interval", type=str, required=True, location="args", choices=["month", "year"]) - args = parser.parse_args() - assert isinstance(current_user, Account) - - BillingService.is_tenant_owner_or_admin(current_user) - assert current_user.current_tenant_id is not None - return BillingService.get_subscription( - args["plan"], args["interval"], current_user.email, current_user.current_tenant_id + current_user, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"]) + .add_argument("interval", type=str, required=True, location="args", choices=["month", "year"]) ) + args = parser.parse_args() + BillingService.is_tenant_owner_or_admin(current_user) + return BillingService.get_subscription(args["plan"], args["interval"], current_user.email, current_tenant_id) @console_ns.route("/billing/invoices") @@ -34,7 +31,6 @@ class Invoices(Resource): @account_initialization_required @only_edition_cloud def 
diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py
index fa89f45122..705f5970dd 100644
--- a/api/controllers/console/billing/billing.py
+++ b/api/controllers/console/billing/billing.py
@@ -2,8 +2,7 @@ from flask_restx import Resource, reqparse
 
 from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
-from libs.login import current_user, login_required
-from models.model import Account
+from libs.login import current_account_with_tenant, login_required
 from services.billing_service import BillingService
 
 
@@ -14,17 +13,15 @@ class Subscription(Resource):
     @account_initialization_required
     @only_edition_cloud
     def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"])
-        parser.add_argument("interval", type=str, required=True, location="args", choices=["month", "year"])
-        args = parser.parse_args()
-        assert isinstance(current_user, Account)
-
-        BillingService.is_tenant_owner_or_admin(current_user)
-        assert current_user.current_tenant_id is not None
-        return BillingService.get_subscription(
-            args["plan"], args["interval"], current_user.email, current_user.current_tenant_id
+        current_user, current_tenant_id = current_account_with_tenant()
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"])
+            .add_argument("interval", type=str, required=True, location="args", choices=["month", "year"])
         )
+        args = parser.parse_args()
+        BillingService.is_tenant_owner_or_admin(current_user)
+        return BillingService.get_subscription(args["plan"], args["interval"], current_user.email, current_tenant_id)
 
 
 @console_ns.route("/billing/invoices")
@@ -34,7 +31,6 @@ class Invoices(Resource):
     @account_initialization_required
     @only_edition_cloud
     def get(self):
-        assert isinstance(current_user, Account)
+        current_user, current_tenant_id = current_account_with_tenant()
         BillingService.is_tenant_owner_or_admin(current_user)
-        assert current_user.current_tenant_id is not None
-        return BillingService.get_invoices(current_user.email, current_user.current_tenant_id)
+        return BillingService.get_invoices(current_user.email, current_tenant_id)
diff --git a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py
index e489b48c82..2a6889968c 100644
--- a/api/controllers/console/billing/compliance.py
+++ b/api/controllers/console/billing/compliance.py
@@ -1,9 +1,8 @@
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, reqparse
 
 from libs.helper import extract_remote_ip
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from services.billing_service import BillingService
 
 from .. import console_ns
@@ -17,17 +16,16 @@ class ComplianceApi(Resource):
     @account_initialization_required
     @only_edition_cloud
     def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("doc_name", type=str, required=True, location="args")
+        current_user, current_tenant_id = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument("doc_name", type=str, required=True, location="args")
         args = parser.parse_args()
         ip_address = extract_remote_ip(request)
         device_info = request.headers.get("User-Agent", "Unknown device")
-
         return BillingService.get_compliance_download_link(
             doc_name=args.doc_name,
             account_id=current_user.id,
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             ip=ip_address,
             device_info=device_info,
         )
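Both billing endpoints previously carried `isinstance` and not-None asserts purely to narrow types for the checker. `current_account_with_tenant` centralizes that narrowing; its real implementation in libs/login.py is not shown in this diff, but it plausibly looks something like this sketch:

```python
from typing import Protocol, cast

import flask_login  # assumed available, as elsewhere in the api project


class AccountLike(Protocol):
    # Just the attributes this sketch needs; the real type is models.Account.
    email: str
    current_tenant_id: str | None


def current_account_with_tenant() -> tuple[AccountLike, str]:
    # One central place performs the cast from flask_login's proxy and the
    # Optional-to-str narrowing that endpoints used to repeat inline with
    # assert statements.
    account = cast(AccountLike, flask_login.current_user)
    tenant_id = account.current_tenant_id
    if tenant_id is None:
        # The real helper presumably raises a proper HTTP error here.
        raise RuntimeError("Current account has no active tenant.")
    return account, tenant_id
```

Unlike asserts, this check survives `python -O`, and every caller gets a tenant id that is a plain `str` by construction.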
diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py
index 370e0c0d14..ef66053075 100644
--- a/api/controllers/console/datasets/data_source.py
+++ b/api/controllers/console/datasets/data_source.py
@@ -3,7 +3,6 @@ from collections.abc import Generator
 from typing import cast
 
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, marshal_with, reqparse
 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -15,12 +14,12 @@ from core.datasource.entities.datasource_entities import DatasourceProviderType,
 from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
 from core.indexing_runner import IndexingRunner
 from core.rag.extractor.entity.datasource_type import DatasourceType
-from core.rag.extractor.entity.extract_setting import ExtractSetting
+from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo
 from core.rag.extractor.notion_extractor import NotionExtractor
 from extensions.ext_database import db
 from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
 from libs.datetime_utils import naive_utc_now
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models import DataSourceOauthBinding, Document
 from services.dataset_service import DatasetService, DocumentService
 from services.datasource_provider_service import DatasourceProviderService
@@ -37,10 +36,12 @@ class DataSourceApi(Resource):
     @account_initialization_required
     @marshal_with(integrate_list_fields)
     def get(self):
+        _, current_tenant_id = current_account_with_tenant()
+
         # get workspace data source integrates
         data_source_integrates = db.session.scalars(
             select(DataSourceOauthBinding).where(
-                DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
+                DataSourceOauthBinding.tenant_id == current_tenant_id,
                 DataSourceOauthBinding.disabled == False,
             )
         ).all()
@@ -120,13 +121,15 @@ class DataSourceNotionListApi(Resource):
     @account_initialization_required
     @marshal_with(integrate_notion_info_list_fields)
     def get(self):
+        current_user, current_tenant_id = current_account_with_tenant()
+
         dataset_id = request.args.get("dataset_id", default=None, type=str)
         credential_id = request.args.get("credential_id", default=None, type=str)
         if not credential_id:
             raise ValueError("Credential id is required.")
         datasource_provider_service = DatasourceProviderService()
         credential = datasource_provider_service.get_datasource_credentials(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             credential_id=credential_id,
             provider="notion_datasource",
             plugin_id="langgenius/notion_datasource",
@@ -146,7 +149,7 @@ class DataSourceNotionListApi(Resource):
                 documents = session.scalars(
                     select(Document).filter_by(
                         dataset_id=dataset_id,
-                        tenant_id=current_user.current_tenant_id,
+                        tenant_id=current_tenant_id,
                         data_source_type="notion_import",
                         enabled=True,
                     )
@@ -161,7 +164,7 @@ class DataSourceNotionListApi(Resource):
         datasource_runtime = DatasourceManager.get_datasource_runtime(
             provider_id="langgenius/notion_datasource/notion_datasource",
             datasource_name="notion_datasource",
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             datasource_type=DatasourceProviderType.ONLINE_DOCUMENT,
         )
         datasource_provider_service = DatasourceProviderService()
@@ -210,12 +213,14 @@ class DataSourceNotionApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, workspace_id, page_id, page_type):
+        _, current_tenant_id = current_account_with_tenant()
+
         credential_id = request.args.get("credential_id", default=None, type=str)
         if not credential_id:
             raise ValueError("Credential id is required.")
         datasource_provider_service = DatasourceProviderService()
         credential = datasource_provider_service.get_datasource_credentials(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             credential_id=credential_id,
             provider="notion_datasource",
             plugin_id="langgenius/notion_datasource",
@@ -229,7 +234,7 @@ class DataSourceNotionApi(Resource):
             notion_obj_id=page_id,
             notion_page_type=page_type,
             notion_access_token=credential.get("integration_secret"),
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
         )
 
         text_docs = extractor.extract()
@@ -239,12 +244,14 @@ class DataSourceNotionApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("notion_info_list", type=list, required=True, nullable=True, location="json")
-        parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
-        parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
-        parser.add_argument(
-            "doc_language", type=str, default="English", required=False, nullable=False, location="json"
+        _, current_tenant_id = current_account_with_tenant()
+
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("notion_info_list", type=list, required=True, nullable=True, location="json")
+            .add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
+            .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+            .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
         )
         args = parser.parse_args()
         # validate args
@@ -256,20 +263,22 @@ class DataSourceNotionApi(Resource):
             credential_id = notion_info.get("credential_id")
             for page in notion_info["pages"]:
                 extract_setting = ExtractSetting(
-                    datasource_type=DatasourceType.NOTION.value,
-                    notion_info={
-                        "credential_id": credential_id,
-                        "notion_workspace_id": workspace_id,
-                        "notion_obj_id": page["page_id"],
-                        "notion_page_type": page["type"],
-                        "tenant_id": current_user.current_tenant_id,
-                    },
+                    datasource_type=DatasourceType.NOTION,
+                    notion_info=NotionInfo.model_validate(
+                        {
+                            "credential_id": credential_id,
+                            "notion_workspace_id": workspace_id,
+                            "notion_obj_id": page["page_id"],
+                            "notion_page_type": page["type"],
+                            "tenant_id": current_tenant_id,
+                        }
+                    ),
                     document_model=args["doc_form"],
                 )
                 extract_settings.append(extract_setting)
         indexing_runner = IndexingRunner()
         response = indexing_runner.indexing_estimate(
-            current_user.current_tenant_id,
+            current_tenant_id,
             extract_settings,
             args["process_rule"],
            args["doc_form"],
diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py
index 2affbd6a42..50bf48450c 100644
--- a/api/controllers/console/datasets/datasets.py
+++ b/api/controllers/console/datasets/datasets.py
@@ -1,6 +1,6 @@
-import flask_restx
+from typing import Any, cast
+
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, fields, marshal, marshal_with, reqparse
 from sqlalchemy import select
 from werkzeug.exceptions import Forbidden, NotFound
@@ -23,29 +23,97 @@ from core.model_runtime.entities.model_entities import ModelType
 from core.provider_manager import ProviderManager
 from core.rag.datasource.vdb.vector_type import VectorType
 from core.rag.extractor.entity.datasource_type import DatasourceType
-from core.rag.extractor.entity.extract_setting import ExtractSetting
+from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
 from core.rag.retrieval.retrieval_methods import RetrievalMethod
 from extensions.ext_database import db
 from fields.app_fields import related_app_list
 from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields
 from fields.document_fields import document_status_fields
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
+from libs.validators import validate_description_length
 from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
 from models.dataset import DatasetPermissionEnum
 from models.provider_ids import ModelProviderID
 from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
 
 
-def _validate_name(name):
+def _validate_name(name: str) -> str:
     if not name or len(name) < 1 or len(name) > 40:
         raise ValueError("Name must be between 1 to 40 characters.")
     return name
 
 
-def _validate_description_length(description):
-    if description and len(description) > 400:
-        raise ValueError("Description cannot exceed 400 characters.")
-    return description
+def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool = False) -> dict[str, list[str]]:
+    """
+    Get supported retrieval methods based on vector database type.
+
+    Args:
+        vector_type: Vector database type, can be None
+        is_mock: Whether this is a Mock API, affects MILVUS handling
+
+    Returns:
+        Dictionary containing supported retrieval methods
+
+    Raises:
+        ValueError: If vector_type is None or unsupported
+    """
+    if vector_type is None:
+        raise ValueError("Vector store type is not configured.")
+
+    # Define vector database types that only support semantic search
+    semantic_only_types = {
+        VectorType.RELYT,
+        VectorType.TIDB_VECTOR,
+        VectorType.CHROMA,
+        VectorType.PGVECTO_RS,
+        VectorType.VIKINGDB,
+        VectorType.UPSTASH,
+    }
+
+    # Define vector database types that support all retrieval methods
+    full_search_types = {
+        VectorType.QDRANT,
+        VectorType.WEAVIATE,
+        VectorType.OPENSEARCH,
+        VectorType.ANALYTICDB,
+        VectorType.MYSCALE,
+        VectorType.ORACLE,
+        VectorType.ELASTICSEARCH,
+        VectorType.ELASTICSEARCH_JA,
+        VectorType.PGVECTOR,
+        VectorType.VASTBASE,
+        VectorType.TIDB_ON_QDRANT,
+        VectorType.LINDORM,
+        VectorType.COUCHBASE,
+        VectorType.OPENGAUSS,
+        VectorType.OCEANBASE,
+        VectorType.TABLESTORE,
+        VectorType.HUAWEI_CLOUD,
+        VectorType.TENCENT,
+        VectorType.MATRIXONE,
+        VectorType.CLICKZETTA,
+        VectorType.BAIDU,
+        VectorType.ALIBABACLOUD_MYSQL,
+    }
+
+    semantic_methods = {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
+    full_methods = {
+        "retrieval_method": [
+            RetrievalMethod.SEMANTIC_SEARCH.value,
+            RetrievalMethod.FULL_TEXT_SEARCH.value,
+            RetrievalMethod.HYBRID_SEARCH.value,
+        ]
+    }
+
+    if vector_type == VectorType.MILVUS:
+        return semantic_methods if is_mock else full_methods
+
+    if vector_type in semantic_only_types:
+        return semantic_methods
+    elif vector_type in full_search_types:
+        return full_methods
+    else:
+        raise ValueError(f"Unsupported vector db type {vector_type}.")
 
 
 @console_ns.route("/datasets")
@@ -68,6 +136,7 @@ class DatasetListApi(Resource):
     @account_initialization_required
     @enterprise_license_required
     def get(self):
+        current_user, current_tenant_id = current_account_with_tenant()
         page = request.args.get("page", default=1, type=int)
         limit = request.args.get("limit", default=20, type=int)
         ids = request.args.getlist("ids")
@@ -76,15 +145,15 @@ class DatasetListApi(Resource):
         tag_ids = request.args.getlist("tag_ids")
         include_all = request.args.get("include_all", default="false").lower() == "true"
         if ids:
-            datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
+            datasets, total = DatasetService.get_datasets_by_ids(ids, current_tenant_id)
         else:
             datasets, total = DatasetService.get_datasets(
-                page, limit, current_user.current_tenant_id, current_user, search, tag_ids, include_all
+                page, limit, current_tenant_id, current_user, search, tag_ids, include_all
             )
 
         # check embedding setting
         provider_manager = ProviderManager()
-        configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id)
+        configurations = provider_manager.get_configurations(tenant_id=current_tenant_id)
 
         embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
 
@@ -92,7 +161,7 @@ class DatasetListApi(Resource):
         for embedding_model in embedding_models:
             model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}")
 
-        data = marshal(datasets, dataset_detail_fields)
+        data = cast(list[dict[str, Any]], marshal(datasets, dataset_detail_fields))
         for item in data:
             # convert embedding_model_provider to plugin standard format
             if item["indexing_technique"] == "high_quality" and item["embedding_model_provider"]:
@@ -137,50 +206,53 @@ class DatasetListApi(Resource):
     @account_initialization_required
     @cloud_edition_billing_rate_limit_check("knowledge")
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "name",
-            nullable=False,
-            required=True,
-            help="type is required. Name must be between 1 to 40 characters.",
-            type=_validate_name,
-        )
-        parser.add_argument(
-            "description",
-            type=_validate_description_length,
-            nullable=True,
-            required=False,
-            default="",
-        )
-        parser.add_argument(
-            "indexing_technique",
-            type=str,
-            location="json",
-            choices=Dataset.INDEXING_TECHNIQUE_LIST,
-            nullable=True,
-            help="Invalid indexing technique.",
-        )
-        parser.add_argument(
-            "external_knowledge_api_id",
-            type=str,
-            nullable=True,
-            required=False,
-        )
-        parser.add_argument(
-            "provider",
-            type=str,
-            nullable=True,
-            choices=Dataset.PROVIDER_LIST,
-            required=False,
-            default="vendor",
-        )
-        parser.add_argument(
-            "external_knowledge_id",
-            type=str,
-            nullable=True,
-            required=False,
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "name",
+                nullable=False,
+                required=True,
+                help="type is required. Name must be between 1 to 40 characters.",
+                type=_validate_name,
+            )
+            .add_argument(
+                "description",
+                type=validate_description_length,
+                nullable=True,
+                required=False,
+                default="",
+            )
+            .add_argument(
+                "indexing_technique",
+                type=str,
+                location="json",
+                choices=Dataset.INDEXING_TECHNIQUE_LIST,
+                nullable=True,
+                help="Invalid indexing technique.",
+            )
+            .add_argument(
+                "external_knowledge_api_id",
+                type=str,
+                nullable=True,
+                required=False,
+            )
+            .add_argument(
+                "provider",
+                type=str,
+                nullable=True,
+                choices=Dataset.PROVIDER_LIST,
+                required=False,
+                default="vendor",
+            )
+            .add_argument(
+                "external_knowledge_id",
+                type=str,
+                nullable=True,
+                required=False,
+            )
         )
         args = parser.parse_args()
+        current_user, current_tenant_id = current_account_with_tenant()
 
         # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
         if not current_user.is_dataset_editor:
@@ -188,7 +260,7 @@ class DatasetListApi(Resource):
 
         try:
             dataset = DatasetService.create_empty_dataset(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=current_tenant_id,
                 name=args["name"],
                 description=args["description"],
                 indexing_technique=args["indexing_technique"],
@@ -216,6 +288,7 @@ class DatasetApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id):
+        current_user, current_tenant_id = current_account_with_tenant()
         dataset_id_str = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id_str)
         if dataset is None:
@@ -224,7 +297,7 @@ class DatasetApi(Resource):
             DatasetService.check_dataset_permission(dataset, current_user)
         except services.errors.account.NoPermissionError as e:
             raise Forbidden(str(e))
-        data = marshal(dataset, dataset_detail_fields)
+        data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
         if dataset.indexing_technique == "high_quality":
             if dataset.embedding_model_provider:
                 provider_id = ModelProviderID(dataset.embedding_model_provider)
@@ -235,7 +308,7 @@ class DatasetApi(Resource):
 
         # check embedding setting
         provider_manager = ProviderManager()
-        configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id)
+        configurations = provider_manager.get_configurations(tenant_id=current_tenant_id)
 
         embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
 
@@ -281,73 +354,76 @@ class DatasetApi(Resource):
         if dataset is None:
             raise NotFound("Dataset not found.")
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "name",
-            nullable=False,
-            help="type is required. Name must be between 1 to 40 characters.",
-            type=_validate_name,
-        )
-        parser.add_argument("description", location="json", store_missing=False, type=_validate_description_length)
-        parser.add_argument(
-            "indexing_technique",
-            type=str,
-            location="json",
-            choices=Dataset.INDEXING_TECHNIQUE_LIST,
-            nullable=True,
-            help="Invalid indexing technique.",
-        )
-        parser.add_argument(
-            "permission",
-            type=str,
-            location="json",
-            choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM),
-            help="Invalid permission.",
-        )
-        parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.")
-        parser.add_argument(
-            "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider."
-        )
-        parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
-        parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
-
-        parser.add_argument(
-            "external_retrieval_model",
-            type=dict,
-            required=False,
-            nullable=True,
-            location="json",
-            help="Invalid external retrieval model.",
-        )
-
-        parser.add_argument(
-            "external_knowledge_id",
-            type=str,
-            required=False,
-            nullable=True,
-            location="json",
-            help="Invalid external knowledge id.",
-        )
-
-        parser.add_argument(
-            "external_knowledge_api_id",
-            type=str,
-            required=False,
-            nullable=True,
-            location="json",
-            help="Invalid external knowledge api id.",
-        )
-
-        parser.add_argument(
-            "icon_info",
-            type=dict,
-            required=False,
-            nullable=True,
-            location="json",
-            help="Invalid icon info.",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "name",
+                nullable=False,
+                help="type is required. Name must be between 1 to 40 characters.",
+                type=_validate_name,
+            )
+            .add_argument("description", location="json", store_missing=False, type=validate_description_length)
+            .add_argument(
+                "indexing_technique",
+                type=str,
+                location="json",
+                choices=Dataset.INDEXING_TECHNIQUE_LIST,
+                nullable=True,
+                help="Invalid indexing technique.",
+            )
+            .add_argument(
+                "permission",
+                type=str,
+                location="json",
+                choices=(
+                    DatasetPermissionEnum.ONLY_ME,
+                    DatasetPermissionEnum.ALL_TEAM,
+                    DatasetPermissionEnum.PARTIAL_TEAM,
+                ),
+                help="Invalid permission.",
+            )
+            .add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.")
+            .add_argument(
+                "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider."
+            )
+            .add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
+            .add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
+            .add_argument(
+                "external_retrieval_model",
+                type=dict,
+                required=False,
+                nullable=True,
+                location="json",
+                help="Invalid external retrieval model.",
+            )
+            .add_argument(
+                "external_knowledge_id",
+                type=str,
+                required=False,
+                nullable=True,
+                location="json",
+                help="Invalid external knowledge id.",
+            )
+            .add_argument(
+                "external_knowledge_api_id",
+                type=str,
+                required=False,
+                nullable=True,
+                location="json",
+                help="Invalid external knowledge api id.",
+            )
+            .add_argument(
+                "icon_info",
+                type=dict,
+                required=False,
+                nullable=True,
+                location="json",
+                help="Invalid icon info.",
+            )
         )
         args = parser.parse_args()
         data = request.get_json()
+        current_user, current_tenant_id = current_account_with_tenant()
 
         # check embedding model setting
         if (
@@ -369,8 +445,8 @@ class DatasetApi(Resource):
         if dataset is None:
             raise NotFound("Dataset not found.")
 
-        result_data = marshal(dataset, dataset_detail_fields)
-        tenant_id = current_user.current_tenant_id
+        result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
+        tenant_id = current_tenant_id
 
         if data.get("partial_member_list") and data.get("permission") == "partial_members":
             DatasetPermissionService.update_partial_member_list(
@@ -394,9 +470,9 @@ class DatasetApi(Resource):
     @cloud_edition_billing_rate_limit_check("knowledge")
     def delete(self, dataset_id):
         dataset_id_str = str(dataset_id)
+        current_user, _ = current_account_with_tenant()
 
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not (current_user.is_editor or current_user.is_dataset_operator):
+        if not (current_user.has_edit_permission or current_user.is_dataset_operator):
             raise Forbidden()
 
         try:
@@ -435,6 +511,7 @@ class DatasetQueryApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id):
+        current_user, _ = current_account_with_tenant()
         dataset_id_str = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id_str)
         if dataset is None:
@@ -469,32 +546,31 @@ class DatasetIndexingEstimateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("info_list", type=dict, required=True, nullable=True, location="json")
-        parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
-        parser.add_argument(
-            "indexing_technique",
-            type=str,
-            required=True,
-            choices=Dataset.INDEXING_TECHNIQUE_LIST,
-            nullable=True,
-            location="json",
-        )
-        parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
-        parser.add_argument("dataset_id", type=str, required=False, nullable=False, location="json")
-        parser.add_argument(
-            "doc_language", type=str, default="English", required=False, nullable=False, location="json"
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("info_list", type=dict, required=True, nullable=True, location="json")
+            .add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
+            .add_argument(
+                "indexing_technique",
+                type=str,
+                required=True,
+                choices=Dataset.INDEXING_TECHNIQUE_LIST,
+                nullable=True,
+                location="json",
+            )
+            .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+            .add_argument("dataset_id", type=str, required=False, nullable=False, location="json")
+            .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
         )
         args = parser.parse_args()
+        _, current_tenant_id = current_account_with_tenant()
         # validate args
         DocumentService.estimate_args_validate(args)
         extract_settings = []
         if args["info_list"]["data_source_type"] == "upload_file":
             file_ids = args["info_list"]["file_info_list"]["file_ids"]
             file_details = db.session.scalars(
-                select(UploadFile).where(
-                    UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids)
-                )
+                select(UploadFile).where(UploadFile.tenant_id == current_tenant_id, UploadFile.id.in_(file_ids))
             ).all()
 
             if file_details is None:
@@ -503,7 +579,7 @@ class DatasetIndexingEstimateApi(Resource):
             if file_details:
                 for file_detail in file_details:
                     extract_setting = ExtractSetting(
-                        datasource_type=DatasourceType.FILE.value,
+                        datasource_type=DatasourceType.FILE,
                         upload_file=file_detail,
                         document_model=args["doc_form"],
                     )
@@ -515,14 +591,16 @@ class DatasetIndexingEstimateApi(Resource):
                 credential_id = notion_info.get("credential_id")
                 for page in notion_info["pages"]:
                     extract_setting = ExtractSetting(
-                        datasource_type=DatasourceType.NOTION.value,
-                        notion_info={
-                            "credential_id": credential_id,
-                            "notion_workspace_id": workspace_id,
-                            "notion_obj_id": page["page_id"],
-                            "notion_page_type": page["type"],
-                            "tenant_id": current_user.current_tenant_id,
-                        },
+                        datasource_type=DatasourceType.NOTION,
+                        notion_info=NotionInfo.model_validate(
+                            {
+                                "credential_id": credential_id,
+                                "notion_workspace_id": workspace_id,
+                                "notion_obj_id": page["page_id"],
+                                "notion_page_type": page["type"],
+                                "tenant_id": current_tenant_id,
+                            }
+                        ),
                         document_model=args["doc_form"],
                     )
                     extract_settings.append(extract_setting)
@@ -530,15 +608,17 @@ class DatasetIndexingEstimateApi(Resource):
             website_info_list = args["info_list"]["website_info_list"]
             for url in website_info_list["urls"]:
                 extract_setting = ExtractSetting(
-                    datasource_type=DatasourceType.WEBSITE.value,
-                    website_info={
-                        "provider": website_info_list["provider"],
-                        "job_id": website_info_list["job_id"],
-                        "url": url,
-                        "tenant_id": current_user.current_tenant_id,
-                        "mode": "crawl",
-                        "only_main_content": website_info_list["only_main_content"],
-                    },
+                    datasource_type=DatasourceType.WEBSITE,
+                    website_info=WebsiteInfo.model_validate(
+                        {
+                            "provider": website_info_list["provider"],
+                            "job_id": website_info_list["job_id"],
+                            "url": url,
+                            "tenant_id": current_tenant_id,
+                            "mode": "crawl",
+                            "only_main_content": website_info_list["only_main_content"],
+                        }
+                    ),
                     document_model=args["doc_form"],
                 )
                 extract_settings.append(extract_setting)
@@ -547,7 +627,7 @@ class DatasetIndexingEstimateApi(Resource):
         indexing_runner = IndexingRunner()
         try:
             response = indexing_runner.indexing_estimate(
-                current_user.current_tenant_id,
+                current_tenant_id,
                 extract_settings,
                 args["process_rule"],
                 args["doc_form"],
@@ -578,6 +658,7 @@ class DatasetRelatedAppListApi(Resource):
     @account_initialization_required
     @marshal_with(related_app_list)
     def get(self, dataset_id):
+        current_user, _ = current_account_with_tenant()
         dataset_id_str = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id_str)
         if dataset is None:
@@ -609,11 +690,10 @@ class DatasetIndexingStatusApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id):
+        _, current_tenant_id = current_account_with_tenant()
         dataset_id = str(dataset_id)
         documents = db.session.scalars(
-            select(Document).where(
-                Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id
-            )
+            select(Document).where(Document.dataset_id == dataset_id, Document.tenant_id == current_tenant_id)
         ).all()
         documents_status = []
         for document in documents:
@@ -665,10 +745,9 @@ class DatasetApiKeyApi(Resource):
     @account_initialization_required
     @marshal_with(api_key_list)
     def get(self):
+        _, current_tenant_id = current_account_with_tenant()
         keys = db.session.scalars(
-            select(ApiToken).where(
-                ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id
-            )
+            select(ApiToken).where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id)
         ).all()
         return {"items": keys}
 
@@ -678,17 +757,18 @@ class DatasetApiKeyApi(Resource):
     @marshal_with(api_key_fields)
     def post(self):
         # The role of the current user in the ta table must be admin or owner
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
         current_key_count = (
             db.session.query(ApiToken)
-            .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
+            .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id)
            .count()
         )
 
         if current_key_count >= self.max_keys:
-            flask_restx.abort(
+            api.abort(
                 400,
                 message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
                 code="max_keys_exceeded",
@@ -696,7 +776,7 @@ class DatasetApiKeyApi(Resource):
 
         key = ApiToken.generate_api_key(self.token_prefix, 24)
         api_token = ApiToken()
-        api_token.tenant_id = current_user.current_tenant_id
+        api_token.tenant_id = current_tenant_id
         api_token.token = key
         api_token.type = self.resource_type
         db.session.add(api_token)
@@ -716,6 +796,7 @@ class DatasetApiDeleteApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, api_key_id):
+        current_user, current_tenant_id = current_account_with_tenant()
         api_key_id = str(api_key_id)
 
         # The role of the current user in the ta table must be admin or owner
@@ -725,7 +806,7 @@ class DatasetApiDeleteApi(Resource):
         key = (
             db.session.query(ApiToken)
             .where(
-                ApiToken.tenant_id == current_user.current_tenant_id,
+                ApiToken.tenant_id == current_tenant_id,
                 ApiToken.type == self.resource_type,
                 ApiToken.id == api_key_id,
             )
@@ -733,7 +814,7 @@ class DatasetApiDeleteApi(Resource):
         )
 
         if key is None:
-            flask_restx.abort(404, message="API key not found")
+            api.abort(404, message="API key not found")
 
         db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
         db.session.commit()
@@ -776,49 +857,7 @@ class DatasetRetrievalSettingApi(Resource):
     @account_initialization_required
     def get(self):
         vector_type = dify_config.VECTOR_STORE
-        match vector_type:
-            case (
-                VectorType.RELYT
-                | VectorType.TIDB_VECTOR
-                | VectorType.CHROMA
-                | VectorType.PGVECTO_RS
-                | VectorType.VIKINGDB
-                | VectorType.UPSTASH
-            ):
-                return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
-            case (
-                VectorType.QDRANT
-                | VectorType.WEAVIATE
-                | VectorType.OPENSEARCH
-                | VectorType.ANALYTICDB
-                | VectorType.MYSCALE
-                | VectorType.ORACLE
-                | VectorType.ELASTICSEARCH
-                | VectorType.ELASTICSEARCH_JA
-                | VectorType.PGVECTOR
-                | VectorType.VASTBASE
-                | VectorType.TIDB_ON_QDRANT
-                | VectorType.LINDORM
-                | VectorType.COUCHBASE
-                | VectorType.MILVUS
-                | VectorType.OPENGAUSS
-                | VectorType.OCEANBASE
-                | VectorType.TABLESTORE
-                | VectorType.HUAWEI_CLOUD
-                | VectorType.TENCENT
-                | VectorType.MATRIXONE
-                | VectorType.CLICKZETTA
-                | VectorType.BAIDU
-            ):
-                return {
-                    "retrieval_method": [
-                        RetrievalMethod.SEMANTIC_SEARCH.value,
-                        RetrievalMethod.FULL_TEXT_SEARCH.value,
-                        RetrievalMethod.HYBRID_SEARCH.value,
-                    ]
-                }
-            case _:
-                raise ValueError(f"Unsupported vector db type {vector_type}.")
+        return _get_retrieval_methods_by_vector_type(vector_type, is_mock=False)
 
 
 @console_ns.route("/datasets/retrieval-setting/<vector_type>")
@@ -831,48 +870,7 @@ class DatasetRetrievalSettingMockApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, vector_type):
-        match vector_type:
-            case (
-                VectorType.MILVUS
-                | VectorType.RELYT
-                | VectorType.TIDB_VECTOR
-                | VectorType.CHROMA
-                | VectorType.PGVECTO_RS
-                | VectorType.VIKINGDB
-                | VectorType.UPSTASH
-            ):
-                return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
-            case (
-                VectorType.QDRANT
-                | VectorType.WEAVIATE
-                | VectorType.OPENSEARCH
-                | VectorType.ANALYTICDB
-                | VectorType.MYSCALE
-                | VectorType.ORACLE
-                | VectorType.ELASTICSEARCH
-                | VectorType.ELASTICSEARCH_JA
-                | VectorType.COUCHBASE
-                | VectorType.PGVECTOR
-                | VectorType.VASTBASE
-                | VectorType.LINDORM
-                | VectorType.OPENGAUSS
-                | VectorType.OCEANBASE
-                | VectorType.TABLESTORE
-                | VectorType.TENCENT
-                | VectorType.HUAWEI_CLOUD
-                | VectorType.MATRIXONE
-                | VectorType.CLICKZETTA
-                | VectorType.BAIDU
-            ):
-                return {
-                    "retrieval_method": [
-                        RetrievalMethod.SEMANTIC_SEARCH.value,
-                        RetrievalMethod.FULL_TEXT_SEARCH.value,
-                        RetrievalMethod.HYBRID_SEARCH.value,
-                    ]
-                }
-            case _:
-                raise ValueError(f"Unsupported vector db type {vector_type}.")
+        return _get_retrieval_methods_by_vector_type(vector_type, is_mock=True)
 
 
 @console_ns.route("/datasets/<uuid:dataset_id>/error-docs")
@@ -907,6 +905,7 @@ class DatasetPermissionUserListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id):
+        current_user, _ = current_account_with_tenant()
         dataset_id_str = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id_str)
         if dataset is None:
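The two copies of the match statement collapse into `_get_retrieval_methods_by_vector_type`, with the `is_mock` flag preserving the one historical asymmetry: the mock endpoint reported MILVUS as semantic-only while the live endpoint reported full search. A compressed, runnable sketch of that control flow with stand-in enums (Dify's real `VectorType` lists many more backends):

```python
from enum import Enum


class VectorType(str, Enum):
    MILVUS = "milvus"
    CHROMA = "chroma"
    QDRANT = "qdrant"


SEMANTIC_ONLY = {VectorType.CHROMA}
FULL_SEARCH = {VectorType.QDRANT}


def get_retrieval_methods(vector_type: str | None, is_mock: bool = False) -> dict[str, list[str]]:
    # Same shape as the helper in the diff: MILVUS is the one backend whose
    # reported capabilities differ between the live and mock endpoints.
    if vector_type is None:
        raise ValueError("Vector store type is not configured.")
    semantic = {"retrieval_method": ["semantic_search"]}
    full = {"retrieval_method": ["semantic_search", "full_text_search", "hybrid_search"]}
    if vector_type == VectorType.MILVUS:
        return semantic if is_mock else full
    if vector_type in SEMANTIC_ONLY:
        return semantic
    if vector_type in FULL_SEARCH:
        return full
    raise ValueError(f"Unsupported vector db type {vector_type}.")


print(get_retrieval_methods(VectorType.MILVUS))                # full methods
print(get_retrieval_methods(VectorType.MILVUS, is_mock=True))  # semantic only
```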
diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py
index 6aaede0fb3..85fd0535c7 100644
--- a/api/controllers/console/datasets/datasets_document.py
+++ b/api/controllers/console/datasets/datasets_document.py
@@ -6,7 +6,6 @@ from typing import Literal, cast
 
 import sqlalchemy as sa
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, fields, marshal, marshal_with, reqparse
 from sqlalchemy import asc, desc, select
 from werkzeug.exceptions import Forbidden, NotFound
@@ -44,7 +43,7 @@ from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
 from core.plugin.impl.exc import PluginDaemonClientSideError
 from core.rag.extractor.entity.datasource_type import DatasourceType
-from core.rag.extractor.entity.extract_setting import ExtractSetting
+from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
 from extensions.ext_database import db
 from fields.document_fields import (
     dataset_and_document_fields,
@@ -53,7 +52,7 @@ from fields.document_fields import (
     document_with_segments_fields,
 )
 from libs.datetime_utils import naive_utc_now
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
 from models.dataset import DocumentPipelineExecutionLog
 from services.dataset_service import DatasetService, DocumentService
@@ -64,6 +63,7 @@ logger = logging.getLogger(__name__)
 
 class DocumentResource(Resource):
     def get_document(self, dataset_id: str, document_id: str) -> Document:
+        current_user, current_tenant_id = current_account_with_tenant()
         dataset = DatasetService.get_dataset(dataset_id)
         if not dataset:
             raise NotFound("Dataset not found.")
@@ -78,12 +78,13 @@ class DocumentResource(Resource):
         if not document:
             raise NotFound("Document not found.")
 
-        if document.tenant_id != current_user.current_tenant_id:
+        if document.tenant_id != current_tenant_id:
             raise Forbidden("No permission.")
 
         return document
 
     def get_batch_documents(self, dataset_id: str, batch: str) -> Sequence[Document]:
+        current_user, _ = current_account_with_tenant()
         dataset = DatasetService.get_dataset(dataset_id)
         if not dataset:
             raise NotFound("Dataset not found.")
@@ -111,6 +112,7 @@ class GetProcessRuleApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
+        current_user, _ = current_account_with_tenant()
         req_data = request.args
 
         document_id = req_data.get("document_id")
@@ -167,6 +169,7 @@ class DatasetDocumentListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id):
+        current_user, current_tenant_id = current_account_with_tenant()
         dataset_id = str(dataset_id)
         page = request.args.get("page", default=1, type=int)
         limit = request.args.get("limit", default=20, type=int)
@@ -198,7 +201,7 @@ class DatasetDocumentListApi(Resource):
         except services.errors.account.NoPermissionError as e:
             raise Forbidden(str(e))
 
-        query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id)
+        query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_tenant_id)
 
         if search:
             search = f"%{search}%"
@@ -272,6 +275,7 @@ class DatasetDocumentListApi(Resource):
     @cloud_edition_billing_resource_check("vector_space")
     @cloud_edition_billing_rate_limit_check("knowledge")
     def post(self, dataset_id):
+        current_user, _ = current_account_with_tenant()
         dataset_id = str(dataset_id)
 
         dataset = DatasetService.get_dataset(dataset_id)
@@ -288,23 +292,23 @@ class DatasetDocumentListApi(Resource):
         except services.errors.account.NoPermissionError as e:
             raise Forbidden(str(e))
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
-        )
-        parser.add_argument("data_source", type=dict, required=False, location="json")
-        parser.add_argument("process_rule", type=dict, required=False, location="json")
-        parser.add_argument("duplicate", type=bool, default=True, nullable=False, location="json")
-        parser.add_argument("original_document_id", type=str, required=False, location="json")
-        parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
-        parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
-        parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
-        parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
-        parser.add_argument(
-            "doc_language", type=str, default="English", required=False, nullable=False, location="json"
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
+            )
+            .add_argument("data_source", type=dict, required=False, location="json")
+            .add_argument("process_rule", type=dict, required=False, location="json")
+            .add_argument("duplicate", type=bool, default=True, nullable=False, location="json")
+            .add_argument("original_document_id", type=str, required=False, location="json")
+            .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+            .add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
+            .add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
+            .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
+            .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
         )
         args = parser.parse_args()
-        knowledge_config = KnowledgeConfig(**args)
+        knowledge_config = KnowledgeConfig.model_validate(args)
 
         if not dataset.indexing_technique and not knowledge_config.indexing_technique:
             raise ValueError("indexing_technique is required.")
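`KnowledgeConfig(**args)` becomes `KnowledgeConfig.model_validate(args)`. For well-formed input the two are equivalent; `model_validate` is pydantic v2's canonical entry point for external data and accepts the parsed-args mapping directly instead of requiring keyword unpacking. A sketch with a trimmed stand-in model (the real KnowledgeConfig has many more fields):

```python
from pydantic import BaseModel, ValidationError


class KnowledgeConfigSketch(BaseModel):
    indexing_technique: str | None = None
    doc_form: str = "text_model"
    duplicate: bool = True


args = {"indexing_technique": "high_quality", "doc_form": "text_model", "duplicate": True}

# Equivalent for well-formed input...
a = KnowledgeConfigSketch(**args)
b = KnowledgeConfigSketch.model_validate(args)
assert a == b

# ...but model_validate takes the mapping as a single argument and rejects
# values it cannot coerce, at the API boundary rather than deeper in.
try:
    KnowledgeConfigSketch.model_validate({"duplicate": "not-a-bool"})
except ValidationError as e:
    print(e.error_count(), "validation error")
```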
@@ -371,37 +375,38 @@ class DatasetInitApi(Resource):
     @cloud_edition_billing_rate_limit_check("knowledge")
     def post(self):
         # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_dataset_editor:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "indexing_technique",
-            type=str,
-            choices=Dataset.INDEXING_TECHNIQUE_LIST,
-            required=True,
-            nullable=False,
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "indexing_technique",
+                type=str,
+                choices=Dataset.INDEXING_TECHNIQUE_LIST,
+                required=True,
+                nullable=False,
+                location="json",
+            )
+            .add_argument("data_source", type=dict, required=True, nullable=True, location="json")
+            .add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
+            .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+            .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
+            .add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
+            .add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
+            .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
         )
-        parser.add_argument("data_source", type=dict, required=True, nullable=True, location="json")
-        parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
-        parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
-        parser.add_argument(
-            "doc_language", type=str, default="English", required=False, nullable=False, location="json"
-        )
-        parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
-        parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
-        parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
         args = parser.parse_args()
 
-        knowledge_config = KnowledgeConfig(**args)
+        knowledge_config = KnowledgeConfig.model_validate(args)
         if knowledge_config.indexing_technique == "high_quality":
             if knowledge_config.embedding_model is None or knowledge_config.embedding_model_provider is None:
                 raise ValueError("embedding model and embedding model provider are required for high quality indexing.")
             try:
                 model_manager = ModelManager()
                 model_manager.get_model_instance(
-                    tenant_id=current_user.current_tenant_id,
+                    tenant_id=current_tenant_id,
                     provider=args["embedding_model_provider"],
                     model_type=ModelType.TEXT_EMBEDDING,
                     model=args["embedding_model"],
@@ -418,7 +423,9 @@ class DatasetInitApi(Resource):
 
         try:
             dataset, documents, batch = DocumentService.save_document_without_dataset_id(
-                tenant_id=current_user.current_tenant_id, knowledge_config=knowledge_config, account=current_user
+                tenant_id=current_tenant_id,
+                knowledge_config=knowledge_config,
+                account=current_user,
             )
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
@@ -444,6 +451,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id, document_id):
+        _, current_tenant_id = current_account_with_tenant()
         dataset_id = str(dataset_id)
         document_id = str(document_id)
         document = self.get_document(dataset_id, document_id)
@@ -452,7 +460,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
             raise DocumentAlreadyFinishedError()
 
         data_process_rule = document.dataset_process_rule
-        data_process_rule_dict = data_process_rule.to_dict()
+        data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {}
 
         response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}
 
@@ -472,14 +480,14 @@ class DocumentIndexingEstimateApi(DocumentResource):
                 raise NotFound("File not found.")
 
             extract_setting = ExtractSetting(
-                datasource_type=DatasourceType.FILE.value, upload_file=file, document_model=document.doc_form
+                datasource_type=DatasourceType.FILE, upload_file=file, document_model=document.doc_form
             )
 
             indexing_runner = IndexingRunner()
 
             try:
                 estimate_response = indexing_runner.indexing_estimate(
-                    current_user.current_tenant_id,
+                    current_tenant_id,
                     [extract_setting],
                     data_process_rule_dict,
                     document.doc_form,
@@ -508,13 +516,14 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id, batch):
+        _, current_tenant_id = current_account_with_tenant()
         dataset_id = str(dataset_id)
         batch = str(batch)
         documents = self.get_batch_documents(dataset_id, batch)
         if not documents:
             return {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}, 200
         data_process_rule = documents[0].dataset_process_rule
-        data_process_rule_dict = data_process_rule.to_dict()
+        data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {}
         extract_settings = []
         for document in documents:
             if document.indexing_status in {"completed", "error"}:
@@ -527,7 +536,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
                 file_id = data_source_info["upload_file_id"]
                 file_detail = (
                     db.session.query(UploadFile)
-                    .where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id)
+                    .where(UploadFile.tenant_id == current_tenant_id, UploadFile.id == file_id)
                     .first()
                 )
 
@@ -535,7 +544,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
                     raise NotFound("File not found.")
 
                 extract_setting = ExtractSetting(
-                    datasource_type=DatasourceType.FILE.value, upload_file=file_detail, document_model=document.doc_form
+                    datasource_type=DatasourceType.FILE, upload_file=file_detail, document_model=document.doc_form
                 )
                 extract_settings.append(extract_setting)
@@ -543,14 +552,16 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
                 if not data_source_info:
                     continue
                 extract_setting = ExtractSetting(
-                    datasource_type=DatasourceType.NOTION.value,
-                    notion_info={
-                        "credential_id": data_source_info["credential_id"],
-                        "notion_workspace_id": data_source_info["notion_workspace_id"],
-                        "notion_obj_id": data_source_info["notion_page_id"],
-                        "notion_page_type": data_source_info["type"],
-                        "tenant_id": current_user.current_tenant_id,
-                    },
+                    datasource_type=DatasourceType.NOTION,
+                    notion_info=NotionInfo.model_validate(
+                        {
+                            "credential_id": data_source_info["credential_id"],
+                            "notion_workspace_id": data_source_info["notion_workspace_id"],
+                            "notion_obj_id": data_source_info["notion_page_id"],
+                            "notion_page_type": data_source_info["type"],
+                            "tenant_id": current_tenant_id,
+                        }
+                    ),
                     document_model=document.doc_form,
                 )
                 extract_settings.append(extract_setting)
@@ -558,15 +569,17 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
                 if not data_source_info:
                     continue
                 extract_setting = ExtractSetting(
-                    datasource_type=DatasourceType.WEBSITE.value,
-                    website_info={
-                        "provider": data_source_info["provider"],
-                        "job_id": data_source_info["job_id"],
-                        "url": data_source_info["url"],
-                        "tenant_id": current_user.current_tenant_id,
-                        "mode": data_source_info["mode"],
-                        "only_main_content": data_source_info["only_main_content"],
-                    },
+                    datasource_type=DatasourceType.WEBSITE,
+                    website_info=WebsiteInfo.model_validate(
+                        {
+                            "provider": data_source_info["provider"],
+                            "job_id": data_source_info["job_id"],
+                            "url": data_source_info["url"],
+                            "tenant_id": current_tenant_id,
+                            "mode": data_source_info["mode"],
+                            "only_main_content": data_source_info["only_main_content"],
+                        }
+                    ),
                     document_model=document.doc_form,
                 )
                 extract_settings.append(extract_setting)
@@ -576,7 +589,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
         indexing_runner = IndexingRunner()
         try:
             response = indexing_runner.indexing_estimate(
-                current_user.current_tenant_id,
+                current_tenant_id,
                 extract_settings,
                 data_process_rule_dict,
                 document.doc_form,
@@ -753,7 +766,7 @@ class DocumentApi(DocumentResource):
             }
         else:
             dataset_process_rules = DatasetService.get_process_rules(dataset_id)
-            document_process_rules = document.dataset_process_rule.to_dict()
+            document_process_rules = document.dataset_process_rule.to_dict() if document.dataset_process_rule else {}
             data_source_info = document.data_source_detail_dict
             response = {
                 "id": document.id,
@@ -827,6 +840,7 @@ class DocumentProcessingApi(DocumentResource):
     @account_initialization_required
     @cloud_edition_billing_rate_limit_check("knowledge")
     def patch(self, dataset_id, document_id, action: Literal["pause", "resume"]):
+        current_user, _ = current_account_with_tenant()
         dataset_id = str(dataset_id)
         document_id = str(document_id)
         document = self.get_document(dataset_id, document_id)
@@ -877,6 +891,7 @@ class DocumentMetadataApi(DocumentResource):
     @login_required
     @account_initialization_required
     def put(self, dataset_id, document_id):
+        current_user, _ = current_account_with_tenant()
         dataset_id = str(dataset_id)
         document_id = str(document_id)
         document = self.get_document(dataset_id, document_id)
@@ -924,6 +939,7 @@ class DocumentStatusApi(DocumentResource):
     @cloud_edition_billing_resource_check("vector_space")
     @cloud_edition_billing_rate_limit_check("knowledge")
     def patch(self, dataset_id, action: Literal["enable", "disable", "archive", "un_archive"]):
+        current_user, _ = current_account_with_tenant()
         dataset_id = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id)
         if dataset is None:
@@ -1027,8 +1043,9 @@ class DocumentRetryApi(DocumentResource):
     def post(self, dataset_id):
         """retry document."""
-        parser = reqparse.RequestParser()
-        parser.add_argument("document_ids", type=list, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "document_ids", type=list, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
         dataset_id = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id)
@@ -1070,12 +1087,14 @@ class DocumentRenameApi(DocumentResource):
     @marshal_with(document_fields)
     def post(self, dataset_id, document_id):
         # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
+        current_user, _ = current_account_with_tenant()
         if not current_user.is_dataset_editor:
             raise Forbidden()
         dataset = DatasetService.get_dataset(dataset_id)
+        if not dataset:
+            raise NotFound("Dataset not found.")
         DatasetService.check_dataset_operator_permission(current_user, dataset)
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument("name", type=str, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         try:
@@ -1093,6 +1112,7 @@ class WebsiteDocumentSyncApi(DocumentResource):
     @account_initialization_required
     def get(self, dataset_id, document_id):
         """sync website document."""
+        _, current_tenant_id = current_account_with_tenant()
         dataset_id = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id)
         if not dataset:
@@ -1101,7 +1121,7 @@ class WebsiteDocumentSyncApi(DocumentResource):
         document = DocumentService.get_document(dataset.id, document_id)
         if not document:
             raise NotFound("Document not found.")
-        if document.tenant_id != current_user.current_tenant_id:
+        if document.tenant_id != current_tenant_id:
             raise Forbidden("No permission.")
         if document.data_source_type != "website_crawl":
             raise ValueError("Document is not a website document.")
diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py
index ba552821d2..2fe7d42e46 100644
--- a/api/controllers/console/datasets/datasets_segments.py
+++ b/api/controllers/console/datasets/datasets_segments.py
@@ -1,7 +1,6 @@
 import uuid
 
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, marshal, reqparse
 from sqlalchemy import select
 from werkzeug.exceptions import Forbidden, NotFound
@@ -27,7 +26,7 @@ from core.model_runtime.entities.model_entities import ModelType
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from fields.segment_fields import child_chunk_fields, segment_fields
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models.dataset import ChildChunk, DocumentSegment
 from models.model import UploadFile
 from services.dataset_service import DatasetService, DocumentService, SegmentService
@@ -43,6 +42,8 @@ class DatasetDocumentSegmentListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, dataset_id, document_id):
+        current_user, current_tenant_id = current_account_with_tenant()
+
         dataset_id = str(dataset_id)
         document_id = str(document_id)
         dataset = DatasetService.get_dataset(dataset_id)
@@ -59,13 +60,15 @@ class DatasetDocumentSegmentListApi(Resource):
         if not document:
             raise NotFound("Document not found.")
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("limit", type=int, default=20, location="args")
-
parser.add_argument("status", type=str, action="append", default=[], location="args") - parser.add_argument("hit_count_gte", type=int, default=None, location="args") - parser.add_argument("enabled", type=str, default="all", location="args") - parser.add_argument("keyword", type=str, default=None, location="args") - parser.add_argument("page", type=int, default=1, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("limit", type=int, default=20, location="args") + .add_argument("status", type=str, action="append", default=[], location="args") + .add_argument("hit_count_gte", type=int, default=None, location="args") + .add_argument("enabled", type=str, default="all", location="args") + .add_argument("keyword", type=str, default=None, location="args") + .add_argument("page", type=int, default=1, location="args") + ) args = parser.parse_args() @@ -79,7 +82,7 @@ class DatasetDocumentSegmentListApi(Resource): select(DocumentSegment) .where( DocumentSegment.document_id == str(document_id), - DocumentSegment.tenant_id == current_user.current_tenant_id, + DocumentSegment.tenant_id == current_tenant_id, ) .order_by(DocumentSegment.position.asc()) ) @@ -115,6 +118,8 @@ class DatasetDocumentSegmentListApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id): + current_user, _ = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -148,6 +153,8 @@ class DatasetDocumentSegmentApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, action): + current_user, current_tenant_id = current_account_with_tenant() + dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if not dataset: @@ -171,7 +178,7 @@ class DatasetDocumentSegmentApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -204,6 +211,8 @@ class DatasetDocumentSegmentAddApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -221,7 +230,7 @@ class DatasetDocumentSegmentAddApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -237,10 +246,12 @@ class DatasetDocumentSegmentAddApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") - parser.add_argument("answer", type=str, required=False, nullable=True, location="json") - parser.add_argument("keywords", type=list, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("content", type=str, required=True, nullable=False, location="json") + .add_argument("answer", 
type=str, required=False, nullable=True, location="json") + .add_argument("keywords", type=list, required=False, nullable=True, location="json") + ) args = parser.parse_args() SegmentService.segment_create_args_validate(args, document) segment = SegmentService.create_segment(args, document, dataset) @@ -255,6 +266,8 @@ class DatasetDocumentSegmentUpdateApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -272,7 +285,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -287,7 +300,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -300,16 +313,18 @@ class DatasetDocumentSegmentUpdateApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") - parser.add_argument("answer", type=str, required=False, nullable=True, location="json") - parser.add_argument("keywords", type=list, required=False, nullable=True, location="json") - parser.add_argument( - "regenerate_child_chunks", type=bool, required=False, nullable=True, default=False, location="json" + parser = ( + reqparse.RequestParser() + .add_argument("content", type=str, required=True, nullable=False, location="json") + .add_argument("answer", type=str, required=False, nullable=True, location="json") + .add_argument("keywords", type=list, required=False, nullable=True, location="json") + .add_argument( + "regenerate_child_chunks", type=bool, required=False, nullable=True, default=False, location="json" + ) ) args = parser.parse_args() SegmentService.segment_create_args_validate(args, document) - segment = SegmentService.update_segment(SegmentUpdateArgs(**args), segment, document, dataset) + segment = SegmentService.update_segment(SegmentUpdateArgs.model_validate(args), segment, document, dataset) return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200 @setup_required @@ -317,6 +332,8 @@ class DatasetDocumentSegmentUpdateApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -333,7 +350,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not 
segment: @@ -361,6 +378,8 @@ class DatasetDocumentSegmentBatchImportApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -372,8 +391,9 @@ class DatasetDocumentSegmentBatchImportApi(Resource): if not document: raise NotFound("Document not found.") - parser = reqparse.RequestParser() - parser.add_argument("upload_file_id", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "upload_file_id", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() upload_file_id = args["upload_file_id"] @@ -392,7 +412,12 @@ class DatasetDocumentSegmentBatchImportApi(Resource): # send batch add segments task redis_client.setnx(indexing_cache_key, "waiting") batch_create_segment_to_index_task.delay( - str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id + str(job_id), + upload_file_id, + dataset_id, + document_id, + current_tenant_id, + current_user.id, ) except Exception as e: return {"error": str(e)}, 500 @@ -422,6 +447,8 @@ class ChildChunkAddApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -436,7 +463,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -448,7 +475,7 @@ class ChildChunkAddApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -464,11 +491,13 @@ class ChildChunkAddApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() try: - child_chunk = SegmentService.create_child_chunk(args.get("content"), segment, document, dataset) + content = args["content"] + child_chunk = SegmentService.create_child_chunk(content, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 @@ -477,6 +506,8 @@ class ChildChunkAddApi(Resource): @login_required @account_initialization_required def get(self, dataset_id, document_id, segment_id): + _, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -493,15 +524,17 @@ class ChildChunkAddApi(Resource): segment_id = 
str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: raise NotFound("Segment not found.") - parser = reqparse.RequestParser() - parser.add_argument("limit", type=int, default=20, location="args") - parser.add_argument("keyword", type=str, default=None, location="args") - parser.add_argument("page", type=int, default=1, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("limit", type=int, default=20, location="args") + .add_argument("keyword", type=str, default=None, location="args") + .add_argument("page", type=int, default=1, location="args") + ) args = parser.parse_args() @@ -524,6 +557,8 @@ class ChildChunkAddApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -540,7 +575,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -553,11 +588,13 @@ class ChildChunkAddApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("chunks", type=list, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "chunks", type=list, required=True, nullable=False, location="json" + ) args = parser.parse_args() try: - chunks = [ChildChunkUpdateArgs(**chunk) for chunk in args.get("chunks")] + chunks_data = args["chunks"] + chunks = [ChildChunkUpdateArgs.model_validate(chunk) for chunk in chunks_data] child_chunks = SegmentService.update_child_chunks(chunks, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) @@ -573,6 +610,8 @@ class ChildChunkUpdateApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id, child_chunk_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -589,7 +628,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -600,7 +639,7 @@ class ChildChunkUpdateApi(Resource): db.session.query(ChildChunk) .where( ChildChunk.id == str(child_chunk_id), - ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.tenant_id == current_tenant_id, ChildChunk.segment_id == segment.id, ChildChunk.document_id == document_id, ) @@ -627,6 +666,8 @@ class ChildChunkUpdateApi(Resource): @cloud_edition_billing_resource_check("vector_space") 
@cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id, child_chunk_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -643,7 +684,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -654,7 +695,7 @@ class ChildChunkUpdateApi(Resource): db.session.query(ChildChunk) .where( ChildChunk.id == str(child_chunk_id), - ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.tenant_id == current_tenant_id, ChildChunk.segment_id == segment.id, ChildChunk.document_id == document_id, ) @@ -670,13 +711,13 @@ class ChildChunkUpdateApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() try: - child_chunk = SegmentService.update_child_chunk( - args.get("content"), child_chunk, segment, document, dataset - ) + content = args["content"] + child_chunk = SegmentService.update_child_chunk(content, child_chunk, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index e8f5a11b41..4f738db0e5 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -1,5 +1,4 @@ from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound @@ -8,14 +7,14 @@ from controllers.console import api, console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import account_initialization_required, setup_required from fields.dataset_fields import dataset_detail_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.dataset_service import DatasetService from services.external_knowledge_service import ExternalDatasetService from services.hit_testing_service import HitTestingService from services.knowledge_service import ExternalDatasetTestService -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 100: raise ValueError("Name must be between 1 to 100 characters.") return name @@ -37,12 +36,13 @@ class ExternalApiTemplateListApi(Resource): @login_required @account_initialization_required def get(self): + _, current_tenant_id = current_account_with_tenant() page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) search = request.args.get("keyword", default=None, type=str) external_knowledge_apis, total = ExternalDatasetService.get_external_knowledge_apis( - page, limit, 
current_user.current_tenant_id, search + page, limit, current_tenant_id, search ) response = { "data": [item.to_dict() for item in external_knowledge_apis], @@ -57,20 +57,23 @@ class ExternalApiTemplateListApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="Name is required. Name must be between 1 to 100 characters.", - type=_validate_name, - ) - parser.add_argument( - "settings", - type=dict, - location="json", - nullable=False, - required=True, + current_user, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="Name is required. Name must be between 1 to 100 characters.", + type=_validate_name, + ) + .add_argument( + "settings", + type=dict, + location="json", + nullable=False, + required=True, + ) ) args = parser.parse_args() @@ -82,7 +85,7 @@ class ExternalApiTemplateListApi(Resource): try: external_knowledge_api = ExternalDatasetService.create_external_knowledge_api( - tenant_id=current_user.current_tenant_id, user_id=current_user.id, args=args + tenant_id=current_tenant_id, user_id=current_user.id, args=args ) except services.errors.dataset.DatasetNameDuplicateError: raise DatasetNameDuplicateError() @@ -112,28 +115,31 @@ class ExternalApiTemplateApi(Resource): @login_required @account_initialization_required def patch(self, external_knowledge_api_id): + current_user, current_tenant_id = current_account_with_tenant() external_knowledge_api_id = str(external_knowledge_api_id) - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="type is required. Name must be between 1 to 100 characters.", - type=_validate_name, - ) - parser.add_argument( - "settings", - type=dict, - location="json", - nullable=False, - required=True, + parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="Name is required.
Name must be between 1 to 100 characters.", + type=_validate_name, + ) + .add_argument( + "settings", + type=dict, + location="json", + nullable=False, + required=True, + ) ) args = parser.parse_args() ExternalDatasetService.validate_api_list(args["settings"]) external_knowledge_api = ExternalDatasetService.update_external_knowledge_api( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, user_id=current_user.id, external_knowledge_api_id=external_knowledge_api_id, args=args, @@ -145,13 +151,13 @@ class ExternalApiTemplateApi(Resource): @login_required @account_initialization_required def delete(self, external_knowledge_api_id): + current_user, current_tenant_id = current_account_with_tenant() external_knowledge_api_id = str(external_knowledge_api_id) - # The role of the current user in the ta table must be admin, owner, or editor - if not (current_user.is_editor or current_user.is_dataset_operator): + if not (current_user.has_edit_permission or current_user.is_dataset_operator): raise Forbidden() - ExternalDatasetService.delete_external_knowledge_api(current_user.current_tenant_id, external_knowledge_api_id) + ExternalDatasetService.delete_external_knowledge_api(current_tenant_id, external_knowledge_api_id) return {"result": "success"}, 204 @@ -196,21 +202,24 @@ class ExternalDatasetCreateApi(Resource): @account_initialization_required def post(self): # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "name", - nullable=False, - required=True, - help="name is required. Name must be between 1 to 100 characters.", - type=_validate_name, + parser = ( + reqparse.RequestParser() + .add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json") + .add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json") + .add_argument( + "name", + nullable=False, + required=True, + help="name is required. 
Name must be between 1 to 100 characters.", + type=_validate_name, + ) + .add_argument("description", type=str, required=False, nullable=True, location="json") + .add_argument("external_retrieval_model", type=dict, required=False, location="json") ) - parser.add_argument("description", type=str, required=False, nullable=True, location="json") - parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") args = parser.parse_args() @@ -220,7 +229,7 @@ class ExternalDatasetCreateApi(Resource): try: dataset = ExternalDatasetService.create_external_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, user_id=current_user.id, args=args, ) @@ -252,6 +261,7 @@ class ExternalKnowledgeHitTestingApi(Resource): @login_required @account_initialization_required def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -262,10 +272,12 @@ class ExternalKnowledgeHitTestingApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - parser = reqparse.RequestParser() - parser.add_argument("query", type=str, location="json") - parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") - parser.add_argument("metadata_filtering_conditions", type=dict, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("query", type=str, location="json") + .add_argument("external_retrieval_model", type=dict, required=False, location="json") + .add_argument("metadata_filtering_conditions", type=dict, required=False, location="json") + ) args = parser.parse_args() HitTestingService.hit_testing_args_check(args) @@ -301,15 +313,17 @@ class BedrockRetrievalApi(Resource): ) @api.response(200, "Bedrock retrieval test completed") def post(self): - parser = reqparse.RequestParser() - parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json") - parser.add_argument( - "query", - nullable=False, - required=True, - type=str, + parser = ( + reqparse.RequestParser() + .add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json") + .add_argument( + "query", + nullable=False, + required=True, + type=str, + ) + .add_argument("knowledge_id", nullable=False, required=True, type=str) ) - parser.add_argument("knowledge_id", nullable=False, required=True, type=str) args = parser.parse_args() # Call the knowledge retrieval service diff --git a/api/controllers/console/datasets/hit_testing_base.py b/api/controllers/console/datasets/hit_testing_base.py index cfbfc50873..99d4d5a29c 100644 --- a/api/controllers/console/datasets/hit_testing_base.py +++ b/api/controllers/console/datasets/hit_testing_base.py @@ -1,10 +1,9 @@ import logging -from flask_login import current_user from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound -import services.dataset_service +import services from controllers.console.app.error import ( CompletionRequestError, ProviderModelCurrentlyNotSupportError, @@ -20,6 +19,8 @@ from core.errors.error import ( ) from core.model_runtime.errors.invoke import InvokeError from fields.hit_testing_fields import hit_testing_record_fields +from libs.login import current_user +from models.account import Account from services.dataset_service import DatasetService from services.hit_testing_service import HitTestingService @@ 
-29,6 +30,7 @@ logger = logging.getLogger(__name__) class DatasetsHitTestingBase: @staticmethod def get_and_validate_dataset(dataset_id: str): + assert isinstance(current_user, Account) dataset = DatasetService.get_dataset(dataset_id) if dataset is None: raise NotFound("Dataset not found.") @@ -46,15 +48,17 @@ class DatasetsHitTestingBase: @staticmethod def parse_args(): - parser = reqparse.RequestParser() - - parser.add_argument("query", type=str, location="json") - parser.add_argument("retrieval_model", type=dict, required=False, location="json") - parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("query", type=str, location="json") + .add_argument("retrieval_model", type=dict, required=False, location="json") + .add_argument("external_retrieval_model", type=dict, required=False, location="json") + ) return parser.parse_args() @staticmethod def perform_hit_testing(dataset, args): + assert isinstance(current_user, Account) try: response = HitTestingService.retrieve( dataset=dataset, diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py index 53dc80eaa5..72b2ff0ff8 100644 --- a/api/controllers/console/datasets/metadata.py +++ b/api/controllers/console/datasets/metadata.py @@ -1,13 +1,12 @@ from typing import Literal -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import NotFound from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from fields.dataset_fields import dataset_metadata_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.dataset_service import DatasetService from services.entities.knowledge_entities.knowledge_entities import ( MetadataArgs, @@ -24,11 +23,14 @@ class DatasetMetadataCreateApi(Resource): @enterprise_license_required @marshal_with(dataset_metadata_fields) def post(self, dataset_id): - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() - metadata_args = MetadataArgs(**args) + metadata_args = MetadataArgs.model_validate(args) dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -59,9 +61,10 @@ class DatasetMetadataApi(Resource): @enterprise_license_required @marshal_with(dataset_metadata_fields) def patch(self, dataset_id, metadata_id): - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, nullable=False, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, nullable=False, location="json") args = parser.parse_args() + name = args["name"] dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) @@ -70,7 +73,7 @@ class DatasetMetadataApi(Resource): raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - metadata = 
MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) + metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, name) return metadata, 200 @setup_required @@ -78,6 +81,7 @@ class DatasetMetadataApi(Resource): @account_initialization_required @enterprise_license_required def delete(self, dataset_id, metadata_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -107,6 +111,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource): @account_initialization_required @enterprise_license_required def post(self, dataset_id, action: Literal["enable", "disable"]): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -127,16 +132,18 @@ class DocumentMetadataEditApi(Resource): @account_initialization_required @enterprise_license_required def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - parser = reqparse.RequestParser() - parser.add_argument("operation_data", type=list, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "operation_data", type=list, required=True, nullable=False, location="json" + ) args = parser.parse_args() - metadata_args = MetadataOperationData(**args) + metadata_args = MetadataOperationData.model_validate(args) MetadataService.update_documents_metadata(dataset, metadata_args) diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py index 154d9e646b..2111ee2ecf 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py @@ -1,19 +1,15 @@ -from fastapi.encoders import jsonable_encoder from flask import make_response, redirect, request -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden, NotFound from configs import dify_config from controllers.console import console_ns -from controllers.console.wraps import ( - account_initialization_required, - setup_required, -) +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.oauth import OAuthHandler from libs.helper import StrLen -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.provider_ids import DatasourceProviderID from services.datasource_provider_service import DatasourceProviderService from services.plugin.oauth_service import OAuthProxyService @@ -24,11 +20,11 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, provider_id: str): - user = current_user - tenant_id = user.current_tenant_id - if not current_user.is_editor: - raise Forbidden() + current_user, current_tenant_id = current_account_with_tenant() + + tenant_id = 
current_tenant_id credential_id = request.args.get("credential_id") datasource_provider_id = DatasourceProviderID(provider_id) @@ -52,7 +48,7 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource): redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/datasource/callback" authorization_url_response = oauth_handler.get_authorization_url( tenant_id=tenant_id, - user_id=user.id, + user_id=current_user.id, plugin_id=plugin_id, provider=provider_name, redirect_uri=redirect_uri, @@ -130,22 +126,24 @@ class DatasourceAuth(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() + _, current_tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument( - "name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None + parser = ( + reqparse.RequestParser() + .add_argument( + "name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None + ) + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") ) - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") args = parser.parse_args() datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() try: datasource_provider_service.add_datasource_api_key_provider( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider_id=datasource_provider_id, credentials=args["credentials"], name=args["name"], @@ -160,8 +158,10 @@ class DatasourceAuth(Resource): def get(self, provider_id: str): datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() + _, current_tenant_id = current_account_with_tenant() + datasources = datasource_provider_service.list_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=datasource_provider_id.provider_name, plugin_id=datasource_provider_id.plugin_id, ) @@ -173,18 +173,21 @@ class DatasourceAuthDeleteApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = DatasourceProviderID(provider_id) plugin_id = datasource_provider_id.plugin_id provider_name = datasource_provider_id.provider_name - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + + parser = reqparse.RequestParser().add_argument( + "credential_id", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() datasource_provider_service = DatasourceProviderService() datasource_provider_service.remove_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, auth_id=args["credential_id"], provider=provider_name, plugin_id=plugin_id, @@ -197,18 +200,22 @@ class DatasourceAuthUpdateApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = DatasourceProviderID(provider_id) - parser = reqparse.RequestParser() - 
parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") - parser.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json") - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=False, nullable=True, location="json") + .add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json") + .add_argument("credential_id", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() - if not current_user.is_editor: - raise Forbidden() + datasource_provider_service = DatasourceProviderService() datasource_provider_service.update_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, auth_id=args["credential_id"], provider=datasource_provider_id.provider_name, plugin_id=datasource_provider_id.plugin_id, @@ -224,10 +231,10 @@ class DatasourceAuthListApi(Resource): @login_required @account_initialization_required def get(self): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_service = DatasourceProviderService() - datasources = datasource_provider_service.get_all_datasource_credentials( - tenant_id=current_user.current_tenant_id - ) + datasources = datasource_provider_service.get_all_datasource_credentials(tenant_id=current_tenant_id) return {"result": jsonable_encoder(datasources)}, 200 @@ -237,10 +244,10 @@ class DatasourceHardCodeAuthListApi(Resource): @login_required @account_initialization_required def get(self): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_service = DatasourceProviderService() - datasources = datasource_provider_service.get_hard_code_datasource_credentials( - tenant_id=current_user.current_tenant_id - ) + datasources = datasource_provider_service.get_hard_code_datasource_credentials(tenant_id=current_tenant_id) return {"result": jsonable_encoder(datasources)}, 200 @@ -249,17 +256,20 @@ class DatasourceAuthOauthCustomClient(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json") - parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") + _, current_tenant_id = current_account_with_tenant() + + parser = ( + reqparse.RequestParser() + .add_argument("client_params", type=dict, required=False, nullable=True, location="json") + .add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") + ) args = parser.parse_args() datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() datasource_provider_service.setup_oauth_custom_client_params( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, client_params=args.get("client_params", {}), enabled=args.get("enable_oauth_custom_client", False), @@ -270,10 +280,12 @@ class DatasourceAuthOauthCustomClient(Resource): @login_required @account_initialization_required def delete(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = 
DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() datasource_provider_service.remove_oauth_custom_client_params( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, ) return {"result": "success"}, 200 @@ -284,16 +296,16 @@ class DatasourceAuthDefaultApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("id", type=str, required=True, nullable=False, location="json") + _, current_tenant_id = current_account_with_tenant() + + parser = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() datasource_provider_service.set_default_datasource_provider( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, credential_id=args["id"], ) @@ -305,17 +317,20 @@ class DatasourceUpdateProviderNameApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json") - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + _, current_tenant_id = current_account_with_tenant() + + parser = ( + reqparse.RequestParser() + .add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json") + .add_argument("credential_id", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() datasource_provider_service.update_datasource_provider_name( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, name=args["name"], credential_id=args["credential_id"], diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py index 6c04cc877a..d413def27f 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py @@ -4,7 +4,7 @@ from flask_restx import ( # type: ignore ) from werkzeug.exceptions import Forbidden -from controllers.console import console_ns +from controllers.console import api, console_ns from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required from libs.login import current_user, login_required @@ -12,9 +12,17 @@ from models import Account from models.dataset import Pipeline from services.rag_pipeline.rag_pipeline import RagPipelineService +parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("credential_id", type=str, required=False, 
location="json") +) + @console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/preview") class DataSourceContentPreviewApi(Resource): + @api.expect(parser) @setup_required @login_required @account_initialization_required @@ -26,10 +34,6 @@ class DataSourceContentPreviewApi(Resource): if not isinstance(current_user, Account): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("credential_id", type=str, required=False, location="json") args = parser.parse_args() inputs = args.get("inputs") diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py index 6641911243..f589bba3bf 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py @@ -20,13 +20,13 @@ from services.rag_pipeline.rag_pipeline import RagPipelineService logger = logging.getLogger(__name__) -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 40: raise ValueError("Name must be between 1 to 40 characters.") return name -def _validate_description_length(description): +def _validate_description_length(description: str) -> str: if len(description) > 400: raise ValueError("Description cannot exceed 400 characters.") return description @@ -66,29 +66,31 @@ class CustomizedPipelineTemplateApi(Resource): @account_initialization_required @enterprise_license_required def patch(self, template_id: str): - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="Name must be between 1 to 40 characters.", - type=_validate_name, - ) - parser.add_argument( - "description", - type=str, - nullable=True, - required=False, - default="", - ) - parser.add_argument( - "icon_info", - type=dict, - location="json", - nullable=True, + parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument( + "description", + type=_validate_description_length, + nullable=True, + required=False, + default="", + ) + .add_argument( + "icon_info", + type=dict, + location="json", + nullable=True, + ) ) args = parser.parse_args() - pipeline_template_info = PipelineTemplateInfoEntity(**args) + pipeline_template_info = PipelineTemplateInfoEntity.model_validate(args) RagPipelineService.update_customized_pipeline_template(template_id, pipeline_template_info) return 200 @@ -123,26 +125,28 @@ class PublishCustomizedPipelineTemplateApi(Resource): @enterprise_license_required @knowledge_pipeline_publish_enabled def post(self, pipeline_id: str): - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="Name must be between 1 to 40 characters.", - type=_validate_name, - ) - parser.add_argument( - "description", - type=str, - nullable=True, - required=False, - default="", - ) - parser.add_argument( - "icon_info", - type=dict, - location="json", - nullable=True, + parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument( + "description", + type=_validate_description_length,
nullable=True, + required=False, + default="", + ) + .add_argument( + "icon_info", + type=dict, + location="json", + nullable=True, + ) ) args = parser.parse_args() rag_pipeline_service = RagPipelineService() diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py index c741bfbf82..98876e9f5e 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py @@ -1,5 +1,4 @@ -from flask_login import current_user # type: ignore # type: ignore -from flask_restx import Resource, marshal, reqparse # type: ignore +from flask_restx import Resource, marshal, reqparse from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden @@ -13,25 +12,13 @@ from controllers.console.wraps import ( ) from extensions.ext_database import db from fields.dataset_fields import dataset_detail_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.dataset import DatasetPermissionEnum from services.dataset_service import DatasetPermissionService, DatasetService from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, RagPipelineDatasetCreateEntity from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService -def _validate_name(name): - if not name or len(name) < 1 or len(name) > 40: - raise ValueError("Name must be between 1 to 40 characters.") - return name - - -def _validate_description_length(description): - if len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - @console_ns.route("/rag/pipeline/dataset") class CreateRagPipelineDatasetApi(Resource): @setup_required @@ -39,9 +26,7 @@ class CreateRagPipelineDatasetApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def post(self): - parser = reqparse.RequestParser() - - parser.add_argument( + parser = reqparse.RequestParser().add_argument( "yaml_content", type=str, nullable=False, @@ -50,7 +35,7 @@ class CreateRagPipelineDatasetApi(Resource): ) args = parser.parse_args() - + current_user, current_tenant_id = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator if not current_user.is_dataset_editor: raise Forbidden() @@ -70,12 +55,12 @@ class CreateRagPipelineDatasetApi(Resource): with Session(db.engine) as session: rag_pipeline_dsl_service = RagPipelineDslService(session) import_info = rag_pipeline_dsl_service.create_rag_pipeline_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, rag_pipeline_dataset_create_entity=rag_pipeline_dataset_create_entity, ) if rag_pipeline_dataset_create_entity.permission == "partial_members": DatasetPermissionService.update_partial_member_list( - current_user.current_tenant_id, + current_tenant_id, import_info["dataset_id"], rag_pipeline_dataset_create_entity.partial_member_list, ) @@ -93,10 +78,12 @@ class CreateEmptyRagPipelineDatasetApi(Resource): @cloud_edition_billing_rate_limit_check("knowledge") def post(self): # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_dataset_editor: raise Forbidden() dataset = DatasetService.create_empty_rag_pipeline_dataset( - 
tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, rag_pipeline_dataset_create_entity=RagPipelineDatasetCreateEntity( name="", description="", diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py index 38f75402a8..858ba94bf8 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py @@ -1,5 +1,5 @@ import logging -from typing import Any, NoReturn +from typing import NoReturn from flask import Response from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse @@ -11,21 +11,19 @@ from controllers.console.app.error import ( DraftWorkflowNotExist, ) from controllers.console.app.workflow_draft_variable import ( - _WORKFLOW_DRAFT_VARIABLE_FIELDS, - _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_FIELDS, # type: ignore[private-usage] + _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, # type: ignore[private-usage] ) from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required from controllers.web.error import InvalidArgumentError, NotFoundError -from core.variables.segment_group import SegmentGroup -from core.variables.segments import ArrayFileSegment, FileSegment, Segment from core.variables.types import SegmentType from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID from extensions.ext_database import db from factories.file_factory import build_from_mapping, build_from_mappings from factories.variable_factory import build_segment_with_type from libs.login import current_user, login_required -from models.account import Account +from models import Account from models.dataset import Pipeline from models.workflow import WorkflowDraftVariable from services.rag_pipeline.rag_pipeline import RagPipelineService @@ -34,43 +32,19 @@ from services.workflow_draft_variable_service import WorkflowDraftVariableList, logger = logging.getLogger(__name__) -def _convert_values_to_json_serializable_object(value: Segment) -> Any: - if isinstance(value, FileSegment): - return value.value.model_dump() - elif isinstance(value, ArrayFileSegment): - return [i.model_dump() for i in value.value] - elif isinstance(value, SegmentGroup): - return [_convert_values_to_json_serializable_object(i) for i in value.value] - else: - return value.value - - -def _serialize_var_value(variable: WorkflowDraftVariable) -> Any: - value = variable.get_value() - # create a copy of the value to avoid affecting the model cache. - value = value.model_copy(deep=True) - # Refresh the url signature before returning it to client. 
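
[Note on the recurring pattern: the removed `_serialize_var_value` helper continues below; just after it, `_create_pagination_parser` shows the fluent RequestParser style this PR applies throughout. The chained form is behavior-identical to the statement-per-call form it replaces, because flask-restx's `RequestParser.add_argument` returns the parser itself. A minimal sketch with illustrative argument names, not taken from this PR:

    from flask_restx import reqparse

    # Statement style (old): each call mutates `parser` in place.
    parser = reqparse.RequestParser()
    parser.add_argument("page", type=int, default=1, location="args")
    parser.add_argument("limit", type=int, default=20, location="args")

    # Fluent style (new): add_argument returns self, so the calls chain and
    # the whole parser reads as one declarative expression.
    parser = (
        reqparse.RequestParser()
        .add_argument("page", type=int, default=1, location="args")
        .add_argument("limit", type=int, default=20, location="args")
    )
    args = parser.parse_args()  # requires an active Flask request context

Either form yields the same parser; the chained style simply removes the repeated `parser.` statements that the autofix tooling would otherwise have to rewrite one by one.]
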
- if isinstance(value, FileSegment): - file = value.value - file.remote_url = file.generate_url() - elif isinstance(value, ArrayFileSegment): - files = value.value - for file in files: - file.remote_url = file.generate_url() - return _convert_values_to_json_serializable_object(value) - - def _create_pagination_parser(): - parser = reqparse.RequestParser() - parser.add_argument( - "page", - type=inputs.int_range(1, 100_000), - required=False, - default=1, - location="args", - help="the page of data requested", + parser = ( + reqparse.RequestParser() + .add_argument( + "page", + type=inputs.int_range(1, 100_000), + required=False, + default=1, + location="args", + help="the page of data requested", + ) + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") ) - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") return parser @@ -104,7 +78,7 @@ def _api_prerequisite(f): @account_initialization_required @get_rag_pipeline def wrapper(*args, **kwargs): - if not isinstance(current_user, Account) or not current_user.is_editor: + if not isinstance(current_user, Account) or not current_user.has_edit_permission: raise Forbidden() return f(*args, **kwargs) @@ -234,10 +208,11 @@ class RagPipelineVariableApi(Resource): # "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4" # } - parser = reqparse.RequestParser() - parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") - # Parse 'value' field as-is to maintain its original data structure - parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") + .add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json") + ) draft_var_srv = WorkflowDraftVariableService( session=db.session(), diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py index e0b918456b..2c28120e65 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py @@ -1,6 +1,3 @@ -from typing import cast - -from flask_login import current_user # type: ignore from flask_restx import Resource, marshal_with, reqparse # type: ignore from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden @@ -13,8 +10,7 @@ from controllers.console.wraps import ( ) from extensions.ext_database import db from fields.rag_pipeline_fields import pipeline_import_check_dependencies_fields, pipeline_import_fields -from libs.login import login_required -from models import Account +from libs.login import current_account_with_tenant, login_required from models.dataset import Pipeline from services.app_dsl_service import ImportStatus from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService @@ -28,26 +24,29 @@ class RagPipelineImportApi(Resource): @marshal_with(pipeline_import_fields) def post(self): # Check user role first - if not current_user.is_editor: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("mode", type=str, required=True, location="json") - parser.add_argument("yaml_content", type=str, 
location="json") - parser.add_argument("yaml_url", type=str, location="json") - parser.add_argument("name", type=str, location="json") - parser.add_argument("description", type=str, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") - parser.add_argument("pipeline_id", type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("mode", type=str, required=True, location="json") + .add_argument("yaml_content", type=str, location="json") + .add_argument("yaml_url", type=str, location="json") + .add_argument("name", type=str, location="json") + .add_argument("description", type=str, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + .add_argument("pipeline_id", type=str, location="json") + ) args = parser.parse_args() # Create service with session with Session(db.engine) as session: import_service = RagPipelineDslService(session) # Import app - account = cast(Account, current_user) + account = current_user result = import_service.import_rag_pipeline( account=account, import_mode=args["mode"], @@ -60,9 +59,9 @@ class RagPipelineImportApi(Resource): # Return appropriate status code based on result status = result.status - if status == ImportStatus.FAILED.value: + if status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 - elif status == ImportStatus.PENDING.value: + elif status == ImportStatus.PENDING: return result.model_dump(mode="json"), 202 return result.model_dump(mode="json"), 200 @@ -74,20 +73,21 @@ class RagPipelineImportConfirmApi(Resource): @account_initialization_required @marshal_with(pipeline_import_fields) def post(self, import_id): + current_user, _ = current_account_with_tenant() # Check user role first - if not current_user.is_editor: + if not current_user.has_edit_permission: raise Forbidden() # Create service with session with Session(db.engine) as session: import_service = RagPipelineDslService(session) # Confirm import - account = cast(Account, current_user) + account = current_user result = import_service.confirm_import(import_id=import_id, account=account) session.commit() # Return appropriate status code based on result - if result.status == ImportStatus.FAILED.value: + if result.status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 return result.model_dump(mode="json"), 200 @@ -100,7 +100,8 @@ class RagPipelineImportCheckDependenciesApi(Resource): @account_initialization_required @marshal_with(pipeline_import_check_dependencies_fields) def get(self, pipeline: Pipeline): - if not current_user.is_editor: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() with Session(db.engine) as session: @@ -117,12 +118,12 @@ class RagPipelineExportApi(Resource): @get_rag_pipeline @account_initialization_required def get(self, pipeline: Pipeline): - if not current_user.is_editor: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() # Add include_secret params - parser = reqparse.RequestParser() - parser.add_argument("include_secret", type=str, default="false", location="args") + parser = reqparse.RequestParser().add_argument("include_secret", type=str, default="false", location="args") args = parser.parse_args() with 
Session(db.engine) as session: diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index a75c121fbe..5fe8572dfa 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -18,6 +18,7 @@ from controllers.console.app.error import ( from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import ( account_initialization_required, + edit_permission_required, setup_required, ) from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError @@ -36,8 +37,8 @@ from fields.workflow_run_fields import ( ) from libs import helper from libs.helper import TimestampField, uuid_value -from libs.login import current_user, login_required -from models.account import Account +from libs.login import current_account_with_tenant, current_user, login_required +from models import Account from models.dataset import Pipeline from models.model import EndUser from services.errors.app import WorkflowHashNotEqualError @@ -56,15 +57,12 @@ class DraftRagPipelineApi(Resource): @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required @marshal_with(workflow_fields) def get(self, pipeline: Pipeline): """ Get draft rag pipeline's workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - # fetch draft workflow by app_model rag_pipeline_service = RagPipelineService() workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline) @@ -79,23 +77,25 @@ class DraftRagPipelineApi(Resource): @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def post(self, pipeline: Pipeline): """ Sync draft workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() content_type = request.headers.get("Content-Type", "") if "application/json" in content_type: - parser = reqparse.RequestParser() - parser.add_argument("graph", type=dict, required=True, nullable=False, location="json") - parser.add_argument("hash", type=str, required=False, location="json") - parser.add_argument("environment_variables", type=list, required=False, location="json") - parser.add_argument("conversation_variables", type=list, required=False, location="json") - parser.add_argument("rag_pipeline_variables", type=list, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("graph", type=dict, required=True, nullable=False, location="json") + .add_argument("hash", type=str, required=False, location="json") + .add_argument("environment_variables", type=list, required=False, location="json") + .add_argument("conversation_variables", type=list, required=False, location="json") + .add_argument("rag_pipeline_variables", type=list, required=False, location="json") + ) args = parser.parse_args() elif "text/plain" in content_type: try: @@ -154,16 +154,15 @@ class RagPipelineDraftRunIterationNodeApi(Resource): @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def post(self, pipeline: Pipeline, node_id: str): """ Run draft workflow iteration 
node """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -194,11 +193,11 @@ class RagPipelineDraftRunLoopNodeApi(Resource): Run draft workflow loop node """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -229,14 +228,17 @@ class DraftRagPipelineRunApi(Resource): Run draft workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("datasource_info_list", type=list, required=True, location="json") - parser.add_argument("start_node_id", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("datasource_info_list", type=list, required=True, location="json") + .add_argument("start_node_id", type=str, required=True, location="json") + ) args = parser.parse_args() try: @@ -264,17 +266,20 @@ class PublishedRagPipelineRunApi(Resource): Run published workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("datasource_info_list", type=list, required=True, location="json") - parser.add_argument("start_node_id", type=str, required=True, location="json") - parser.add_argument("is_preview", type=bool, required=True, location="json", default=False) - parser.add_argument("response_mode", type=str, required=True, location="json", default="streaming") - parser.add_argument("original_document_id", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("datasource_info_list", type=list, required=True, location="json") + .add_argument("start_node_id", type=str, 
required=True, location="json") + .add_argument("is_preview", type=bool, required=True, location="json", default=False) + .add_argument("response_mode", type=str, required=True, location="json", default="streaming") + .add_argument("original_document_id", type=str, required=False, location="json") + ) args = parser.parse_args() streaming = args["response_mode"] == "streaming" @@ -303,15 +308,16 @@ class PublishedRagPipelineRunApi(Resource): # Run rag pipeline datasource # """ # # The role of the current user in the ta table must be admin, owner, or editor -# if not current_user.is_editor: +# if not current_user.has_edit_permission: # raise Forbidden() # # if not isinstance(current_user, Account): # raise Forbidden() # -# parser = reqparse.RequestParser() -# parser.add_argument("job_id", type=str, required=True, nullable=False, location="json") -# parser.add_argument("datasource_type", type=str, required=True, location="json") +# parser = (reqparse.RequestParser() +# .add_argument("job_id", type=str, required=True, nullable=False, location="json") +# .add_argument("datasource_type", type=str, required=True, location="json") +# ) # args = parser.parse_args() # # job_id = args.get("job_id") @@ -344,15 +350,16 @@ class PublishedRagPipelineRunApi(Resource): # Run rag pipeline datasource # """ # # The role of the current user in the ta table must be admin, owner, or editor -# if not current_user.is_editor: +# if not current_user.has_edit_permission: # raise Forbidden() # # if not isinstance(current_user, Account): # raise Forbidden() # -# parser = reqparse.RequestParser() -# parser.add_argument("job_id", type=str, required=True, nullable=False, location="json") -# parser.add_argument("datasource_type", type=str, required=True, location="json") +# parser = (reqparse.RequestParser() +# .add_argument("job_id", type=str, required=True, nullable=False, location="json") +# .add_argument("datasource_type", type=str, required=True, location="json") +# ) # args = parser.parse_args() # # job_id = args.get("job_id") @@ -385,13 +392,16 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource): Run rag pipeline datasource """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("credential_id", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("credential_id", type=str, required=False, location="json") + ) args = parser.parse_args() inputs = args.get("inputs") @@ -428,13 +438,16 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource): Run rag pipeline datasource """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, 
location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("credential_id", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("credential_id", type=str, required=False, location="json") + ) args = parser.parse_args() inputs = args.get("inputs") @@ -472,11 +485,13 @@ class RagPipelineDraftNodeRunApi(Resource): Run draft workflow node """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "inputs", type=dict, required=True, nullable=False, location="json" + ) args = parser.parse_args() inputs = args.get("inputs") @@ -505,7 +520,8 @@ class RagPipelineTaskStopApi(Resource): Stop workflow task """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id) @@ -525,7 +541,8 @@ class PublishedRagPipelineApi(Resource): Get published pipeline """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() if not pipeline.is_published: return None @@ -545,7 +562,8 @@ class PublishedRagPipelineApi(Resource): Publish workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() rag_pipeline_service = RagPipelineService() @@ -580,7 +598,8 @@ class DefaultRagPipelineBlockConfigsApi(Resource): Get default block config """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() # Get default block configs @@ -599,11 +618,11 @@ class DefaultRagPipelineBlockConfigApi(Resource): Get default block config """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("q", type=str, location="args") + parser = reqparse.RequestParser().add_argument("q", type=str, location="args") args = parser.parse_args() q = args.get("q") @@ -631,14 +650,17 @@ class PublishedAllRagPipelineApi(Resource): """ Get published workflows """ - if not 
isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") - parser.add_argument("user_id", type=str, required=False, location="args") - parser.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + .add_argument("user_id", type=str, required=False, location="args") + .add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args") + ) args = parser.parse_args() page = int(args.get("page", 1)) limit = int(args.get("limit", 10)) @@ -681,12 +703,15 @@ class RagPipelineByIdApi(Resource): Update workflow attributes """ # Check permission - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("marked_name", type=str, required=False, location="json") - parser.add_argument("marked_comment", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("marked_name", type=str, required=False, location="json") + .add_argument("marked_comment", type=str, required=False, location="json") + ) args = parser.parse_args() # Validate name and comment length @@ -733,15 +758,12 @@ class PublishedRagPipelineSecondStepApi(Resource): @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def get(self, pipeline: Pipeline): """ Get second step parameters of rag pipeline """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("node_id", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args") args = parser.parse_args() node_id = args.get("node_id") if not node_id: @@ -759,15 +781,12 @@ class PublishedRagPipelineFirstStepApi(Resource): @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def get(self, pipeline: Pipeline): """ Get first step parameters of rag pipeline """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("node_id", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args") args = parser.parse_args() node_id = args.get("node_id") if not node_id: @@ -785,15 +804,12 @@ class DraftRagPipelineFirstStepApi(Resource): @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def get(self, pipeline: Pipeline): """ Get first step 
parameters of rag pipeline
         """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("node_id", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
         args = parser.parse_args()
         node_id = args.get("node_id")
         if not node_id:
@@ -811,15 +827,12 @@ class DraftRagPipelineSecondStepApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     def get(self, pipeline: Pipeline):
         """
         Get second step parameters of rag pipeline
         """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("node_id", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
         args = parser.parse_args()
         node_id = args.get("node_id")
         if not node_id:
@@ -843,9 +856,11 @@ class RagPipelineWorkflowRunListApi(Resource):
         """
         Get workflow run list
         """
-        parser = reqparse.RequestParser()
-        parser.add_argument("last_id", type=uuid_value, location="args")
-        parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("last_id", type=uuid_value, location="args")
+            .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        )
         args = parser.parse_args()
 
         rag_pipeline_service = RagPipelineService()
@@ -880,7 +895,7 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
     @account_initialization_required
     @get_rag_pipeline
     @marshal_with(workflow_run_node_execution_list_fields)
-    def get(self, pipeline: Pipeline, run_id):
+    def get(self, pipeline: Pipeline, run_id: str):
         """
         Get workflow run node execution list
         """
@@ -903,14 +918,8 @@ class DatasourceListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        user = current_user
-        if not isinstance(user, Account):
-            raise Forbidden()
-        tenant_id = user.current_tenant_id
-        if not tenant_id:
-            raise Forbidden()
-
-        return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(tenant_id))
+        _, current_tenant_id = current_account_with_tenant()
+        return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(current_tenant_id))
 
 
 @console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run")
@@ -940,9 +949,8 @@ class RagPipelineTransformApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
-    def post(self, dataset_id):
-        if not isinstance(current_user, Account):
-            raise Forbidden()
+    def post(self, dataset_id: str):
+        current_user, _ = current_account_with_tenant()
         if not (current_user.has_edit_permission or current_user.is_dataset_operator):
             raise Forbidden()
 
@@ -959,19 +967,20 @@ class RagPipelineDatasourceVariableApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     @marshal_with(workflow_run_node_execution_fields)
     def post(self, pipeline: Pipeline):
         """
         Set datasource variables
         """
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-
-        parser = reqparse.RequestParser()
-
parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("datasource_info", type=dict, required=True, location="json") - parser.add_argument("start_node_id", type=str, required=True, location="json") - parser.add_argument("start_node_title", type=str, required=True, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("datasource_info", type=dict, required=True, location="json") + .add_argument("start_node_id", type=str, required=True, location="json") + .add_argument("start_node_title", type=str, required=True, location="json") + ) args = parser.parse_args() rag_pipeline_service = RagPipelineService() diff --git a/api/controllers/console/datasets/website.py b/api/controllers/console/datasets/website.py index b9c1f65bfd..fe6eaaa0de 100644 --- a/api/controllers/console/datasets/website.py +++ b/api/controllers/console/datasets/website.py @@ -31,17 +31,19 @@ class WebsiteCrawlApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument( - "provider", - type=str, - choices=["firecrawl", "watercrawl", "jinareader"], - required=True, - nullable=True, - location="json", + parser = ( + reqparse.RequestParser() + .add_argument( + "provider", + type=str, + choices=["firecrawl", "watercrawl", "jinareader"], + required=True, + nullable=True, + location="json", + ) + .add_argument("url", type=str, required=True, nullable=True, location="json") + .add_argument("options", type=dict, required=True, nullable=True, location="json") ) - parser.add_argument("url", type=str, required=True, nullable=True, location="json") - parser.add_argument("options", type=dict, required=True, nullable=True, location="json") args = parser.parse_args() # Create typed request and validate @@ -70,8 +72,7 @@ class WebsiteCrawlStatusApi(Resource): @login_required @account_initialization_required def get(self, job_id: str): - parser = reqparse.RequestParser() - parser.add_argument( + parser = reqparse.RequestParser().add_argument( "provider", type=str, choices=["firecrawl", "watercrawl", "jinareader"], required=True, location="args" ) args = parser.parse_args() diff --git a/api/controllers/console/datasets/wraps.py b/api/controllers/console/datasets/wraps.py index 98abb3ef8d..a8c1298e3e 100644 --- a/api/controllers/console/datasets/wraps.py +++ b/api/controllers/console/datasets/wraps.py @@ -3,8 +3,7 @@ from functools import wraps from controllers.console.datasets.error import PipelineNotFoundError from extensions.ext_database import db -from libs.login import current_user -from models.account import Account +from libs.login import current_account_with_tenant from models.dataset import Pipeline @@ -17,8 +16,7 @@ def get_rag_pipeline( if not kwargs.get("pipeline_id"): raise ValueError("missing pipeline_id in path parameters") - if not isinstance(current_user, Account): - raise ValueError("current_user is not an account") + _, current_tenant_id = current_account_with_tenant() pipeline_id = kwargs.get("pipeline_id") pipeline_id = str(pipeline_id) @@ -27,7 +25,7 @@ def get_rag_pipeline( pipeline = ( db.session.query(Pipeline) - .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_user.current_tenant_id) + .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_tenant_id) .first() ) diff --git a/api/controllers/console/explore/audio.py 
b/api/controllers/console/explore/audio.py index 7c20fb49d8..2a248cf20d 100644 --- a/api/controllers/console/explore/audio.py +++ b/api/controllers/console/explore/audio.py @@ -81,11 +81,13 @@ class ChatTextApi(InstalledAppResource): app_model = installed_app.app try: - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=str, required=False, location="json") - parser.add_argument("voice", type=str, location="json") - parser.add_argument("text", type=str, location="json") - parser.add_argument("streaming", type=bool, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=str, required=False, location="json") + .add_argument("voice", type=str, location="json") + .add_argument("text", type=str, location="json") + .add_argument("streaming", type=bool, location="json") + ) args = parser.parse_args() message_id = args.get("message_id", None) diff --git a/api/controllers/console/explore/completion.py b/api/controllers/console/explore/completion.py index 1102b815eb..9386ecebae 100644 --- a/api/controllers/console/explore/completion.py +++ b/api/controllers/console/explore/completion.py @@ -49,12 +49,14 @@ class CompletionApi(InstalledAppResource): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, location="json", default="") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, location="json", default="") + .add_argument("files", type=list, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("retriever_from", type=str, required=False, default="explore_app", location="json") + ) args = parser.parse_args() streaming = args["response_mode"] == "streaming" @@ -121,13 +123,15 @@ class ChatApi(InstalledAppResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, required=True, location="json") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json") - parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, required=True, location="json") + .add_argument("files", type=list, required=False, location="json") + .add_argument("conversation_id", type=uuid_value, location="json") + .add_argument("parent_message_id", type=uuid_value, required=False, location="json") + .add_argument("retriever_from", type=str, required=False, default="explore_app", location="json") + ) args = parser.parse_args() args["auto_generate_name"] = False diff --git 
a/api/controllers/console/explore/conversation.py b/api/controllers/console/explore/conversation.py index feabea2524..5a39363cc2 100644 --- a/api/controllers/console/explore/conversation.py +++ b/api/controllers/console/explore/conversation.py @@ -31,10 +31,12 @@ class ConversationListApi(InstalledAppResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - parser.add_argument("pinned", type=str, choices=["true", "false", None], location="args") + parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + .add_argument("pinned", type=str, choices=["true", "false", None], location="args") + ) args = parser.parse_args() pinned = None @@ -94,9 +96,11 @@ class ConversationRenameApi(InstalledAppResource): conversation_id = str(c_id) - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=False, location="json") - parser.add_argument("auto_generate", type=bool, required=False, default=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, location="json") + .add_argument("auto_generate", type=bool, required=False, default=False, location="json") + ) args = parser.parse_args() try: diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index bdc3fb0dbd..3c95779475 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -6,31 +6,29 @@ from flask_restx import Resource, inputs, marshal_with, reqparse from sqlalchemy import and_, select from werkzeug.exceptions import BadRequest, Forbidden, NotFound -from controllers.console import api +from controllers.console import console_ns from controllers.console.explore.wraps import InstalledAppResource from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check from extensions.ext_database import db from fields.installed_app_fields import installed_app_list_fields from libs.datetime_utils import naive_utc_now -from libs.login import current_user, login_required -from models import Account, App, InstalledApp, RecommendedApp +from libs.login import current_account_with_tenant, login_required +from models import App, InstalledApp, RecommendedApp from services.account_service import TenantService -from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService logger = logging.getLogger(__name__) +@console_ns.route("/installed-apps") class InstalledAppsListApi(Resource): @login_required @account_initialization_required @marshal_with(installed_app_list_fields) def get(self): app_id = request.args.get("app_id", default=None, type=str) - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") - current_tenant_id = current_user.current_tenant_id + current_user, current_tenant_id = current_account_with_tenant() if app_id: installed_apps = db.session.scalars( @@ -68,31 +66,26 @@ class InstalledAppsListApi(Resource): # Pre-filter out apps without setting or with sso_verified 
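
[Note on the hunk that resumes below: the per-app `app_code` lookup via `AppService.get_app_code_by_id` is dropped in favour of one batched permission check keyed by app id. A standalone sketch of the resulting shape, assuming — as the new code implies — that `batch_is_user_allowed_to_access_webapps` returns a mapping of app id to boolean; the wrapper name `filter_accessible_apps` is hypothetical:

    def filter_accessible_apps(installed_apps, user_id, webapp_settings, webapp_auth):
        # Drop apps with no webapp setting, or apps gated behind SSO verification.
        candidates = [
            app
            for app in installed_apps
            if (setting := webapp_settings.get(app["app"].id)) and setting.access_mode != "sso_verified"
        ]
        # One batched round trip instead of one permission lookup per app.
        permissions = webapp_auth.batch_is_user_allowed_to_access_webapps(
            user_id=user_id,
            app_ids=[app["app"].id for app in candidates],
        )
        return [app for app in candidates if permissions.get(app["app"].id)]

]
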
filtered_installed_apps = []
-            app_id_to_app_code = {}
             for installed_app in installed_app_list:
                 app_id = installed_app["app"].id
                 webapp_setting = webapp_settings.get(app_id)
                 if not webapp_setting or webapp_setting.access_mode == "sso_verified":
                     continue
-                app_code = AppService.get_app_code_by_id(str(app_id))
-                app_id_to_app_code[app_id] = app_code
                 filtered_installed_apps.append(installed_app)
 
-            app_codes = list(app_id_to_app_code.values())
-            # Batch permission check
+            app_ids = [installed_app["app"].id for installed_app in filtered_installed_apps]
             permissions = EnterpriseService.WebAppAuth.batch_is_user_allowed_to_access_webapps(
                 user_id=user_id,
-                app_codes=app_codes,
+                app_ids=app_ids,
             )
 
             # Keep only allowed apps
             res = []
             for installed_app in filtered_installed_apps:
                 app_id = installed_app["app"].id
-                app_code = app_id_to_app_code[app_id]
-                if permissions.get(app_code):
+                if permissions.get(app_id):
                     res.append(installed_app)
 
             installed_app_list = res
@@ -112,17 +105,15 @@ class InstalledAppsListApi(Resource):
     @account_initialization_required
     @cloud_edition_billing_resource_check("apps")
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("app_id", type=str, required=True, help="Invalid app_id")
+        parser = reqparse.RequestParser().add_argument("app_id", type=str, required=True, help="Invalid app_id")
         args = parser.parse_args()
 
         recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]).first()
         if recommended_app is None:
             raise NotFound("App not found")
 
-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
-        current_tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
+
         app = db.session.query(App).where(App.id == args["app_id"]).first()
         if app is None:
@@ -154,6 +145,7 @@ class InstalledAppsListApi(Resource):
         return {"message": "App installed successfully"}
 
 
+@console_ns.route("/installed-apps/<uuid:installed_app_id>")
 class InstalledAppApi(InstalledAppResource):
     """
     update and delete an installed app
     """
 
     def delete(self, installed_app):
-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
-        if installed_app.app_owner_tenant_id == current_user.current_tenant_id:
+        _, current_tenant_id = current_account_with_tenant()
+        if installed_app.app_owner_tenant_id == current_tenant_id:
             raise BadRequest("You can't uninstall an app owned by the current tenant")
 
         db.session.delete(installed_app)
@@ -172,8 +163,7 @@ class InstalledAppApi(InstalledAppResource):
         return {"result": "success", "message": "App uninstalled successfully"}, 204
 
     def patch(self, installed_app):
-        parser = reqparse.RequestParser()
-        parser.add_argument("is_pinned", type=inputs.boolean)
+        parser = reqparse.RequestParser().add_argument("is_pinned", type=inputs.boolean)
         args = parser.parse_args()
 
         commit_args = False
@@ -185,7 +175,3 @@ class InstalledAppApi(InstalledAppResource):
             db.session.commit()
 
         return {"result": "success", "message": "App info updated successfully"}
-
-
-api.add_resource(InstalledAppsListApi, "/installed-apps")
-api.add_resource(InstalledAppApi, "/installed-apps/<uuid:installed_app_id>")
diff --git a/api/controllers/console/explore/message.py b/api/controllers/console/explore/message.py
index b045e47846..db854e09bb 100644
--- a/api/controllers/console/explore/message.py
+++ b/api/controllers/console/explore/message.py
@@ -23,8 +23,7 @@ from core.model_runtime.errors.invoke import
InvokeError from fields.message_fields import message_infinite_scroll_pagination_fields from libs import helper from libs.helper import uuid_value -from libs.login import current_user -from models import Account +from libs.login import current_account_with_tenant from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.app import MoreLikeThisDisabledError @@ -48,21 +47,22 @@ logger = logging.getLogger(__name__) class MessageListApi(InstalledAppResource): @marshal_with(message_infinite_scroll_pagination_fields) def get(self, installed_app): + current_user, _ = current_account_with_tenant() app_model = installed_app.app app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", required=True, type=uuid_value, location="args") - parser.add_argument("first_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("conversation_id", required=True, type=uuid_value, location="args") + .add_argument("first_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") return MessageService.pagination_by_first_id( app_model, current_user, args["conversation_id"], args["first_id"], args["limit"] ) @@ -78,18 +78,19 @@ class MessageListApi(InstalledAppResource): ) class MessageFeedbackApi(InstalledAppResource): def post(self, installed_app, message_id): + current_user, _ = current_account_with_tenant() app_model = installed_app.app message_id = str(message_id) - parser = reqparse.RequestParser() - parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json") - parser.add_argument("content", type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("rating", type=str, choices=["like", "dislike", None], location="json") + .add_argument("content", type=str, location="json") + ) args = parser.parse_args() try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") MessageService.create_feedback( app_model=app_model, message_id=message_id, @@ -109,14 +110,14 @@ class MessageFeedbackApi(InstalledAppResource): ) class MessageMoreLikeThisApi(InstalledAppResource): def get(self, installed_app, message_id): + current_user, _ = current_account_with_tenant() app_model = installed_app.app if app_model.mode != "completion": raise NotCompletionAppError() message_id = str(message_id) - parser = reqparse.RequestParser() - parser.add_argument( + parser = reqparse.RequestParser().add_argument( "response_mode", type=str, required=True, choices=["blocking", "streaming"], location="args" ) args = parser.parse_args() @@ -124,8 +125,6 @@ class MessageMoreLikeThisApi(InstalledAppResource): streaming = args["response_mode"] == "streaming" try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") response = AppGenerateService.generate_more_like_this( app_model=app_model, user=current_user, @@ -159,6 +158,7 @@ class MessageMoreLikeThisApi(InstalledAppResource): ) class 
MessageSuggestedQuestionApi(InstalledAppResource):
     def get(self, installed_app, message_id):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         app_mode = AppMode.value_of(app_model.mode)
         if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
@@ -167,8 +167,6 @@ class MessageSuggestedQuestionApi(InstalledAppResource):
         message_id = str(message_id)
 
         try:
-            if not isinstance(current_user, Account):
-                raise ValueError("current_user must be an Account instance")
             questions = MessageService.get_suggested_questions_after_answer(
                 app_model=app_model, user=current_user, message_id=message_id, invoke_from=InvokeFrom.EXPLORE
             )
diff --git a/api/controllers/console/explore/parameter.py b/api/controllers/console/explore/parameter.py
index 7742ea24a9..9c6b2aedfb 100644
--- a/api/controllers/console/explore/parameter.py
+++ b/api/controllers/console/explore/parameter.py
@@ -1,7 +1,7 @@
 from flask_restx import marshal_with
 
 from controllers.common import fields
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.app.error import AppUnavailableError
 from controllers.console.explore.wraps import InstalledAppResource
 from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
@@ -9,6 +9,7 @@ from models.model import AppMode, InstalledApp
 from services.app_service import AppService
 
 
+@console_ns.route("/installed-apps/<uuid:installed_app_id>/parameters", endpoint="installed_app_parameters")
 class AppParameterApi(InstalledAppResource):
     """Resource for app variables."""
 
@@ -39,6 +40,7 @@ class AppParameterApi(InstalledAppResource):
         return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)
 
 
+@console_ns.route("/installed-apps/<uuid:installed_app_id>/meta", endpoint="installed_app_meta")
 class ExploreAppMetaApi(InstalledAppResource):
     def get(self, installed_app: InstalledApp):
         """Get app meta"""
@@ -46,9 +48,3 @@ class ExploreAppMetaApi(InstalledAppResource):
         if not app_model:
             raise ValueError("App not found")
         return AppService().get_app_meta(app_model)
-
-
-api.add_resource(
-    AppParameterApi, "/installed-apps/<uuid:installed_app_id>/parameters", endpoint="installed_app_parameters"
-)
-api.add_resource(ExploreAppMetaApi, "/installed-apps/<uuid:installed_app_id>/meta", endpoint="installed_app_meta")
diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py
index 974222ddf7..751012757a 100644
--- a/api/controllers/console/explore/recommended_app.py
+++ b/api/controllers/console/explore/recommended_app.py
@@ -1,7 +1,7 @@
 from flask_restx import Resource, fields, marshal_with, reqparse
 
 from constants.languages import languages
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required
 from libs.helper import AppIconUrlField
 from libs.login import current_user, login_required
@@ -35,14 +35,14 @@ recommended_app_list_fields = {
 }
 
 
+@console_ns.route("/explore/apps")
 class RecommendedAppListApi(Resource):
     @login_required
     @account_initialization_required
     @marshal_with(recommended_app_list_fields)
     def get(self):
         # language args
-        parser = reqparse.RequestParser()
-        parser.add_argument("language", type=str, location="args")
+        parser = reqparse.RequestParser().add_argument("language", type=str, location="args")
         args = parser.parse_args()
 
         language = args.get("language")
@@ -56,13 +56,10 @@ class RecommendedAppListApi(Resource):
         return RecommendedAppService.get_recommended_apps_and_categories(language_prefix)
 
 
+@console_ns.route("/explore/apps/<uuid:app_id>")
 class RecommendedAppApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, app_id):
         app_id = str(app_id)
         return RecommendedAppService.get_recommend_app_detail(app_id)
-
-
-api.add_resource(RecommendedAppListApi, "/explore/apps")
-api.add_resource(RecommendedAppApi, "/explore/apps/<uuid:app_id>")
diff --git a/api/controllers/console/explore/saved_message.py b/api/controllers/console/explore/saved_message.py
index 6f05f898f9..9775c951f7 100644
--- a/api/controllers/console/explore/saved_message.py
+++ b/api/controllers/console/explore/saved_message.py
@@ -2,13 +2,12 @@ from flask_restx import fields, marshal_with, reqparse
 from flask_restx.inputs import int_range
 from werkzeug.exceptions import NotFound
 
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.explore.error import NotCompletionAppError
 from controllers.console.explore.wraps import InstalledAppResource
 from fields.conversation_fields import message_file_fields
 from libs.helper import TimestampField, uuid_value
-from libs.login import current_user
-from models import Account
+from libs.login import current_account_with_tenant
 from services.errors.message import MessageNotExistsError
 from services.saved_message_service import SavedMessageService
 
@@ -25,6 +24,7 @@ message_fields = {
 }
 
 
+@console_ns.route("/installed-apps/<uuid:installed_app_id>/saved-messages", endpoint="installed_app_saved_messages")
 class SavedMessageListApi(InstalledAppResource):
     saved_message_infinite_scroll_pagination_fields = {
         "limit": fields.Integer,
@@ -34,31 +34,30 @@ class SavedMessageListApi(InstalledAppResource):
 
     @marshal_with(saved_message_infinite_scroll_pagination_fields)
     def get(self, installed_app):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         if app_model.mode != "completion":
             raise NotCompletionAppError()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("last_id", type=uuid_value, location="args")
-        parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("last_id", type=uuid_value, location="args")
+            .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        )
         args = parser.parse_args()
 
-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
         return SavedMessageService.pagination_by_last_id(app_model, current_user, args["last_id"], args["limit"])
 
     def post(self, installed_app):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         if app_model.mode != "completion":
             raise NotCompletionAppError()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("message_id", type=uuid_value, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("message_id", type=uuid_value, required=True, location="json")
         args = parser.parse_args()
 
         try:
-            if not isinstance(current_user, Account):
-                raise ValueError("current_user must be an Account instance")
             SavedMessageService.save(app_model, current_user, args["message_id"])
         except MessageNotExistsError:
             raise NotFound("Message Not Exists.")
@@ -66,8 +65,12 @@ class SavedMessageListApi(InstalledAppResource):
         return {"result": "success"}
 
 
+@console_ns.route(
+    "/installed-apps/<uuid:installed_app_id>/saved-messages/<uuid:message_id>", endpoint="installed_app_saved_message"
+)
 class SavedMessageApi(InstalledAppResource):
     def delete(self, installed_app, message_id):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
 
         message_id = str(message_id)
@@ -75,20 +78,6 @@ class SavedMessageApi(InstalledAppResource):
         if app_model.mode != "completion":
             raise NotCompletionAppError()
 
-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
         SavedMessageService.delete(app_model, current_user, message_id)
 
         return {"result": "success"}, 204
-
-
-api.add_resource(
-    SavedMessageListApi,
-    "/installed-apps/<uuid:installed_app_id>/saved-messages",
-    endpoint="installed_app_saved_messages",
-)
-api.add_resource(
-    SavedMessageApi,
-    "/installed-apps/<uuid:installed_app_id>/saved-messages/<uuid:message_id>",
-    endpoint="installed_app_saved_message",
-)
diff --git a/api/controllers/console/explore/workflow.py b/api/controllers/console/explore/workflow.py
index e32f2814eb..125f603a5a 100644
--- a/api/controllers/console/explore/workflow.py
+++ b/api/controllers/console/explore/workflow.py
@@ -22,7 +22,7 @@ from core.errors.error import (
 from core.model_runtime.errors.invoke import InvokeError
 from core.workflow.graph_engine.manager import GraphEngineManager
 from libs import helper
-from libs.login import current_user
+from libs.login import current_account_with_tenant
 from models.model import AppMode, InstalledApp
 from services.app_generate_service import AppGenerateService
 from services.errors.llm import InvokeRateLimitError
@@ -38,6 +38,7 @@ class InstalledAppWorkflowRunApi(InstalledAppResource):
         """
         Run workflow
         """
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         if not app_model:
             raise NotWorkflowAppError()
@@ -45,11 +46,12 @@ class InstalledAppWorkflowRunApi(InstalledAppResource):
         if app_mode != AppMode.WORKFLOW:
             raise NotWorkflowAppError()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("files", type=list, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+            .add_argument("files", type=list, required=False, location="json")
+        )
         args = parser.parse_args()
-        assert current_user is not None
         try:
             response = AppGenerateService.generate(
                 app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=True
@@ -85,7 +87,6 @@ class InstalledAppWorkflowTaskStopApi(InstalledAppResource):
         app_mode = AppMode.value_of(app_model.mode)
         if app_mode != AppMode.WORKFLOW:
             raise NotWorkflowAppError()
-        assert current_user is not None
 
         # Stop using both mechanisms for backward compatibility
         # Legacy stop flag mechanism (without user check)
diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py
index 3a8ba64a03..2a97d312aa 100644
--- a/api/controllers/console/explore/wraps.py
+++ b/api/controllers/console/explore/wraps.py
@@ -2,16 +2,14 @@ from collections.abc import Callable
 from functools import wraps
 from typing import Concatenate, ParamSpec, TypeVar
 
-from flask_login import current_user
 from flask_restx import Resource
 from werkzeug.exceptions import NotFound
 
 from controllers.console.explore.error import AppAccessDeniedError
 from controllers.console.wraps import account_initialization_required
 from extensions.ext_database import db
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models import InstalledApp
-from services.app_service import AppService
 from
diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py
index 3a8ba64a03..2a97d312aa 100644
--- a/api/controllers/console/explore/wraps.py
+++ b/api/controllers/console/explore/wraps.py
@@ -2,16 +2,14 @@ from collections.abc import Callable
 from functools import wraps
 from typing import Concatenate, ParamSpec, TypeVar
 
-from flask_login import current_user
 from flask_restx import Resource
 from werkzeug.exceptions import NotFound
 
 from controllers.console.explore.error import AppAccessDeniedError
 from controllers.console.wraps import account_initialization_required
 from extensions.ext_database import db
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models import InstalledApp
-from services.app_service import AppService
 from services.enterprise.enterprise_service import EnterpriseService
 from services.feature_service import FeatureService
 
@@ -24,11 +22,10 @@ def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | Non
     def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
         @wraps(view)
         def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs):
+            _, current_tenant_id = current_account_with_tenant()
             installed_app = (
                 db.session.query(InstalledApp)
-                .where(
-                    InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_user.current_tenant_id
-                )
+                .where(InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_tenant_id)
                 .first()
             )
 
@@ -54,13 +51,13 @@ def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] |
     def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
         @wraps(view)
         def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs):
+            current_user, _ = current_account_with_tenant()
             feature = FeatureService.get_system_features()
             if feature.webapp_auth.enabled:
                 app_id = installed_app.app_id
-                app_code = AppService.get_app_code_by_id(app_id)
                 res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(
                     user_id=str(current_user.id),
-                    app_code=app_code,
+                    app_id=app_id,
                 )
                 if not res:
                     raise AppAccessDeniedError()
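The decorators in wraps.py keep precise signatures through the wrapping thanks to ParamSpec and Concatenate: the outer view takes a str id, the inner view receives a loaded object, and the remaining parameters pass through unchanged. A minimal runnable sketch of that shape, with a stand-in model instead of the real ORM query:

from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


class InstalledApp:  # stand-in for the ORM model
    def __init__(self, id: str):
        self.id = id


def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R]):
    @wraps(view)
    def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs) -> R:
        installed_app = InstalledApp(installed_app_id)  # the real code queries the DB here
        return view(installed_app, *args, **kwargs)

    return decorated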
diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py
index 57f5ab191e..a1d36def0d 100644
--- a/api/controllers/console/extension.py
+++ b/api/controllers/console/extension.py
@@ -1,11 +1,10 @@
-from flask_login import current_user
 from flask_restx import Resource, fields, marshal_with, reqparse
 
 from constants import HIDDEN_VALUE
 from controllers.console import api, console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from fields.api_based_extension_fields import api_based_extension_fields
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models.api_based_extension import APIBasedExtension
 from services.api_based_extension_service import APIBasedExtensionService
 from services.code_based_extension_service import CodeBasedExtensionService
@@ -30,8 +29,7 @@ class CodeBasedExtensionAPI(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("module", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("module", type=str, required=True, location="args")
         args = parser.parse_args()
         return {"module": args["module"], "data": CodeBasedExtensionService.get_code_based_extension(args["module"])}
 
@@ -47,7 +45,7 @@ class APIBasedExtensionAPI(Resource):
     @account_initialization_required
     @marshal_with(api_based_extension_fields)
     def get(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
         return APIBasedExtensionService.get_all_by_tenant_id(tenant_id)
 
     @api.doc("create_api_based_extension")
@@ -68,14 +66,11 @@ class APIBasedExtensionAPI(Resource):
     @account_initialization_required
     @marshal_with(api_based_extension_fields)
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, location="json")
-        parser.add_argument("api_endpoint", type=str, required=True, location="json")
-        parser.add_argument("api_key", type=str, required=True, location="json")
-        args = parser.parse_args()
+        args = api.payload
+        _, current_tenant_id = current_account_with_tenant()
 
         extension_data = APIBasedExtension(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             name=args["name"],
             api_endpoint=args["api_endpoint"],
             api_key=args["api_key"],
@@ -96,7 +91,7 @@ class APIBasedExtensionDetailAPI(Resource):
     @marshal_with(api_based_extension_fields)
     def get(self, id):
         api_based_extension_id = str(id)
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         return APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id)
 
@@ -120,15 +115,11 @@ class APIBasedExtensionDetailAPI(Resource):
     @marshal_with(api_based_extension_fields)
     def post(self, id):
         api_based_extension_id = str(id)
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
 
-        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id)
+        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id)
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, location="json")
-        parser.add_argument("api_endpoint", type=str, required=True, location="json")
-        parser.add_argument("api_key", type=str, required=True, location="json")
-        args = parser.parse_args()
+        args = api.payload
 
         extension_data_from_db.name = args["name"]
         extension_data_from_db.api_endpoint = args["api_endpoint"]
@@ -147,9 +138,9 @@ class APIBasedExtensionDetailAPI(Resource):
     @account_initialization_required
     def delete(self, id):
         api_based_extension_id = str(id)
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
 
-        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id)
+        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id)
 
         APIBasedExtensionService.delete(extension_data_from_db)
diff --git a/api/controllers/console/feature.py b/api/controllers/console/feature.py
index d43b839291..39bcf3424c 100644
--- a/api/controllers/console/feature.py
+++ b/api/controllers/console/feature.py
@@ -1,7 +1,6 @@
-from flask_login import current_user
 from flask_restx import Resource, fields
 
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from services.feature_service import FeatureService
 
 from . import api, console_ns
@@ -23,7 +22,9 @@ class FeatureApi(Resource):
     @cloud_utm_record
     def get(self):
         """Get feature configuration for current tenant"""
-        return FeatureService.get_features(current_user.current_tenant_id).model_dump()
+        _, current_tenant_id = current_account_with_tenant()
+
+        return FeatureService.get_features(current_tenant_id).model_dump()
 
 
 @console_ns.route("/system-features")
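The extension controller above replaces hand-built RequestParser blocks in POST handlers with api.payload, flask-restx's accessor for the parsed JSON body. Note the trade-off: payload does no per-field validation, so required-field enforcement has to come from somewhere else (an @api.expect model or the service layer). A tiny sketch, with an illustrative route and field:

from flask import Flask
from flask_restx import Api, Resource

app = Flask(__name__)
api = Api(app)


@api.route("/api-based-extension")
class APIBasedExtensionAPI(Resource):
    def post(self):
        args = api.payload  # dict parsed from the JSON request body
        return {"name": args["name"]}, 201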
diff --git a/api/controllers/console/files.py b/api/controllers/console/files.py
index 34f186e2f0..36fcd460bb 100644
--- a/api/controllers/console/files.py
+++ b/api/controllers/console/files.py
@@ -1,7 +1,6 @@
 from typing import Literal
 
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, marshal_with
 from werkzeug.exceptions import Forbidden
 
@@ -22,8 +21,7 @@ from controllers.console.wraps import (
 )
 from extensions.ext_database import db
 from fields.file_fields import file_fields, upload_config_fields
-from libs.login import login_required
-from models import Account
+from libs.login import current_account_with_tenant, login_required
 from services.file_service import FileService
 
 from . import console_ns
@@ -41,6 +39,7 @@ class FileApi(Resource):
         return {
             "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT,
             "batch_count_limit": dify_config.UPLOAD_FILE_BATCH_LIMIT,
+            "file_upload_limit": dify_config.BATCH_UPLOAD_LIMIT,
             "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
             "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
             "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
@@ -53,6 +52,7 @@ class FileApi(Resource):
     @marshal_with(file_fields)
     @cloud_edition_billing_resource_check("documents")
     def post(self):
+        current_user, _ = current_account_with_tenant()
         source_str = request.form.get("source")
         source: Literal["datasets"] | None = "datasets" if source_str == "datasets" else None
 
@@ -65,16 +65,12 @@ class FileApi(Resource):
         if not file.filename:
             raise FilenameNotExistsError
-
         if source == "datasets" and not current_user.is_dataset_editor:
             raise Forbidden()
 
         if source not in ("datasets", None):
             source = None
 
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-
         try:
             upload_file = FileService(db.engine).upload_file(
                 filename=file.filename,
@@ -108,4 +104,4 @@ class FileSupportTypeApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        return {"allowed_extensions": DOCUMENT_EXTENSIONS}
+        return {"allowed_extensions": list(DOCUMENT_EXTENSIONS)}
diff --git a/api/controllers/console/init_validate.py b/api/controllers/console/init_validate.py
index 30b53458b2..f219425d07 100644
--- a/api/controllers/console/init_validate.py
+++ b/api/controllers/console/init_validate.py
@@ -57,8 +57,7 @@ class InitValidateAPI(Resource):
         if tenant_count > 0:
             raise AlreadySetupError()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("password", type=StrLen(30), required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("password", type=StrLen(30), required=True, location="json")
        input_password = parser.parse_args()["password"]
 
         if input_password != os.environ.get("INIT_PASSWORD"):
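The fluent parser style used throughout this patch works because RequestParser.add_argument returns the parser itself, so building and parsing can be written as one expression. A standalone equivalent of the multi-statement form being replaced:

from flask_restx import reqparse

parser = (
    reqparse.RequestParser()
    .add_argument("last_id", type=str, location="args")
    .add_argument("limit", type=int, default=20, location="args")
)
# Inside a request context: args = parser.parse_args()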
diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py
index 7aaf807fb0..96c86dc0db 100644
--- a/api/controllers/console/remote_files.py
+++ b/api/controllers/console/remote_files.py
@@ -1,8 +1,6 @@
 import urllib.parse
-from typing import cast
 
 import httpx
-from flask_login import current_user
 from flask_restx import Resource, marshal_with, reqparse
 
 import services
@@ -16,7 +14,7 @@ from core.file import helpers as file_helpers
 from core.helper import ssrf_proxy
 from extensions.ext_database import db
 from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
-from models.account import Account
+from libs.login import current_account_with_tenant
 from services.file_service import FileService
 
 from . import console_ns
@@ -42,8 +40,7 @@ class RemoteFileUploadApi(Resource):
     @marshal_with(file_fields_with_signed_url)
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("url", type=str, required=True, help="URL is required")
+        parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required")
         args = parser.parse_args()
 
         url = args["url"]
@@ -65,7 +62,7 @@ class RemoteFileUploadApi(Resource):
         content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
 
         try:
-            user = cast(Account, current_user)
+            user, _ = current_account_with_tenant()
             upload_file = FileService(db.engine).upload_file(
                 filename=file_info.filename,
                 content=content,
diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py
index bff5fc1651..1200349e2d 100644
--- a/api/controllers/console/setup.py
+++ b/api/controllers/console/setup.py
@@ -69,15 +69,22 @@ class SetupApi(Resource):
         if not get_init_validate_status():
             raise NotInitValidateError()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("name", type=StrLen(30), required=True, location="json")
-        parser.add_argument("password", type=valid_password, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("name", type=StrLen(30), required=True, location="json")
+            .add_argument("password", type=valid_password, required=True, location="json")
+            .add_argument("language", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         # setup
         RegisterService.setup(
-            email=args["email"], name=args["name"], password=args["password"], ip_address=extract_remote_ip(request)
+            email=args["email"],
+            name=args["name"],
+            password=args["password"],
+            ip_address=extract_remote_ip(request),
+            language=args["language"],
         )
 
         return {"result": "success"}, 201
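The setup hunk threads an optional language from the request through to RegisterService.setup. A hedged sketch of the widened signature this implies; the real service does far more (account creation, tenant bootstrap), and the "en-US" fallback is an assumption, not confirmed by the diff:

def setup(email: str, name: str, password: str, ip_address: str, language: str | None = None) -> None:
    interface_language = language if language else "en-US"  # assumed default
    print(f"Creating owner {name} <{email}> from {ip_address} with UI language {interface_language}")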
diff --git a/api/controllers/console/tag/tags.py b/api/controllers/console/tag/tags.py
index da236ee5af..40ae7fb4d0 100644
--- a/api/controllers/console/tag/tags.py
+++ b/api/controllers/console/tag/tags.py
@@ -1,12 +1,11 @@
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden
 
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from fields.tag_fields import dataset_tag_fields
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models.model import Tag
 from services.tag_service import TagService
 
@@ -17,15 +16,17 @@ def _validate_name(name):
     return name
 
 
+@console_ns.route("/tags")
 class TagListApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(dataset_tag_fields)
     def get(self):
+        _, current_tenant_id = current_account_with_tenant()
         tag_type = request.args.get("type", type=str, default="")
         keyword = request.args.get("keyword", default=None, type=str)
-        tags = TagService.get_tags(tag_type, current_user.current_tenant_id, keyword)
+        tags = TagService.get_tags(tag_type, current_tenant_id, keyword)
 
         return tags, 200
 
@@ -33,16 +34,23 @@ class TagListApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
+        current_user, _ = current_account_with_tenant()
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not (current_user.is_editor or current_user.is_dataset_editor):
+        if not (current_user.has_edit_permission or current_user.is_dataset_editor):
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name
-        )
-        parser.add_argument(
-            "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "name",
+                nullable=False,
+                required=True,
+                help="Name must be between 1 to 50 characters.",
+                type=_validate_name,
+            )
+            .add_argument(
+                "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
+            )
         )
         args = parser.parse_args()
         tag = TagService.save_tags(args)
@@ -52,18 +60,19 @@ class TagListApi(Resource):
         return response, 200
 
 
+@console_ns.route("/tags/<uuid:tag_id>")
 class TagUpdateDeleteApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def patch(self, tag_id):
+        current_user, _ = current_account_with_tenant()
         tag_id = str(tag_id)
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not (current_user.is_editor or current_user.is_dataset_editor):
+        if not (current_user.has_edit_permission or current_user.is_dataset_editor):
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name
         )
         args = parser.parse_args()
@@ -79,9 +88,10 @@ class TagUpdateDeleteApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, tag_id):
+        current_user, _ = current_account_with_tenant()
         tag_id = str(tag_id)
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not current_user.is_editor:
+        if not current_user.has_edit_permission:
             raise Forbidden()
 
         TagService.delete_tag(tag_id)
@@ -89,24 +99,28 @@ class TagUpdateDeleteApi(Resource):
         return 204
 
 
+@console_ns.route("/tag-bindings/create")
 class TagBindingCreateApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
+        current_user, _ = current_account_with_tenant()
         # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
-        if not (current_user.is_editor or current_user.is_dataset_editor):
+        if not (current_user.has_edit_permission or current_user.is_dataset_editor):
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required."
-        )
-        parser.add_argument(
-            "target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required."
-        )
-        parser.add_argument(
-            "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required."
+            )
+            .add_argument(
+                "target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required."
+            )
+            .add_argument(
+                "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
+            )
         )
         args = parser.parse_args()
         TagService.save_tag_binding(args)
@@ -114,28 +128,26 @@ class TagBindingCreateApi(Resource):
         return {"result": "success"}, 200
 
 
+@console_ns.route("/tag-bindings/remove")
 class TagBindingDeleteApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
+        current_user, _ = current_account_with_tenant()
         # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
-        if not (current_user.is_editor or current_user.is_dataset_editor):
+        if not (current_user.has_edit_permission or current_user.is_dataset_editor):
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
-        parser.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
-        parser.add_argument(
-            "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
+            .add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
+            .add_argument(
+                "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
+            )
         )
         args = parser.parse_args()
         TagService.delete_tag_binding(args)
 
         return {"result": "success"}, 200
-
-
-api.add_resource(TagListApi, "/tags")
-api.add_resource(TagUpdateDeleteApi, "/tags/<uuid:tag_id>")
-api.add_resource(TagBindingCreateApi, "/tag-bindings/create")
-api.add_resource(TagBindingDeleteApi, "/tag-bindings/remove")
diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py
index 965a520f70..417486f59e 100644
--- a/api/controllers/console/version.py
+++ b/api/controllers/console/version.py
@@ -37,8 +37,7 @@ class VersionApi(Resource):
     )
     def get(self):
         """Check for application version updates"""
-        parser = reqparse.RequestParser()
-        parser.add_argument("current_version", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("current_version", type=str, required=True, location="args")
         args = parser.parse_args()
 
         check_update_url = dify_config.CHECK_UPDATE_URL
diff --git a/api/controllers/console/workspace/__init__.py b/api/controllers/console/workspace/__init__.py
index 4a048f3c5e..876e2301f2 100644
--- a/api/controllers/console/workspace/__init__.py
+++ b/api/controllers/console/workspace/__init__.py
@@ -2,11 +2,11 @@ from collections.abc import Callable
 from functools import wraps
 from typing import ParamSpec, TypeVar
 
-from flask_login import current_user
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden
 
 from extensions.ext_database import db
+from libs.login import current_account_with_tenant
 from models.account import TenantPluginPermission
 
 P = ParamSpec("P")
@@ -20,8 +20,9 @@ def plugin_permission_required(
     def interceptor(view: Callable[P, R]):
         @wraps(view)
         def decorated(*args: P.args, **kwargs: P.kwargs):
+            current_user, current_tenant_id = current_account_with_tenant()
             user = current_user
-            tenant_id = user.current_tenant_id
+            tenant_id = current_tenant_id
 
             with Session(db.engine) as session:
                 permission = (
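The tags controller also renames the permission check from is_editor to has_edit_permission, a broader role-based test. An illustrative property showing the rename in spirit; role names follow the diff, but the exact Account model implementation is assumed:

from enum import StrEnum


class TenantAccountRole(StrEnum):
    OWNER = "owner"
    ADMIN = "admin"
    EDITOR = "editor"
    NORMAL = "normal"
    DATASET_OPERATOR = "dataset_operator"


class Account:
    def __init__(self, role: TenantAccountRole):
        self.current_role = role

    @property
    def has_edit_permission(self) -> bool:
        # Assumed: every role allowed to edit, not just EDITOR itself.
        return self.current_role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR}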
diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py
index 7a41a8a5cc..499a52370f 100644
--- a/api/controllers/console/workspace/account.py
+++ b/api/controllers/console/workspace/account.py
@@ -2,14 +2,13 @@ from datetime import datetime
 
 import pytz
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, fields, marshal_with, reqparse
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 
 from configs import dify_config
 from constants.languages import supported_language
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.auth.error import (
     EmailAlreadyInUseError,
     EmailChangeLimitError,
@@ -37,21 +36,19 @@ from extensions.ext_database import db
 from fields.member_fields import account_fields
 from libs.datetime_utils import naive_utc_now
 from libs.helper import TimestampField, email, extract_remote_ip, timezone
-from libs.login import login_required
-from models import AccountIntegrate, InvitationCode
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
+from models import Account, AccountIntegrate, InvitationCode
 from services.account_service import AccountService
 from services.billing_service import BillingService
 from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
 
 
+@console_ns.route("/account/init")
 class AccountInitApi(Resource):
     @setup_required
     @login_required
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         if account.status == "active":
             raise AccountAlreadyInitedError()
@@ -60,9 +57,9 @@ class AccountInitApi(Resource):
 
         if dify_config.EDITION == "CLOUD":
             parser.add_argument("invitation_code", type=str, location="json")
-
-        parser.add_argument("interface_language", type=supported_language, required=True, location="json")
-        parser.add_argument("timezone", type=timezone, required=True, location="json")
+        parser.add_argument("interface_language", type=supported_language, required=True, location="json").add_argument(
+            "timezone", type=timezone, required=True, location="json"
+        )
         args = parser.parse_args()
 
         if dify_config.EDITION == "CLOUD":
@@ -97,6 +94,7 @@ class AccountInitApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/account/profile")
 class AccountProfileApi(Resource):
     @setup_required
     @login_required
@@ -104,21 +102,19 @@ class AccountProfileApi(Resource):
     @marshal_with(account_fields)
     @enterprise_license_required
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         return current_user
 
 
+@console_ns.route("/account/name")
 class AccountNameApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
         args = parser.parse_args()
 
         # Validate account name length
@@ -130,16 +126,15 @@ class AccountNameApi(Resource):
         return updated_account
 
 
+@console_ns.route("/account/avatar")
 class AccountAvatarApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("avatar", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument("avatar", type=str, required=True, location="json")
         args = parser.parse_args()
 
         updated_account = AccountService.update_account(current_user, avatar=args["avatar"])
@@ -147,16 +142,17 @@ class AccountAvatarApi(Resource):
         return updated_account
 
 
+@console_ns.route("/account/interface-language")
 class AccountInterfaceLanguageApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("interface_language", type=supported_language, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument(
+            "interface_language", type=supported_language, required=True, location="json"
+        )
         args = parser.parse_args()
 
         updated_account = AccountService.update_account(current_user, interface_language=args["interface_language"])
@@ -164,16 +160,17 @@ class AccountInterfaceLanguageApi(Resource):
         return updated_account
 
 
+@console_ns.route("/account/interface-theme")
 class AccountInterfaceThemeApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("interface_theme", type=str, choices=["light", "dark"], required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument(
+            "interface_theme", type=str, choices=["light", "dark"], required=True, location="json"
+        )
         args = parser.parse_args()
 
         updated_account = AccountService.update_account(current_user, interface_theme=args["interface_theme"])
@@ -181,16 +178,15 @@ class AccountInterfaceThemeApi(Resource):
         return updated_account
 
 
+@console_ns.route("/account/timezone")
 class AccountTimezoneApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("timezone", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument("timezone", type=str, required=True, location="json")
         args = parser.parse_args()
 
         # Validate timezone string, e.g. America/New_York, Asia/Shanghai
@@ -202,18 +198,20 @@ class AccountTimezoneApi(Resource):
         return updated_account
 
 
+@console_ns.route("/account/password")
 class AccountPasswordApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("password", type=str, required=False, location="json")
-        parser.add_argument("new_password", type=str, required=True, location="json")
-        parser.add_argument("repeat_new_password", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("password", type=str, required=False, location="json")
+            .add_argument("new_password", type=str, required=True, location="json")
+            .add_argument("repeat_new_password", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         if args["new_password"] != args["repeat_new_password"]:
@@ -227,6 +225,7 @@ class AccountPasswordApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/account/integrates")
 class AccountIntegrateApi(Resource):
     integrate_fields = {
         "provider": fields.String,
@@ -244,9 +243,7 @@ class AccountIntegrateApi(Resource):
     @account_initialization_required
     @marshal_with(integrate_list_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         account_integrates = db.session.scalars(
             select(AccountIntegrate).where(AccountIntegrate.account_id == account.id)
@@ -283,14 +280,13 @@ class AccountIntegrateApi(Resource):
         return {"data": integrate_data}
 
 
+@console_ns.route("/account/delete/verify")
 class AccountDeleteVerifyApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         token, code = AccountService.generate_account_deletion_verification_code(account)
         AccountService.send_account_deletion_verification_email(account, code)
@@ -298,18 +294,19 @@ class AccountDeleteVerifyApi(Resource):
         return {"result": "success", "data": token}
 
 
+@console_ns.route("/account/delete")
 class AccountDeleteApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("token", type=str, required=True, location="json")
-        parser.add_argument("code", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("token", type=str, required=True, location="json")
+            .add_argument("code", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         if not AccountService.verify_account_deletion_code(args["token"], args["code"]):
@@ -320,12 +317,15 @@ class AccountDeleteApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/account/delete/feedback")
 class AccountDeleteUpdateFeedbackApi(Resource):
     @setup_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=str, required=True, location="json")
-        parser.add_argument("feedback", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=str, required=True, location="json")
+            .add_argument("feedback", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         BillingService.update_account_deletion_feedback(args["email"], args["feedback"])
@@ -333,6 +333,7 @@ class AccountDeleteUpdateFeedbackApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/account/education/verify")
 class EducationVerifyApi(Resource):
     verify_fields = {
         "token": fields.String,
@@ -345,13 +346,12 @@ class EducationVerifyApi(Resource):
     @cloud_edition_billing_enabled
     @marshal_with(verify_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         return BillingService.EducationIdentity.verify(account.id, account.email)
 
 
+@console_ns.route("/account/education")
 class EducationApi(Resource):
     status_fields = {
         "result": fields.Boolean,
@@ -366,14 +366,14 @@ class EducationApi(Resource):
     @only_edition_cloud
     @cloud_edition_billing_enabled
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("token", type=str, required=True, location="json")
-        parser.add_argument("institution", type=str, required=True, location="json")
-        parser.add_argument("role", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("token", type=str, required=True, location="json")
+            .add_argument("institution", type=str, required=True, location="json")
+            .add_argument("role", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         return BillingService.EducationIdentity.activate(account, args["token"], args["institution"], args["role"])
@@ -385,9 +385,7 @@ class EducationApi(Resource):
     @cloud_edition_billing_enabled
     @marshal_with(status_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         res = BillingService.EducationIdentity.status(account.id)
         # convert expire_at to UTC timestamp from isoformat
@@ -396,6 +394,7 @@ class EducationApi(Resource):
         return res
 
 
+@console_ns.route("/account/education/autocomplete")
 class EducationAutoCompleteApi(Resource):
     data_fields = {
         "data": fields.List(fields.String),
@@ -410,26 +409,32 @@ class EducationAutoCompleteApi(Resource):
     @cloud_edition_billing_enabled
     @marshal_with(data_fields)
     def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("keywords", type=str, required=True, location="args")
-        parser.add_argument("page", type=int, required=False, location="args", default=0)
-        parser.add_argument("limit", type=int, required=False, location="args", default=20)
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("keywords", type=str, required=True, location="args")
+            .add_argument("page", type=int, required=False, location="args", default=0)
+            .add_argument("limit", type=int, required=False, location="args", default=20)
+        )
         args = parser.parse_args()
 
         return BillingService.EducationIdentity.autocomplete(args["keywords"], args["page"], args["limit"])
 
 
+@console_ns.route("/account/change-email")
 class ChangeEmailSendEmailApi(Resource):
     @enable_change_email
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("language", type=str, required=False, location="json")
-        parser.add_argument("phase", type=str, required=False, location="json")
-        parser.add_argument("token", type=str, required=False, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("language", type=str, required=False, location="json")
+            .add_argument("phase", type=str, required=False, location="json")
+            .add_argument("token", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         ip_address = extract_remote_ip(request)
@@ -451,8 +456,6 @@ class ChangeEmailSendEmailApi(Resource):
                 raise InvalidTokenError()
 
             user_email = reset_data.get("email", "")
-            if not isinstance(current_user, Account):
-                raise ValueError("Invalid user account")
             if user_email != current_user.email:
                 raise InvalidEmailError()
         else:
@@ -467,16 +470,19 @@ class ChangeEmailSendEmailApi(Resource):
         return {"result": "success", "data": token}
 
 
+@console_ns.route("/account/change-email/validity")
 class ChangeEmailCheckApi(Resource):
     @enable_change_email
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("code", type=str, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("code", type=str, required=True, location="json")
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
 
         user_email = args["email"]
@@ -508,6 +514,7 @@ class ChangeEmailCheckApi(Resource):
         return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
 
 
+@console_ns.route("/account/change-email/reset")
 class ChangeEmailResetApi(Resource):
     @enable_change_email
     @setup_required
@@ -515,9 +522,11 @@ class ChangeEmailResetApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("new_email", type=email, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("new_email", type=email, required=True, location="json")
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
 
         if AccountService.is_account_in_freeze(args["new_email"]):
@@ -533,8 +542,7 @@ class ChangeEmailResetApi(Resource):
         AccountService.revoke_change_email_token(args["token"])
 
         old_email = reset_data.get("old_email", "")
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if current_user.email != old_email:
             raise AccountNotFound()
 
@@ -547,39 +555,14 @@ class ChangeEmailResetApi(Resource):
         return updated_account
 
 
+@console_ns.route("/account/change-email/check-email-unique")
 class CheckEmailUnique(Resource):
     @setup_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("email", type=email, required=True, location="json")
         args = parser.parse_args()
 
         if AccountService.is_account_in_freeze(args["email"]):
             raise AccountInFreezeError()
         if not AccountService.check_email_unique(args["email"]):
             raise EmailAlreadyInUseError()
         return {"result": "success"}
-
-
-# Register API resources
-api.add_resource(AccountInitApi, "/account/init")
-api.add_resource(AccountProfileApi, "/account/profile")
-api.add_resource(AccountNameApi, "/account/name")
-api.add_resource(AccountAvatarApi, "/account/avatar")
-api.add_resource(AccountInterfaceLanguageApi, "/account/interface-language")
-api.add_resource(AccountInterfaceThemeApi, "/account/interface-theme")
-api.add_resource(AccountTimezoneApi, "/account/timezone")
-api.add_resource(AccountPasswordApi, "/account/password")
-api.add_resource(AccountIntegrateApi, "/account/integrates")
-api.add_resource(AccountDeleteVerifyApi, "/account/delete/verify")
-api.add_resource(AccountDeleteApi, "/account/delete")
-api.add_resource(AccountDeleteUpdateFeedbackApi, "/account/delete/feedback")
-api.add_resource(EducationVerifyApi, "/account/education/verify")
-api.add_resource(EducationApi, "/account/education")
-api.add_resource(EducationAutoCompleteApi, "/account/education/autocomplete")
-# Change email
-api.add_resource(ChangeEmailSendEmailApi, "/account/change-email")
-api.add_resource(ChangeEmailCheckApi, "/account/change-email/validity")
-api.add_resource(ChangeEmailResetApi, "/account/change-email/reset")
-api.add_resource(CheckEmailUnique, "/account/change-email/check-email-unique")
-# api.add_resource(AccountEmailApi, '/account/email')
-# api.add_resource(AccountEmailVerifyApi, '/account/email-verify')
diff --git a/api/controllers/console/workspace/agent_providers.py b/api/controllers/console/workspace/agent_providers.py
index 0a2c8fcfb4..0a8f49d2e5 100644
--- a/api/controllers/console/workspace/agent_providers.py
+++ b/api/controllers/console/workspace/agent_providers.py
@@ -1,10 +1,9 @@
-from flask_login import current_user
 from flask_restx import Resource, fields
 
 from controllers.console import api, console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.utils.encoders import jsonable_encoder
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from services.agent_service import AgentService
 
 
@@ -21,10 +20,11 @@ class AgentProviderListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
+        current_user, current_tenant_id = current_account_with_tenant()
         user = current_user
 
         user_id = user.id
-        tenant_id = user.current_tenant_id
+        tenant_id = current_tenant_id
 
         return jsonable_encoder(AgentService.list_agent_providers(user_id, tenant_id))
 
@@ -43,7 +43,5 @@ class AgentProviderApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, provider_name: str):
-        user = current_user
-        user_id = user.id
-        tenant_id = user.current_tenant_id
-        return jsonable_encoder(AgentService.get_agent_provider(user_id, tenant_id, provider_name))
+        current_user, current_tenant_id = current_account_with_tenant()
+        return jsonable_encoder(AgentService.get_agent_provider(current_user.id, current_tenant_id, provider_name))
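The endpoint controllers that follow all share one shape: unpack the account and tenant once, gate on is_admin_or_owner, then pass explicit ids to the service layer. A condensed sketch of that pattern, with a hypothetical service object standing in for EndpointService:

from werkzeug.exceptions import Forbidden


def guarded_delete(user, tenant_id: str, service, endpoint_id: str) -> dict:
    # Gate once, then hand explicit ids to the service instead of a proxy.
    if not user.is_admin_or_owner:
        raise Forbidden()
    return {"success": service.delete_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)}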
diff --git a/api/controllers/console/workspace/endpoint.py b/api/controllers/console/workspace/endpoint.py
index 0657b764cc..d115f62d73 100644
--- a/api/controllers/console/workspace/endpoint.py
+++ b/api/controllers/console/workspace/endpoint.py
@@ -1,4 +1,3 @@
-from flask_login import current_user
 from flask_restx import Resource, fields, reqparse
 from werkzeug.exceptions import Forbidden
 
@@ -6,7 +5,7 @@ from controllers.console import api, console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.plugin.impl.exc import PluginPermissionDeniedError
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from services.plugin.endpoint_service import EndpointService
 
 
@@ -34,14 +33,16 @@ class EndpointCreateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
 
         if not user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_unique_identifier", type=str, required=True)
-        parser.add_argument("settings", type=dict, required=True)
-        parser.add_argument("name", type=str, required=True)
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("plugin_unique_identifier", type=str, required=True)
+            .add_argument("settings", type=dict, required=True)
+            .add_argument("name", type=str, required=True)
+        )
         args = parser.parse_args()
 
         plugin_unique_identifier = args["plugin_unique_identifier"]
@@ -51,7 +52,7 @@ class EndpointCreateApi(Resource):
         try:
             return {
                 "success": EndpointService.create_endpoint(
-                    tenant_id=user.current_tenant_id,
+                    tenant_id=tenant_id,
                     user_id=user.id,
                     plugin_unique_identifier=plugin_unique_identifier,
                     name=name,
@@ -80,11 +81,13 @@ class EndpointListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=int, required=True, location="args")
-        parser.add_argument("page_size", type=int, required=True, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("page", type=int, required=True, location="args")
+            .add_argument("page_size", type=int, required=True, location="args")
+        )
         args = parser.parse_args()
 
         page = args["page"]
@@ -93,7 +96,7 @@ class EndpointListApi(Resource):
         return jsonable_encoder(
             {
                 "endpoints": EndpointService.list_endpoints(
-                    tenant_id=user.current_tenant_id,
+                    tenant_id=tenant_id,
                     user_id=user.id,
                     page=page,
                     page_size=page_size,
@@ -123,12 +126,14 @@ class EndpointListForSinglePluginApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=int, required=True, location="args")
-        parser.add_argument("page_size", type=int, required=True, location="args")
-        parser.add_argument("plugin_id", type=str, required=True, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("page", type=int, required=True, location="args")
+            .add_argument("page_size", type=int, required=True, location="args")
+            .add_argument("plugin_id", type=str, required=True, location="args")
+        )
         args = parser.parse_args()
 
         page = args["page"]
@@ -138,7 +143,7 @@ class EndpointListForSinglePluginApi(Resource):
         return jsonable_encoder(
             {
                 "endpoints": EndpointService.list_endpoints_for_single_plugin(
-                    tenant_id=user.current_tenant_id,
+                    tenant_id=tenant_id,
                     user_id=user.id,
                     plugin_id=plugin_id,
                     page=page,
@@ -165,10 +170,9 @@ class EndpointDeleteApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
+        parser = reqparse.RequestParser().add_argument("endpoint_id", type=str, required=True)
         args = parser.parse_args()
 
         if not user.is_admin_or_owner:
@@ -177,9 +181,7 @@ class EndpointDeleteApi(Resource):
         endpoint_id = args["endpoint_id"]
 
         return {
-            "success": EndpointService.delete_endpoint(
-                tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id
-            )
+            "success": EndpointService.delete_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
         }
 
 
@@ -207,12 +209,14 @@ class EndpointUpdateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
-        parser.add_argument("settings", type=dict, required=True)
-        parser.add_argument("name", type=str, required=True)
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("endpoint_id", type=str, required=True)
+            .add_argument("settings", type=dict, required=True)
+            .add_argument("name", type=str, required=True)
+        )
         args = parser.parse_args()
 
         endpoint_id = args["endpoint_id"]
@@ -224,7 +228,7 @@ class EndpointUpdateApi(Resource):
 
         return {
             "success": EndpointService.update_endpoint(
-                tenant_id=user.current_tenant_id,
+                tenant_id=tenant_id,
                 user_id=user.id,
                 endpoint_id=endpoint_id,
                 name=name,
@@ -250,10 +254,9 @@ class EndpointEnableApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
+        parser = reqparse.RequestParser().add_argument("endpoint_id", type=str, required=True)
         args = parser.parse_args()
 
         endpoint_id = args["endpoint_id"]
@@ -262,9 +265,7 @@ class EndpointEnableApi(Resource):
             raise Forbidden()
 
         return {
-            "success": EndpointService.enable_endpoint(
-                tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id
-            )
+            "success": EndpointService.enable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
         }
 
 
@@ -285,10 +286,9 @@ class EndpointDisableApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
+        parser = reqparse.RequestParser().add_argument("endpoint_id", type=str, required=True)
         args = parser.parse_args()
 
         endpoint_id = args["endpoint_id"]
@@ -297,7 +297,5 @@ class EndpointDisableApi(Resource):
             raise Forbidden()
 
         return {
-            "success": EndpointService.disable_endpoint(
-                tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id
-            )
+            "success": EndpointService.disable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
         }
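The load-balancing validators below gate on TenantAccountRole.is_privileged_role instead of the old isinstance/assert pairs. A hedged sketch of that gate for context; "privileged" is assumed to mean owner or admin:

from enum import StrEnum


class TenantAccountRole(StrEnum):
    OWNER = "owner"
    ADMIN = "admin"
    EDITOR = "editor"
    NORMAL = "normal"

    @classmethod
    def is_privileged_role(cls, role: "TenantAccountRole | str | None") -> bool:
        # StrEnum members compare equal to their string values, so this works
        # for both enum members and raw role strings.
        return role in {cls.OWNER, cls.ADMIN}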
diff --git a/api/controllers/console/workspace/load_balancing_config.py b/api/controllers/console/workspace/load_balancing_config.py
index 7c1bc7c075..9bf393ea2e 100644
--- a/api/controllers/console/workspace/load_balancing_config.py
+++ b/api/controllers/console/workspace/load_balancing_config.py
@@ -1,38 +1,42 @@
 from flask_restx import Resource, reqparse
 from werkzeug.exceptions import Forbidden
 
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
-from libs.login import current_user, login_required
-from models.account import Account, TenantAccountRole
+from libs.login import current_account_with_tenant, login_required
+from models import TenantAccountRole
 from services.model_load_balancing_service import ModelLoadBalancingService
 
 
+@console_ns.route(
+    "/workspaces/current/model-providers/<path:provider>/models/load-balancing-configs/credentials-validate"
+)
 class LoadBalancingCredentialsValidateApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        assert isinstance(current_user, Account)
+        current_user, current_tenant_id = current_account_with_tenant()
         if not TenantAccountRole.is_privileged_role(current_user.current_role):
             raise Forbidden()
 
-        tenant_id = current_user.current_tenant_id
-        assert tenant_id is not None
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         )
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         # validate model load balancing credentials
@@ -61,29 +65,33 @@ class LoadBalancingCredentialsValidateApi(Resource):
         return response
 
 
+@console_ns.route(
+    "/workspaces/current/model-providers/<path:provider>/models/load-balancing-configs/<string:config_id>/credentials-validate"
+)
 class LoadBalancingConfigCredentialsValidateApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, provider: str, config_id: str):
-        assert isinstance(current_user, Account)
+        current_user, current_tenant_id = current_account_with_tenant()
         if not TenantAccountRole.is_privileged_role(current_user.current_role):
             raise Forbidden()
 
-        tenant_id = current_user.current_tenant_id
-        assert tenant_id is not None
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         )
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         # validate model load balancing config credentials
@@ -111,15 +119,3 @@ class LoadBalancingConfigCredentialsValidateApi(Resource):
             response["error"] = error
 
         return response
-
-
-# Load Balancing Config
-api.add_resource(
-    LoadBalancingCredentialsValidateApi,
-    "/workspaces/current/model-providers/<path:provider>/models/load-balancing-configs/credentials-validate",
-)
-
-api.add_resource(
-    LoadBalancingConfigCredentialsValidateApi,
-    "/workspaces/current/model-providers/<path:provider>/models/load-balancing-configs/<string:config_id>/credentials-validate",
-)
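A note on the provider segment in the routes above: a path converter is the natural fit when a URL segment may itself contain slashes (plugin provider ids such as "langgenius/openai/openai"), which the default string converter would reject; the converter choice here is inferred, not stated by the diff. A minimal Flask illustration:

from flask import Flask

app = Flask(__name__)


@app.route("/model-providers/<path:provider>/validate")
def validate(provider: str):
    # With <path:provider>, "langgenius/openai/openai" arrives as one value.
    return {"provider": provider}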
diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py
index 77f0c9a735..d66f861799 100644
--- a/api/controllers/console/workspace/members.py
+++ b/api/controllers/console/workspace/members.py
@@ -1,12 +1,11 @@
 from urllib import parse
 
 from flask import abort, request
-from flask_login import current_user
 from flask_restx import Resource, marshal_with, reqparse
 
 import services
 from configs import dify_config
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.auth.error import (
     CannotTransferOwnerToSelfError,
     EmailCodeError,
@@ -26,13 +25,14 @@ from controllers.console.wraps import (
 from extensions.ext_database import db
 from fields.member_fields import account_with_role_list_fields
 from libs.helper import extract_remote_ip
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models.account import Account, TenantAccountRole
 from services.account_service import AccountService, RegisterService, TenantService
 from services.errors.account import AccountAlreadyInTenantError
 from services.feature_service import FeatureService
 
 
+@console_ns.route("/workspaces/current/members")
 class MemberListApi(Resource):
     """List all members of current tenant."""
 
@@ -41,14 +41,14 @@ class MemberListApi(Resource):
     @account_initialization_required
     @marshal_with(account_with_role_list_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         members = TenantService.get_tenant_members(current_user.current_tenant)
         return {"result": "success", "accounts": members}, 200
 
 
+@console_ns.route("/workspaces/current/members/invite-email")
 class MemberInviteEmailApi(Resource):
     """Invite a new member by email."""
 
@@ -57,10 +57,12 @@ class MemberInviteEmailApi(Resource):
     @account_initialization_required
     @cloud_edition_billing_resource_check("members")
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("emails", type=list, required=True, location="json")
-        parser.add_argument("role", type=str, required=True, default="admin", location="json")
-        parser.add_argument("language", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("emails", type=list, required=True, location="json")
+            .add_argument("role", type=str, required=True, default="admin", location="json")
+            .add_argument("language", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         invitee_emails = args["emails"]
@@ -68,9 +70,7 @@ class MemberInviteEmailApi(Resource):
         interface_language = args["language"]
         if not TenantAccountRole.is_non_owner_role(invitee_role):
             return {"code": "invalid-role", "message": "Invalid role"}, 400
-
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         inviter = current_user
         if not inviter.current_tenant:
             raise ValueError("No current tenant")
@@ -111,6 +111,7 @@ class MemberInviteEmailApi(Resource):
         }, 201
 
 
+@console_ns.route("/workspaces/current/members/<uuid:member_id>")
 class MemberCancelInviteApi(Resource):
     """Cancel an invitation by member id."""
 
@@ -118,8 +119,7 @@ class MemberCancelInviteApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, member_id):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         member = db.session.query(Account).where(Account.id == str(member_id)).first()
@@ -143,6 +143,7 @@ class MemberCancelInviteApi(Resource):
         }, 200
 
 
+@console_ns.route("/workspaces/current/members/<uuid:member_id>/update-role")
 class MemberUpdateRoleApi(Resource):
     """Update member role."""
 
@@ -150,16 +151,13 @@ class MemberUpdateRoleApi(Resource):
     @login_required
     @account_initialization_required
     def put(self, member_id):
-        parser = reqparse.RequestParser()
-        parser.add_argument("role", type=str, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("role", type=str, required=True, location="json")
         args = parser.parse_args()
         new_role = args["role"]
 
         if not TenantAccountRole.is_valid_role(new_role):
             return {"code": "invalid-role", "message": "Invalid role"}, 400
-
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         member = db.session.get(Account, str(member_id))
@@ -177,6 +175,7 @@ class MemberUpdateRoleApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/workspaces/current/dataset-operators")
 class DatasetOperatorMemberListApi(Resource):
     """List all members of current tenant."""
 
@@ -185,14 +184,14 @@ class DatasetOperatorMemberListApi(Resource):
     @account_initialization_required
     @marshal_with(account_with_role_list_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         members = TenantService.get_dataset_operator_members(current_user.current_tenant)
         return {"result": "success", "accounts": members}, 200
 
 
+@console_ns.route("/workspaces/current/members/send-owner-transfer-confirm-email")
 class SendOwnerTransferEmailApi(Resource):
     """Send owner transfer email."""
 
@@ -201,16 +200,13 @@ class SendOwnerTransferEmailApi(Resource):
     @account_initialization_required
     @is_allow_transfer_owner
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("language", type=str, required=False, location="json")
+        parser = reqparse.RequestParser().add_argument("language", type=str, required=False, location="json")
         args = parser.parse_args()
         ip_address = extract_remote_ip(request)
         if AccountService.is_email_send_ip_limit(ip_address):
             raise EmailSendIpLimitError()
-
+        current_user, _ = current_account_with_tenant()
         # check if the current user is the owner of the workspace
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         if not TenantService.is_owner(current_user, current_user.current_tenant):
@@ -233,19 +229,21 @@ class SendOwnerTransferEmailApi(Resource):
         return {"result": "success", "data": token}
 
 
+@console_ns.route("/workspaces/current/members/owner-transfer-check")
 class OwnerTransferCheckApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @is_allow_transfer_owner
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("code", type=str, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("code", type=str, required=True, location="json")
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
 
         # check if the current user is the owner of the workspace
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         if not TenantService.is_owner(current_user, current_user.current_tenant):
@@ -278,19 +276,20 @@ class OwnerTransferCheckApi(Resource):
         return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
 
 
+@console_ns.route("/workspaces/current/members/<uuid:member_id>/owner-transfer")
 class OwnerTransfer(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @is_allow_transfer_owner
     def post(self, member_id):
-        parser = reqparse.RequestParser()
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "token", type=str, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
 
         # check if the current user is the owner of the workspace
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         if not TenantService.is_owner(current_user, current_user.current_tenant):
@@ -339,14 +338,3 @@ class OwnerTransfer(Resource):
             raise ValueError(str(e))
 
         return {"result": "success"}
-
-
-api.add_resource(MemberListApi, "/workspaces/current/members")
-api.add_resource(MemberInviteEmailApi, "/workspaces/current/members/invite-email")
-api.add_resource(MemberCancelInviteApi, "/workspaces/current/members/<uuid:member_id>")
-api.add_resource(MemberUpdateRoleApi, "/workspaces/current/members/<uuid:member_id>/update-role")
-api.add_resource(DatasetOperatorMemberListApi, "/workspaces/current/dataset-operators")
-# owner transfer
-api.add_resource(SendOwnerTransferEmailApi, "/workspaces/current/members/send-owner-transfer-confirm-email")
-api.add_resource(OwnerTransferCheckApi, "/workspaces/current/members/owner-transfer-check")
-api.add_resource(OwnerTransfer, "/workspaces/current/members/<uuid:member_id>/owner-transfer")
diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py
index 0c9db660aa..04db975fc2 100644
--- a/api/controllers/console/workspace/model_providers.py
+++ b/api/controllers/console/workspace/model_providers.py
@@ -1,35 +1,30 @@
 import io
 
 from flask import send_file
-from flask_login import current_user
 from flask_restx import Resource, reqparse
 from werkzeug.exceptions import Forbidden
 
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
 from core.model_runtime.utils.encoders import jsonable_encoder
 from libs.helper import StrLen, uuid_value
-from libs.login import login_required
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
 from services.billing_service import BillingService
 from services.model_provider_service import ModelProviderService
 
 
+@console_ns.route("/workspaces/current/model-providers")
 class ModelProviderListApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "model_type",
             type=str,
             required=False,
@@ -45,19 +40,18 @@ class ModelProviderListApi(Resource):
         return jsonable_encoder({"data": provider_list})
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/credentials")
 class ModelProviderCredentialApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
+        tenant_id = current_tenant_id
         # if credential_id is not provided, return current used credential
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args")
+        parser = reqparse.RequestParser().add_argument(
+            "credential_id", type=uuid_value, required=False, nullable=True, location="args"
+        )
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
@@ -71,23 +65,22 @@ class ModelProviderCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+            .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        )
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
 
         try:
             model_provider_service.create_provider_credential(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=current_tenant_id,
                 provider=provider,
                 credentials=args["credentials"],
                 credential_name=args["name"],
@@ -101,24 +94,23 @@ class ModelProviderCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def put(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+            .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        )
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
 
         try:
             model_provider_service.update_provider_credential(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=current_tenant_id,
                 provider=provider,
                 credentials=args["credentials"],
                 credential_id=args["credential_id"],
@@ -133,62 +125,58 @@ class ModelProviderCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "credential_id", type=uuid_value, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
 
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         model_provider_service = ModelProviderService()
         model_provider_service.remove_provider_credential(
-            tenant_id=current_user.current_tenant_id, provider=provider, credential_id=args["credential_id"]
+            tenant_id=current_tenant_id, provider=provider, credential_id=args["credential_id"]
        )
 
         return {"result": "success"}, 204
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/credentials/switch")
 class ModelProviderCredentialSwitchApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "credential_id", type=str, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         service = ModelProviderService()
         service.switch_active_provider_credential(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             provider=provider,
             credential_id=args["credential_id"],
         )
         return {"result": "success"}
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/credentials/validate")
 class ModelProviderValidateApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+        _, current_tenant_id = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument(
+            "credentials", type=dict, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
 
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        tenant_id = current_tenant_id
 
         model_provider_service = ModelProviderService()
@@ -211,6 +199,7 @@ class ModelProviderValidateApi(Resource):
         return response
 
 
+@console_ns.route("/workspaces/<uuid:tenant_id>/model-providers/<path:provider>/<string:icon_type>/<string:lang>")
 class ModelProviderIconApi(Resource):
     """
     Get model provider icon
@@ -229,22 +218,19 @@ class ModelProviderIconApi(Resource):
         return send_file(io.BytesIO(icon), mimetype=mimetype)
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/preferred-provider-type")
 class PreferredProviderTypeUpdateApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "preferred_provider_type",
             type=str,
             required=True,
@@ -262,6 +248,7 @@ class PreferredProviderTypeUpdateApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/checkout-url")
 class ModelProviderPaymentCheckoutUrlApi(Resource):
     @setup_required
     @login_required
@@ -269,33 +256,12 @@ class ModelProviderPaymentCheckoutUrlApi(Resource):
     def get(self, provider: str):
         if provider != "anthropic":
             raise ValueError(f"provider name {provider} is invalid")
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         BillingService.is_tenant_owner_or_admin(current_user)
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         data = BillingService.get_model_provider_payment_link(
             provider_name=provider,
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             account_id=current_user.id,
             prefilled_email=current_user.email,
         )
         return data
-
-
-api.add_resource(ModelProviderListApi, "/workspaces/current/model-providers")
-
-api.add_resource(ModelProviderCredentialApi, "/workspaces/current/model-providers/<path:provider>/credentials")
-api.add_resource(
-    ModelProviderCredentialSwitchApi, "/workspaces/current/model-providers/<path:provider>/credentials/switch"
-)
-api.add_resource(ModelProviderValidateApi, "/workspaces/current/model-providers/<path:provider>/credentials/validate")
-
-api.add_resource(
-    PreferredProviderTypeUpdateApi, "/workspaces/current/model-providers/<path:provider>/preferred-provider-type"
-)
-api.add_resource(ModelProviderPaymentCheckoutUrlApi, "/workspaces/current/model-providers/<path:provider>/checkout-url")
-api.add_resource(
-    ModelProviderIconApi,
-    "/workspaces/<uuid:tenant_id>/model-providers/<path:provider>/<string:icon_type>/<string:lang>",
-)
diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py
index f174fcc5d3..5ab958d585 100644
--- a/api/controllers/console/workspace/models.py
+++ b/api/controllers/console/workspace/models.py
@@ -1,29 +1,30 @@
 import logging
 
-from flask_login import current_user
 from flask_restx import Resource, reqparse
 from werkzeug.exceptions import Forbidden
 
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
 from core.model_runtime.utils.encoders import jsonable_encoder
 from libs.helper import StrLen, uuid_value
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from services.model_load_balancing_service import ModelLoadBalancingService
 from services.model_provider_service import ModelProviderService
 
 logger = logging.getLogger(__name__)
 
 
+@console_ns.route("/workspaces/current/default-model")
 class DefaultModelApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        _, tenant_id = current_account_with_tenant()
+
+        parser = reqparse.RequestParser().add_argument(
             "model_type",
             type=str,
             required=True,
@@ -33,8 +34,6 @@ class DefaultModelApi(Resource):
         )
         args = parser.parse_args()
 
-        tenant_id = current_user.current_tenant_id
-
         model_provider_service = ModelProviderService()
         default_model_entity = model_provider_service.get_default_model_of_model_type(
             tenant_id=tenant_id, model_type=args["model_type"]
@@ -46,15 +45,15 @@ class DefaultModelApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
+        current_user, tenant_id = current_account_with_tenant()
+
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model_settings", type=list, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "model_settings", type=list, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
-
-        tenant_id = current_user.current_tenant_id
-
         model_provider_service = ModelProviderService()
         model_settings = args["model_settings"]
         for model_setting in model_settings:
@@ -85,12 +84,13 @@ class DefaultModelApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/models")
 class ModelProviderModelApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self, provider):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         model_provider_service = ModelProviderService()
         models = model_provider_service.get_models_by_provider(tenant_id=tenant_id, provider=provider)
@@ -102,24 +102,26 @@ class ModelProviderModelApi(Resource):
     @account_initialization_required
     def post(self, provider: str):
         # To save the model's load balance configs
+        current_user, tenant_id = current_account_with_tenant()
+
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        tenant_id = current_user.current_tenant_id
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("load_balancing", type=dict, required=False, nullable=True, location="json")
+            .add_argument("config_from", type=str, required=False, nullable=True, location="json")
+            .add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="json")
         )
-        parser.add_argument("load_balancing", type=dict, required=False, nullable=True, location="json")
-        parser.add_argument("config_from", type=str, required=False, nullable=True, location="json")
-        parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="json")
         args = parser.parse_args()
 
         if args.get("config_from", "") == "custom-model":
@@ -127,7 +129,7 @@ class ModelProviderModelApi(Resource):
                 raise ValueError("credential_id is required when configuring a custom-model")
             service = ModelProviderService()
             service.switch_active_custom_model_credential(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=tenant_id,
                 provider=provider,
                 model_type=args["model_type"],
                 model=args["model"],
@@ -162,20 +164,22 @@ class ModelProviderModelApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, provider: str):
+        current_user, tenant_id = current_account_with_tenant()
+
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        tenant_id = current_user.current_tenant_id
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
         )
         args = parser.parse_args()
 
@@ -187,25 +191,28 @@ class ModelProviderModelApi(Resource):
         return {"result": "success"}, 204
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials")
 class ModelProviderModelCredentialApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self, provider: str):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="args")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="args",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="args")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="args",
+            )
+            .add_argument("config_from", type=str, required=False, nullable=True, location="args")
+            .add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args")
         )
-        parser.add_argument("config_from", type=str, required=False, nullable=True, location="args")
-        parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args")
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
@@ -254,24 +261,27 @@ class ModelProviderModelCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
+        current_user, tenant_id = current_account_with_tenant()
+
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         )
-        parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
-        tenant_id = current_user.current_tenant_id
 
         model_provider_service = ModelProviderService()
 
         try:
@@ -298,29 +308,33 @@ class ModelProviderModelCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def put(self, provider: str):
+        current_user, current_tenant_id = current_account_with_tenant()
+
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+            .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
        )
-        parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
 
         try:
             model_provider_service.update_model_credential(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=current_tenant_id,
                 provider=provider,
                 model_type=args["model_type"],
                 model=args["model"],
@@ -337,24 +351,28 @@ class ModelProviderModelCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, provider: str):
+        current_user, current_tenant_id = current_account_with_tenant()
+
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
         )
-        parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
         model_provider_service.remove_model_credential(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             provider=provider,
             model_type=args["model_type"],
             model=args["model"],
@@ -364,29 +382,34 @@ class ModelProviderModelCredentialApi(Resource):
         return {"result": "success"}, 204
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/switch")
 class ModelProviderModelCredentialSwitchApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, provider: str):
+        current_user, current_tenant_id = current_account_with_tenant()
+
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credential_id", type=str, required=True, nullable=False, location="json")
         )
-        parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         service = ModelProviderService()
         service.add_model_credential_to_model_list(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             provider=provider,
             model_type=args["model_type"],
             model=args["model"],
@@ -395,22 +418,27 @@ class ModelProviderModelCredentialSwitchApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route(
+    "/workspaces/current/model-providers/<path:provider>/models/enable", endpoint="model-provider-model-enable"
+)
 class ModelProviderModelEnableApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def patch(self, provider: str):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
        )
         args = parser.parse_args()
 
@@ -422,22 +450,27 @@ class ModelProviderModelEnableApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route(
+    "/workspaces/current/model-providers/<path:provider>/models/disable", endpoint="model-provider-model-disable"
+)
 class ModelProviderModelDisableApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def patch(self, provider: str):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
        )
         args = parser.parse_args()
 
@@ -449,24 +482,27 @@ class ModelProviderModelDisableApi(Resource):
         return {"result": "success"}
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/validate")
 class ModelProviderModelValidateApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
        )
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
@@ -494,16 +530,17 @@ class ModelProviderModelValidateApi(Resource):
         return response
 
 
+@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/parameter-rules")
 class ModelProviderModelParameterRuleApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self, provider: str):
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="args")
+        parser = reqparse.RequestParser().add_argument(
+            "model", type=str, required=True, nullable=False, location="args"
+        )
         args = parser.parse_args()
-
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         model_provider_service = ModelProviderService()
         parameter_rules = model_provider_service.get_model_parameter_rules(
@@ -513,43 +550,14 @@ class ModelProviderModelParameterRuleApi(Resource):
         return jsonable_encoder({"data": parameter_rules})
 
 
+@console_ns.route("/workspaces/current/models/model-types/<string:model_type>")
 class ModelProviderAvailableModelApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self, model_type):
-        tenant_id = current_user.current_tenant_id
-
+        _, tenant_id = current_account_with_tenant()
         model_provider_service = ModelProviderService()
 
         models = model_provider_service.get_models_by_model_type(tenant_id=tenant_id, model_type=model_type)
 
         return jsonable_encoder({"data": models})
-
-
-api.add_resource(ModelProviderModelApi, "/workspaces/current/model-providers/<path:provider>/models")
-api.add_resource(
-    ModelProviderModelEnableApi,
-    "/workspaces/current/model-providers/<path:provider>/models/enable",
-    endpoint="model-provider-model-enable",
-)
-api.add_resource(
-    ModelProviderModelDisableApi,
-    "/workspaces/current/model-providers/<path:provider>/models/disable",
-    endpoint="model-provider-model-disable",
-)
-api.add_resource(
-    ModelProviderModelCredentialApi, "/workspaces/current/model-providers/<path:provider>/models/credentials"
-)
-api.add_resource(
-    ModelProviderModelCredentialSwitchApi,
-    "/workspaces/current/model-providers/<path:provider>/models/credentials/switch",
-)
-api.add_resource(
-    ModelProviderModelValidateApi, "/workspaces/current/model-providers/<path:provider>/models/credentials/validate"
-)
-
-api.add_resource(
-    ModelProviderModelParameterRuleApi, "/workspaces/current/model-providers/<path:provider>/models/parameter-rules"
-)
-api.add_resource(ModelProviderAvailableModelApi, "/workspaces/current/models/model-types/<string:model_type>")
-api.add_resource(DefaultModelApi, "/workspaces/current/default-model")
diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py
index fd5421fa64..e8bc312caf 100644
--- a/api/controllers/console/workspace/plugin.py
+++ b/api/controllers/console/workspace/plugin.py
@@ -1,17 +1,16 @@
 import io
 
 from flask import request, send_file
-from flask_login import current_user
 from flask_restx import Resource, reqparse
 from werkzeug.exceptions import Forbidden
 
 from configs import dify_config
-from controllers.console import api
+from controllers.console import console_ns
 from controllers.console.workspace import plugin_permission_required
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.plugin.impl.exc import PluginDaemonClientSideError
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models.account import TenantPluginAutoUpgradeStrategy, TenantPluginPermission
 from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
 from services.plugin.plugin_parameter_service import PluginParameterService
@@ -19,13 +18,14 @@ from services.plugin.plugin_permission_service import PluginPermissionService
 from services.plugin.plugin_service import PluginService
 
 
+@console_ns.route("/workspaces/current/plugin/debugging-key")
 class PluginDebuggingKeyApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(debug_required=True)
     def get(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         try:
             return {
@@ -37,15 +37,18 @@ class PluginDebuggingKeyApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/list")
 class PluginListApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self):
-        tenant_id = current_user.current_tenant_id
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=int, required=False, location="args", default=1)
-        parser.add_argument("page_size", type=int, required=False, location="args", default=256)
+        _, tenant_id = current_account_with_tenant()
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("page", type=int, required=False, location="args", default=1)
+            .add_argument("page_size", type=int, required=False, location="args", default=256)
+        )
         args = parser.parse_args()
         try:
             plugins_with_total = PluginService.list_with_total(tenant_id, args["page"], args["page_size"])
@@ -55,13 +58,13 @@ class PluginListApi(Resource):
         return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total})
 
 
+@console_ns.route("/workspaces/current/plugin/list/latest-versions")
 class PluginListLatestVersionsApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
-        req = reqparse.RequestParser()
-        req.add_argument("plugin_ids", type=list, required=True, location="json")
+        req = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json")
         args = req.parse_args()
 
         try:
@@ -72,15 +75,15 @@ class PluginListLatestVersionsApi(Resource):
         return jsonable_encoder({"versions": versions})
 
 
+@console_ns.route("/workspaces/current/plugin/list/installations/ids")
 class PluginListInstallationsFromIdsApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_ids", type=list, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json")
         args = parser.parse_args()
 
         try:
@@ -91,12 +94,15 @@ class PluginListInstallationsFromIdsApi(Resource):
         return jsonable_encoder({"plugins": plugins})
 
 
+@console_ns.route("/workspaces/current/plugin/icon")
 class PluginIconApi(Resource):
     @setup_required
     def get(self):
-        req = reqparse.RequestParser()
-        req.add_argument("tenant_id", type=str, required=True, location="args")
-        req.add_argument("filename", type=str, required=True, location="args")
+        req = (
+            reqparse.RequestParser()
+            .add_argument("tenant_id", type=str, required=True, location="args")
+            .add_argument("filename", type=str, required=True, location="args")
+        )
         args = req.parse_args()
 
         try:
@@ -108,13 +114,14 @@ class PluginIconApi(Resource):
         return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age)
 
 
+@console_ns.route("/workspaces/current/plugin/upload/pkg")
 class PluginUploadFromPkgApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         file = request.files["pkg"]
 
@@ -131,18 +138,21 @@ class PluginUploadFromPkgApi(Resource):
         return jsonable_encoder(response)
 
 
+@console_ns.route("/workspaces/current/plugin/upload/github")
 class PluginUploadFromGithubApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("repo", type=str, required=True, location="json")
-        parser.add_argument("version", type=str, required=True, location="json")
-        parser.add_argument("package", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("repo", type=str, required=True, location="json")
+            .add_argument("version", type=str, required=True, location="json")
+            .add_argument("package", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         try:
@@ -153,13 +163,14 @@ class PluginUploadFromGithubApi(Resource):
         return jsonable_encoder(response)
 
 
+@console_ns.route("/workspaces/current/plugin/upload/bundle")
 class PluginUploadFromBundleApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         file = request.files["bundle"]
 
@@ -176,16 +187,18 @@ class PluginUploadFromBundleApi(Resource):
         return jsonable_encoder(response)
 
 
+@console_ns.route("/workspaces/current/plugin/install/pkg")
 class PluginInstallFromPkgApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_unique_identifiers", type=list, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "plugin_unique_identifiers", type=list, required=True, location="json"
+        )
         args = parser.parse_args()
 
         # check if all plugin_unique_identifiers are valid string
@@ -201,19 +214,22 @@ class PluginInstallFromPkgApi(Resource):
         return jsonable_encoder(response)
 
 
+@console_ns.route("/workspaces/current/plugin/install/github")
 class PluginInstallFromGithubApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("repo", type=str, required=True, location="json")
-        parser.add_argument("version", type=str, required=True, location="json")
-        parser.add_argument("package", type=str, required=True, location="json")
-        parser.add_argument("plugin_unique_identifier", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("repo", type=str, required=True, location="json")
+            .add_argument("version", type=str, required=True, location="json")
+            .add_argument("package", type=str, required=True, location="json")
+            .add_argument("plugin_unique_identifier", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         try:
@@ -230,16 +246,18 @@ class PluginInstallFromGithubApi(Resource):
         return jsonable_encoder(response)
 
 
+@console_ns.route("/workspaces/current/plugin/install/marketplace")
 class PluginInstallFromMarketplaceApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_unique_identifiers", type=list, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "plugin_unique_identifiers", type=list, required=True, location="json"
+        )
         args = parser.parse_args()
 
         # check if all plugin_unique_identifiers are valid string
@@ -255,16 +273,18 @@ class PluginInstallFromMarketplaceApi(Resource):
         return jsonable_encoder(response)
 
 
+@console_ns.route("/workspaces/current/plugin/marketplace/pkg")
 class PluginFetchMarketplacePkgApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def get(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_unique_identifier", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument(
+            "plugin_unique_identifier", type=str, required=True, location="args"
+        )
         args = parser.parse_args()
 
         try:
@@ -280,16 +300,18 @@ class PluginFetchMarketplacePkgApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/fetch-manifest")
 class PluginFetchManifestApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def get(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_unique_identifier", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument(
+            "plugin_unique_identifier", type=str, required=True, location="args"
+        )
         args = parser.parse_args()
 
         try:
@@ -304,17 +326,20 @@ class PluginFetchManifestApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/tasks")
 class PluginFetchInstallTasksApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def get(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=int, required=True, location="args")
-        parser.add_argument("page_size", type=int, required=True, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("page", type=int, required=True, location="args")
+            .add_argument("page_size", type=int, required=True, location="args")
+        )
         args = parser.parse_args()
 
         try:
@@ -325,13 +350,14 @@ class PluginFetchInstallTasksApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/tasks/<task_id>")
 class PluginFetchInstallTaskApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def get(self, task_id: str):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         try:
             return jsonable_encoder({"task": PluginService.fetch_install_task(tenant_id, task_id)})
@@ -339,13 +365,14 @@ class PluginFetchInstallTaskApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/tasks/<task_id>/delete")
 class PluginDeleteInstallTaskApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self, task_id: str):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         try:
             return {"success": PluginService.delete_install_task(tenant_id, task_id)}
@@ -353,13 +380,14 @@ class PluginDeleteInstallTaskApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/tasks/delete_all")
 class PluginDeleteAllInstallTaskItemsApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         try:
             return {"success": PluginService.delete_all_install_task_items(tenant_id)}
@@ -367,13 +395,14 @@ class PluginDeleteAllInstallTaskItemsApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/tasks/<task_id>/delete/<path:identifier>")
 class PluginDeleteInstallTaskItemApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self, task_id: str, identifier: str):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         try:
             return {"success": PluginService.delete_install_task_item(tenant_id, task_id, identifier)}
@@ -381,17 +410,20 @@ class PluginDeleteInstallTaskItemApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/upgrade/marketplace")
 class PluginUpgradeFromMarketplaceApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
-        parser.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
+            .add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         try:
@@ -404,20 +436,23 @@ class PluginUpgradeFromMarketplaceApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/upgrade/github")
 class PluginUpgradeFromGithubApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
-        parser.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
-        parser.add_argument("repo", type=str, required=True, location="json")
-        parser.add_argument("version", type=str, required=True, location="json")
-        parser.add_argument("package", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
+            .add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
+            .add_argument("repo", type=str, required=True, location="json")
+            .add_argument("version", type=str, required=True, location="json")
+            .add_argument("package", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         try:
@@ -435,17 +470,17 @@ class PluginUpgradeFromGithubApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/uninstall")
 class PluginUninstallApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     @plugin_permission_required(install_required=True)
     def post(self):
-        req = reqparse.RequestParser()
-        req.add_argument("plugin_installation_id", type=str, required=True, location="json")
+        req = reqparse.RequestParser().add_argument("plugin_installation_id", type=str, required=True, location="json")
         args = req.parse_args()
 
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         try:
             return {"success": PluginService.uninstall(tenant_id, args["plugin_installation_id"])}
@@ -453,34 +488,39 @@ class PluginUninstallApi(Resource):
             raise ValueError(e)
 
 
+@console_ns.route("/workspaces/current/plugin/permission/change")
 class PluginChangePermissionApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
+        current_user, current_tenant_id = current_account_with_tenant()
         user = current_user
         if not user.is_admin_or_owner:
             raise Forbidden()
 
-        req = reqparse.RequestParser()
-        req.add_argument("install_permission", type=str, required=True, location="json")
-        req.add_argument("debug_permission", type=str, required=True, location="json")
+        req = (
+            reqparse.RequestParser()
+            .add_argument("install_permission", type=str, required=True, location="json")
+            .add_argument("debug_permission", type=str, required=True, location="json")
+        )
         args = req.parse_args()
 
         install_permission = TenantPluginPermission.InstallPermission(args["install_permission"])
         debug_permission = TenantPluginPermission.DebugPermission(args["debug_permission"])
 
-        tenant_id = user.current_tenant_id
+        tenant_id = current_tenant_id
 
         return {"success": PluginPermissionService.change_permission(tenant_id, install_permission, debug_permission)}
 
 
+@console_ns.route("/workspaces/current/plugin/permission/fetch")
 class PluginFetchPermissionApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         permission = PluginPermissionService.get_permission(tenant_id)
         if not permission:
@@ -499,24 +539,27 @@ class PluginFetchPermissionApi(Resource):
         )
 
 
+@console_ns.route("/workspaces/current/plugin/parameters/dynamic-options")
 class PluginFetchDynamicSelectOptionsApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self):
         # check if the user is admin or owner
+        current_user, tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        tenant_id = current_user.current_tenant_id
         user_id = current_user.id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_id", type=str, required=True, location="args")
-        parser.add_argument("provider", type=str, required=True, location="args")
-        parser.add_argument("action", type=str, required=True, location="args")
-        parser.add_argument("parameter", type=str, required=True, location="args")
-        parser.add_argument("provider_type", type=str, required=True, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("plugin_id", type=str, required=True, location="args")
+            .add_argument("provider", type=str, required=True, location="args")
+            .add_argument("action", type=str, required=True, location="args")
+            .add_argument("parameter", type=str, required=True, location="args")
+            .add_argument("provider_type", type=str, required=True, location="args")
+        )
         args = parser.parse_args()
 
         try:
@@ -535,22 +578,23 @@ class PluginFetchDynamicSelectOptionsApi(Resource):
         return jsonable_encoder({"options": options})
 
 
+@console_ns.route("/workspaces/current/plugin/preferences/change")
 class PluginChangePreferencesApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self):
-        user = current_user
+        user, tenant_id = current_account_with_tenant()
         if not user.is_admin_or_owner:
             raise Forbidden()
 
-        req = reqparse.RequestParser()
-        req.add_argument("permission", type=dict, required=True, location="json")
-        req.add_argument("auto_upgrade", type=dict, required=True, location="json")
+        req = (
+            reqparse.RequestParser()
+            .add_argument("permission", type=dict, required=True, location="json")
+            .add_argument("auto_upgrade", type=dict, required=True, location="json")
+        )
         args = req.parse_args()
 
-        tenant_id = user.current_tenant_id
-
         permission = args["permission"]
 
         install_permission = TenantPluginPermission.InstallPermission(permission.get("install_permission", "everyone"))
@@ -590,12 +634,13 @@ class PluginChangePreferencesApi(Resource):
         return jsonable_encoder({"success": True})
 
 
+@console_ns.route("/workspaces/current/plugin/preferences/fetch")
 class PluginFetchPreferencesApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
     def get(self):
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         permission = PluginPermissionService.get_permission(tenant_id)
         permission_dict = {
@@ -628,48 +673,16 @@ class PluginFetchPreferencesApi(Resource):
         return jsonable_encoder({"permission": permission_dict, "auto_upgrade": auto_upgrade_dict})
 
 
+@console_ns.route("/workspaces/current/plugin/preferences/autoupgrade/exclude") class PluginAutoUpgradeExcludePluginApi(Resource): @setup_required @login_required @account_initialization_required def post(self): # exclude one single plugin - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - req = reqparse.RequestParser() - req.add_argument("plugin_id", type=str, required=True, location="json") + req = reqparse.RequestParser().add_argument("plugin_id", type=str, required=True, location="json") args = req.parse_args() return jsonable_encoder({"success": PluginAutoUpgradeService.exclude_plugin(tenant_id, args["plugin_id"])}) - - -api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") -api.add_resource(PluginListApi, "/workspaces/current/plugin/list") -api.add_resource(PluginListLatestVersionsApi, "/workspaces/current/plugin/list/latest-versions") -api.add_resource(PluginListInstallationsFromIdsApi, "/workspaces/current/plugin/list/installations/ids") -api.add_resource(PluginIconApi, "/workspaces/current/plugin/icon") -api.add_resource(PluginUploadFromPkgApi, "/workspaces/current/plugin/upload/pkg") -api.add_resource(PluginUploadFromGithubApi, "/workspaces/current/plugin/upload/github") -api.add_resource(PluginUploadFromBundleApi, "/workspaces/current/plugin/upload/bundle") -api.add_resource(PluginInstallFromPkgApi, "/workspaces/current/plugin/install/pkg") -api.add_resource(PluginInstallFromGithubApi, "/workspaces/current/plugin/install/github") -api.add_resource(PluginUpgradeFromMarketplaceApi, "/workspaces/current/plugin/upgrade/marketplace") -api.add_resource(PluginUpgradeFromGithubApi, "/workspaces/current/plugin/upgrade/github") -api.add_resource(PluginInstallFromMarketplaceApi, "/workspaces/current/plugin/install/marketplace") -api.add_resource(PluginFetchManifestApi, "/workspaces/current/plugin/fetch-manifest") -api.add_resource(PluginFetchInstallTasksApi, "/workspaces/current/plugin/tasks") -api.add_resource(PluginFetchInstallTaskApi, "/workspaces/current/plugin/tasks/") -api.add_resource(PluginDeleteInstallTaskApi, "/workspaces/current/plugin/tasks//delete") -api.add_resource(PluginDeleteAllInstallTaskItemsApi, "/workspaces/current/plugin/tasks/delete_all") -api.add_resource(PluginDeleteInstallTaskItemApi, "/workspaces/current/plugin/tasks//delete/") -api.add_resource(PluginUninstallApi, "/workspaces/current/plugin/uninstall") -api.add_resource(PluginFetchMarketplacePkgApi, "/workspaces/current/plugin/marketplace/pkg") - -api.add_resource(PluginChangePermissionApi, "/workspaces/current/plugin/permission/change") -api.add_resource(PluginFetchPermissionApi, "/workspaces/current/plugin/permission/fetch") - -api.add_resource(PluginFetchDynamicSelectOptionsApi, "/workspaces/current/plugin/parameters/dynamic-options") - -api.add_resource(PluginFetchPreferencesApi, "/workspaces/current/plugin/preferences/fetch") -api.add_resource(PluginChangePreferencesApi, "/workspaces/current/plugin/preferences/change") -api.add_resource(PluginAutoUpgradeExcludePluginApi, "/workspaces/current/plugin/preferences/autoupgrade/exclude") diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 8693d99e23..870ad87c6c 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -2,34 +2,35 @@ import io from urllib.parse import urlparse from flask import make_response, redirect, request, send_file -from 
flask_login import current_user from flask_restx import ( Resource, reqparse, ) +from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden from configs import dify_config -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import ( account_initialization_required, enterprise_license_required, setup_required, ) +from core.entities.mcp_provider import MCPAuthentication, MCPConfiguration from core.mcp.auth.auth_flow import auth, handle_callback -from core.mcp.auth.auth_provider import OAuthClientProvider -from core.mcp.error import MCPAuthError, MCPError +from core.mcp.error import MCPAuthError, MCPError, MCPRefreshTokenError from core.mcp.mcp_client import MCPClient from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.oauth import OAuthHandler from core.tools.entities.tool_entities import CredentialType +from extensions.ext_database import db from libs.helper import StrLen, alphanumeric, uuid_value -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.provider_ids import ToolProviderID from services.plugin.oauth_service import OAuthProxyService from services.tools.api_tools_manage_service import ApiToolManageService from services.tools.builtin_tools_manage_service import BuiltinToolManageService -from services.tools.mcp_tools_manage_service import MCPToolManageService +from services.tools.mcp_tools_manage_service import MCPToolManageService, OAuthDataType from services.tools.tool_labels_service import ToolLabelsService from services.tools.tools_manage_service import ToolCommonService from services.tools.tools_transform_service import ToolTransformService @@ -43,22 +44,23 @@ def is_valid_url(url: str) -> bool: try: parsed = urlparse(url) return all([parsed.scheme, parsed.netloc]) and parsed.scheme in ["http", "https"] - except Exception: + except (ValueError, TypeError): + # ValueError: Invalid URL format + # TypeError: url is not a string return False +@console_ns.route("/workspaces/current/tool-providers") class ToolProviderListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - req = reqparse.RequestParser() - req.add_argument( + req = reqparse.RequestParser().add_argument( "type", type=str, choices=["builtin", "model", "api", "workflow", "mcp"], @@ -71,14 +73,13 @@ class ToolProviderListApi(Resource): return ToolCommonService.list_tool_providers(user_id, tenant_id, args.get("type", None)) +@console_ns.route("/workspaces/current/tool-provider/builtin//tools") class ToolBuiltinProviderListToolsApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - user = current_user - - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.list_builtin_tool_provider_tools( @@ -88,30 +89,30 @@ class ToolBuiltinProviderListToolsApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//info") class ToolBuiltinProviderInfoApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - user = current_user - - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(tenant_id, 
provider)) +@console_ns.route("/workspaces/current/tool-provider/builtin//delete") class ToolBuiltinProviderDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() - tenant_id = user.current_tenant_id - req = reqparse.RequestParser() - req.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + req = reqparse.RequestParser().add_argument( + "credential_id", type=str, required=True, nullable=False, location="json" + ) args = req.parse_args() return BuiltinToolManageService.delete_builtin_tool_provider( @@ -121,20 +122,22 @@ class ToolBuiltinProviderDeleteApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//add") class ToolBuiltinProviderAddApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("name", type=StrLen(30), required=False, nullable=False, location="json") - parser.add_argument("type", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("name", type=StrLen(30), required=False, nullable=False, location="json") + .add_argument("type", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() if args["type"] not in CredentialType.values(): @@ -150,23 +153,25 @@ class ToolBuiltinProviderAddApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//update") class ToolBuiltinProviderUpdateApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") - parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credential_id", type=str, required=True, nullable=False, location="json") + .add_argument("credentials", type=dict, required=False, nullable=True, location="json") + .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") + ) args = parser.parse_args() @@ -181,12 +186,13 @@ class ToolBuiltinProviderUpdateApi(Resource): return result +@console_ns.route("/workspaces/current/tool-provider/builtin//credentials") class ToolBuiltinProviderGetCredentialsApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_credentials( @@ -196,6 +202,7 @@ class ToolBuiltinProviderGetCredentialsApi(Resource): ) 
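Note: the recurring rewrite in these hunks swaps flask_login's `current_user` proxy for a single `current_account_with_tenant()` call that yields the account and its tenant id together. The helper's implementation in `libs/login.py` is not part of this diff; the sketch below is only a plausible shape, assuming it unwraps the proxy, narrows the type to `Account`, and rejects requests with no active workspace.

```python
# Hypothetical sketch of the helper imported from libs.login above; the real
# implementation may differ in details such as the exception types raised.
from flask_login import current_user
from werkzeug.exceptions import Unauthorized

from models.account import Account


def current_account_with_tenant() -> tuple[Account, str]:
    """Return the logged-in Account and its tenant id, failing closed."""
    user = current_user._get_current_object()  # unwrap the LocalProxy
    if not isinstance(user, Account):
        raise Unauthorized("A logged-in account is required.")
    if not user.current_tenant_id:
        raise Unauthorized("No workspace is selected for this account.")
    return user, user.current_tenant_id
```

Callers that need only one of the two values unpack with an underscore (`_, tenant_id = ...`), which keeps the tenant check in one place instead of scattering `isinstance` guards through every view.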
+@console_ns.route("/workspaces/current/tool-provider/builtin//icon") class ToolBuiltinProviderIconApi(Resource): @setup_required def get(self, provider): @@ -204,28 +211,30 @@ class ToolBuiltinProviderIconApi(Resource): return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age) +@console_ns.route("/workspaces/current/tool-provider/api/add") class ToolApiProviderAddApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("schema_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - parser.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json") - parser.add_argument("labels", type=list[str], required=False, nullable=True, location="json", default=[]) - parser.add_argument("custom_disclaimer", type=str, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("schema_type", type=str, required=True, nullable=False, location="json") + .add_argument("schema", type=str, required=True, nullable=False, location="json") + .add_argument("provider", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=False, nullable=True, location="json") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json", default=[]) + .add_argument("custom_disclaimer", type=str, required=False, nullable=True, location="json") + ) args = parser.parse_args() @@ -243,19 +252,17 @@ class ToolApiProviderAddApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/remote") class ToolApiProviderGetRemoteSchemaApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("url", type=str, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument("url", type=str, required=True, nullable=False, location="args") args = parser.parse_args() @@ -266,19 +273,19 @@ class ToolApiProviderGetRemoteSchemaApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/tools") class ToolApiProviderListToolsApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("provider", type=str, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument( + "provider", type=str, required=True, nullable=False, 
location="args" + ) args = parser.parse_args() @@ -291,29 +298,31 @@ class ToolApiProviderListToolsApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/update") class ToolApiProviderUpdateApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("schema_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") - parser.add_argument("original_provider", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - parser.add_argument("privacy_policy", type=str, required=True, nullable=True, location="json") - parser.add_argument("labels", type=list[str], required=False, nullable=True, location="json") - parser.add_argument("custom_disclaimer", type=str, required=True, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("schema_type", type=str, required=True, nullable=False, location="json") + .add_argument("schema", type=str, required=True, nullable=False, location="json") + .add_argument("provider", type=str, required=True, nullable=False, location="json") + .add_argument("original_provider", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=True, nullable=True, location="json") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json") + .add_argument("custom_disclaimer", type=str, required=True, nullable=True, location="json") + ) args = parser.parse_args() @@ -332,22 +341,22 @@ class ToolApiProviderUpdateApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/delete") class ToolApiProviderDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "provider", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() @@ -358,19 +367,19 @@ class ToolApiProviderDeleteApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/get") class ToolApiProviderGetApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("provider", type=str, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument( + "provider", type=str, 
required=True, nullable=False, location="args" + ) args = parser.parse_args() @@ -381,13 +390,13 @@ class ToolApiProviderGetApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//credential/schema/") class ToolBuiltinProviderCredentialsSchemaApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider, credential_type): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.list_builtin_provider_credentials_schema( @@ -396,14 +405,15 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/schema") class ToolApiProviderSchemaApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "schema", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() @@ -412,24 +422,26 @@ class ToolApiProviderSchemaApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/test/pre") class ToolApiProviderPreviousTestApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - - parser.add_argument("tool_name", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider_name", type=str, required=False, nullable=False, location="json") - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("parameters", type=dict, required=True, nullable=False, location="json") - parser.add_argument("schema_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tool_name", type=str, required=True, nullable=False, location="json") + .add_argument("provider_name", type=str, required=False, nullable=False, location="json") + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("parameters", type=dict, required=True, nullable=False, location="json") + .add_argument("schema_type", type=str, required=True, nullable=False, location="json") + .add_argument("schema", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() - + _, current_tenant_id = current_account_with_tenant() return ApiToolManageService.test_api_tool_preview( - current_user.current_tenant_id, + current_tenant_id, args["provider_name"] or "", args["tool_name"], args["credentials"], @@ -439,28 +451,30 @@ class ToolApiProviderPreviousTestApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/create") class ToolWorkflowProviderCreateApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - reqparser = reqparse.RequestParser() - reqparser.add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json") - reqparser.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") - 
reqparser.add_argument("label", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("description", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - reqparser.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") - reqparser.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") - reqparser.add_argument("labels", type=list[str], required=False, nullable=True, location="json") + reqparser = ( + reqparse.RequestParser() + .add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json") + .add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") + .add_argument("label", type=str, required=True, nullable=False, location="json") + .add_argument("description", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json") + ) args = reqparser.parse_args() @@ -478,28 +492,30 @@ class ToolWorkflowProviderCreateApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/update") class ToolWorkflowProviderUpdateApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - reqparser = reqparse.RequestParser() - reqparser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") - reqparser.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") - reqparser.add_argument("label", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("description", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - reqparser.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") - reqparser.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") - reqparser.add_argument("labels", type=list[str], required=False, nullable=True, location="json") + reqparser = ( + reqparse.RequestParser() + .add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") + .add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") + .add_argument("label", type=str, required=True, nullable=False, location="json") + .add_argument("description", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json") + ) args = reqparser.parse_args() @@ -520,21 +536,22 @@ class 
ToolWorkflowProviderUpdateApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/delete") class ToolWorkflowProviderDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - reqparser = reqparse.RequestParser() - reqparser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") + reqparser = reqparse.RequestParser().add_argument( + "workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json" + ) args = reqparser.parse_args() @@ -545,19 +562,21 @@ class ToolWorkflowProviderDeleteApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/get") class ToolWorkflowProviderGetApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args") - parser.add_argument("workflow_app_id", type=uuid_value, required=False, nullable=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args") + .add_argument("workflow_app_id", type=uuid_value, required=False, nullable=True, location="args") + ) args = parser.parse_args() @@ -579,18 +598,19 @@ class ToolWorkflowProviderGetApi(Resource): return jsonable_encoder(tool) +@console_ns.route("/workspaces/current/tool-provider/workflow/tools") class ToolWorkflowProviderListToolApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument( + "workflow_tool_id", type=uuid_value, required=True, nullable=False, location="args" + ) args = parser.parse_args() @@ -603,15 +623,15 @@ class ToolWorkflowProviderListToolApi(Resource): ) +@console_ns.route("/workspaces/current/tools/builtin") class ToolBuiltinListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -624,13 +644,13 @@ class ToolBuiltinListApi(Resource): ) +@console_ns.route("/workspaces/current/tools/api") class ToolApiListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( [ @@ -642,15 +662,15 @@ class ToolApiListApi(Resource): ) +@console_ns.route("/workspaces/current/tools/workflow") class ToolWorkflowListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -663,6 +683,7 @@ class 
ToolWorkflowListApi(Resource): ) +@console_ns.route("/workspaces/current/tool-labels") class ToolLabelsApi(Resource): @setup_required @login_required @@ -672,6 +693,7 @@ class ToolLabelsApi(Resource): return jsonable_encoder(ToolLabelsService.list_tool_labels()) +@console_ns.route("/oauth/plugin//tool/authorization-url") class ToolPluginOAuthApi(Resource): @setup_required @login_required @@ -682,19 +704,18 @@ class ToolPluginOAuthApi(Resource): provider_name = tool_provider.provider_name # todo check permission - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() - tenant_id = user.current_tenant_id oauth_client_params = BuiltinToolManageService.get_oauth_client(tenant_id=tenant_id, provider=provider) if oauth_client_params is None: raise Forbidden("no oauth available client config found for this tool provider") oauth_handler = OAuthHandler() context_id = OAuthProxyService.create_proxy_context( - user_id=current_user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name + user_id=user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name ) redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback" authorization_url_response = oauth_handler.get_authorization_url( @@ -716,6 +737,7 @@ class ToolPluginOAuthApi(Resource): return response +@console_ns.route("/oauth/plugin//tool/callback") class ToolOAuthCallback(Resource): @setup_required def get(self, provider): @@ -766,36 +788,40 @@ class ToolOAuthCallback(Resource): return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") +@console_ns.route("/workspaces/current/tool-provider/builtin//default-credential") class ToolBuiltinProviderSetDefaultApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - parser = reqparse.RequestParser() - parser.add_argument("id", type=str, required=True, nullable=False, location="json") + current_user, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() return BuiltinToolManageService.set_default_provider( - tenant_id=current_user.current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"] + tenant_id=current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"] ) +@console_ns.route("/workspaces/current/tool-provider/builtin//oauth/custom-client") class ToolOAuthCustomClient(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - parser = reqparse.RequestParser() - parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json") - parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("client_params", type=dict, required=False, nullable=True, location="json") + .add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") + ) args = parser.parse_args() - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() return BuiltinToolManageService.save_custom_oauth_client_params( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, provider=provider, client_params=args.get("client_params", {}), enable_oauth_custom_client=args.get("enable_oauth_custom_client", True), @@ 
-805,41 +831,42 @@ class ToolOAuthCustomClient(Resource): @login_required @account_initialization_required def get(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( - BuiltinToolManageService.get_custom_oauth_client_params( - tenant_id=current_user.current_tenant_id, provider=provider - ) + BuiltinToolManageService.get_custom_oauth_client_params(tenant_id=current_tenant_id, provider=provider) ) @setup_required @login_required @account_initialization_required def delete(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( - BuiltinToolManageService.delete_custom_oauth_client_params( - tenant_id=current_user.current_tenant_id, provider=provider - ) + BuiltinToolManageService.delete_custom_oauth_client_params(tenant_id=current_tenant_id, provider=provider) ) +@console_ns.route("/workspaces/current/tool-provider/builtin//oauth/client-schema") class ToolBuiltinProviderGetOauthClientSchemaApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_oauth_client_schema( - tenant_id=current_user.current_tenant_id, provider_name=provider + tenant_id=current_tenant_id, provider_name=provider ) ) +@console_ns.route("/workspaces/current/tool-provider/builtin//credential/info") class ToolBuiltinProviderGetCredentialInfoApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_credential_info( @@ -849,242 +876,245 @@ class ToolBuiltinProviderGetCredentialInfoApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/mcp") class ToolProviderMCPApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("server_url", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="") - parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json") - parser.add_argument("timeout", type=float, required=False, nullable=False, location="json", default=30) - parser.add_argument( - "sse_read_timeout", type=float, required=False, nullable=False, location="json", default=300 + parser = ( + reqparse.RequestParser() + .add_argument("server_url", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=str, required=True, nullable=False, location="json") + .add_argument("icon_type", type=str, required=True, nullable=False, location="json") + .add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="") + .add_argument("server_identifier", type=str, required=True, nullable=False, location="json") + .add_argument("configuration", type=dict, required=False, nullable=True, location="json", 
default={}) + .add_argument("headers", type=dict, required=False, nullable=True, location="json", default={}) + .add_argument("authentication", type=dict, required=False, nullable=True, location="json", default={}) ) - parser.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={}) args = parser.parse_args() - user = current_user - if not is_valid_url(args["server_url"]): - raise ValueError("Server URL is not valid.") - return jsonable_encoder( - MCPToolManageService.create_mcp_provider( - tenant_id=user.current_tenant_id, + user, tenant_id = current_account_with_tenant() + + # Parse and validate models + configuration = MCPConfiguration.model_validate(args["configuration"]) + authentication = MCPAuthentication.model_validate(args["authentication"]) if args["authentication"] else None + + # Create provider + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + result = service.create_provider( + tenant_id=tenant_id, + user_id=user.id, server_url=args["server_url"], name=args["name"], icon=args["icon"], icon_type=args["icon_type"], icon_background=args["icon_background"], - user_id=user.id, server_identifier=args["server_identifier"], - timeout=args["timeout"], - sse_read_timeout=args["sse_read_timeout"], headers=args["headers"], + configuration=configuration, + authentication=authentication, ) - ) + return jsonable_encoder(result) @setup_required @login_required @account_initialization_required def put(self): - parser = reqparse.RequestParser() - parser.add_argument("server_url", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json") - parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json") - parser.add_argument("timeout", type=float, required=False, nullable=True, location="json") - parser.add_argument("sse_read_timeout", type=float, required=False, nullable=True, location="json") - parser.add_argument("headers", type=dict, required=False, nullable=True, location="json") - args = parser.parse_args() - if not is_valid_url(args["server_url"]): - if "[__HIDDEN__]" in args["server_url"]: - pass - else: - raise ValueError("Server URL is not valid.") - MCPToolManageService.update_mcp_provider( - tenant_id=current_user.current_tenant_id, - provider_id=args["provider_id"], - server_url=args["server_url"], - name=args["name"], - icon=args["icon"], - icon_type=args["icon_type"], - icon_background=args["icon_background"], - server_identifier=args["server_identifier"], - timeout=args.get("timeout"), - sse_read_timeout=args.get("sse_read_timeout"), - headers=args.get("headers"), + parser = ( + reqparse.RequestParser() + .add_argument("server_url", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=str, required=True, nullable=False, location="json") + .add_argument("icon_type", type=str, required=True, nullable=False, location="json") + .add_argument("icon_background", type=str, required=False, 
nullable=True, location="json") + .add_argument("provider_id", type=str, required=True, nullable=False, location="json") + .add_argument("server_identifier", type=str, required=True, nullable=False, location="json") + .add_argument("configuration", type=dict, required=False, nullable=True, location="json", default={}) + .add_argument("headers", type=dict, required=False, nullable=True, location="json", default={}) + .add_argument("authentication", type=dict, required=False, nullable=True, location="json", default={}) ) - return {"result": "success"} + args = parser.parse_args() + configuration = MCPConfiguration.model_validate(args["configuration"]) + authentication = MCPAuthentication.model_validate(args["authentication"]) if args["authentication"] else None + _, current_tenant_id = current_account_with_tenant() + + # Step 1: Validate server URL change if needed (includes URL format validation and network operation) + validation_result = None + with Session(db.engine) as session: + service = MCPToolManageService(session=session) + validation_result = service.validate_server_url_change( + tenant_id=current_tenant_id, provider_id=args["provider_id"], new_server_url=args["server_url"] + ) + + # No need to check for errors here, exceptions will be raised directly + + # Step 2: Perform database update in a transaction + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + service.update_provider( + tenant_id=current_tenant_id, + provider_id=args["provider_id"], + server_url=args["server_url"], + name=args["name"], + icon=args["icon"], + icon_type=args["icon_type"], + icon_background=args["icon_background"], + server_identifier=args["server_identifier"], + headers=args["headers"], + configuration=configuration, + authentication=authentication, + validation_result=validation_result, + ) + return {"result": "success"} @setup_required @login_required @account_initialization_required def delete(self): - parser = reqparse.RequestParser() - parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "provider_id", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() - MCPToolManageService.delete_mcp_tool(tenant_id=current_user.current_tenant_id, provider_id=args["provider_id"]) - return {"result": "success"} + _, current_tenant_id = current_account_with_tenant() + + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + service.delete_provider(tenant_id=current_tenant_id, provider_id=args["provider_id"]) + return {"result": "success"} +@console_ns.route("/workspaces/current/tool-provider/mcp/auth") class ToolMCPAuthApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("authorization_code", type=str, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("provider_id", type=str, required=True, nullable=False, location="json") + .add_argument("authorization_code", type=str, required=False, nullable=True, location="json") + ) args = parser.parse_args() provider_id = args["provider_id"] - tenant_id = current_user.current_tenant_id - provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id) - if not provider: - raise 
ValueError("provider not found") - try: - with MCPClient( - provider.decrypted_server_url, - provider_id, - tenant_id, - authed=False, - authorization_code=args["authorization_code"], - for_list=True, - headers=provider.decrypted_headers, - timeout=provider.timeout, - sse_read_timeout=provider.sse_read_timeout, - ): - MCPToolManageService.update_mcp_provider_credentials( - mcp_provider=provider, - credentials=provider.decrypted_credentials, - authed=True, - ) - return {"result": "success"} + _, tenant_id = current_account_with_tenant() - except MCPAuthError: - auth_provider = OAuthClientProvider(provider_id, tenant_id, for_list=True) - return auth(auth_provider, provider.decrypted_server_url, args["authorization_code"]) + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + db_provider = service.get_provider(provider_id=provider_id, tenant_id=tenant_id) + if not db_provider: + raise ValueError("provider not found") + + # Convert to entity + provider_entity = db_provider.to_entity() + server_url = provider_entity.decrypt_server_url() + headers = provider_entity.decrypt_authentication() + + # Try to connect without active transaction + try: + # Use MCPClientWithAuthRetry to handle authentication automatically + with MCPClient( + server_url=server_url, + headers=headers, + timeout=provider_entity.timeout, + sse_read_timeout=provider_entity.sse_read_timeout, + ): + # Update credentials in new transaction + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + service.update_provider_credentials( + provider_id=provider_id, + tenant_id=tenant_id, + credentials=provider_entity.credentials, + authed=True, + ) + return {"result": "success"} + except MCPAuthError as e: + try: + auth_result = auth(provider_entity, args.get("authorization_code")) + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + response = service.execute_auth_actions(auth_result) + return response + except MCPRefreshTokenError as e: + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id) + raise ValueError(f"Failed to refresh token, please try to authorize again: {e}") from e except MCPError as e: - MCPToolManageService.update_mcp_provider_credentials( - mcp_provider=provider, - credentials={}, - authed=False, - ) + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id) raise ValueError(f"Failed to connect to MCP server: {e}") from e +@console_ns.route("/workspaces/current/tool-provider/mcp/tools/") class ToolMCPDetailApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider_id): - user = current_user - provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, user.current_tenant_id) - return jsonable_encoder(ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True)) + _, tenant_id = current_account_with_tenant() + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + provider = service.get_provider(provider_id=provider_id, tenant_id=tenant_id) + return jsonable_encoder(ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True)) 
+@console_ns.route("/workspaces/current/tools/mcp") class ToolMCPListAllApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() - tools = MCPToolManageService.retrieve_mcp_tools(tenant_id=tenant_id) + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + # Skip sensitive data decryption for list view to improve performance + tools = service.list_providers(tenant_id=tenant_id, include_sensitive=False) - return [tool.to_dict() for tool in tools] + return [tool.to_dict() for tool in tools] +@console_ns.route("/workspaces/current/tool-provider/mcp/update/") class ToolMCPUpdateApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider_id): - tenant_id = current_user.current_tenant_id - tools = MCPToolManageService.list_mcp_tool_from_remote_server( - tenant_id=tenant_id, - provider_id=provider_id, - ) - return jsonable_encoder(tools) + _, tenant_id = current_account_with_tenant() + with Session(db.engine) as session, session.begin(): + service = MCPToolManageService(session=session) + tools = service.list_provider_tools( + tenant_id=tenant_id, + provider_id=provider_id, + ) + return jsonable_encoder(tools) +@console_ns.route("/mcp/oauth/callback") class ToolMCPCallbackApi(Resource): def get(self): - parser = reqparse.RequestParser() - parser.add_argument("code", type=str, required=True, nullable=False, location="args") - parser.add_argument("state", type=str, required=True, nullable=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("code", type=str, required=True, nullable=False, location="args") + .add_argument("state", type=str, required=True, nullable=False, location="args") + ) args = parser.parse_args() state_key = args["state"] authorization_code = args["code"] - handle_callback(state_key, authorization_code) + + # Create service instance for handle_callback + with Session(db.engine) as session, session.begin(): + mcp_service = MCPToolManageService(session=session) + # handle_callback now returns state data and tokens + state_data, tokens = handle_callback(state_key, authorization_code) + # Save tokens using the service layer + mcp_service.save_oauth_data( + state_data.provider_id, state_data.tenant_id, tokens.model_dump(), OAuthDataType.TOKENS + ) + return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") - - -# tool provider -api.add_resource(ToolProviderListApi, "/workspaces/current/tool-providers") - -# tool oauth -api.add_resource(ToolPluginOAuthApi, "/oauth/plugin//tool/authorization-url") -api.add_resource(ToolOAuthCallback, "/oauth/plugin//tool/callback") -api.add_resource(ToolOAuthCustomClient, "/workspaces/current/tool-provider/builtin//oauth/custom-client") - -# builtin tool provider -api.add_resource(ToolBuiltinProviderListToolsApi, "/workspaces/current/tool-provider/builtin//tools") -api.add_resource(ToolBuiltinProviderInfoApi, "/workspaces/current/tool-provider/builtin//info") -api.add_resource(ToolBuiltinProviderAddApi, "/workspaces/current/tool-provider/builtin//add") -api.add_resource(ToolBuiltinProviderDeleteApi, "/workspaces/current/tool-provider/builtin//delete") -api.add_resource(ToolBuiltinProviderUpdateApi, "/workspaces/current/tool-provider/builtin//update") -api.add_resource( - ToolBuiltinProviderSetDefaultApi, "/workspaces/current/tool-provider/builtin//default-credential" -) 
-api.add_resource( - ToolBuiltinProviderGetCredentialInfoApi, "/workspaces/current/tool-provider/builtin//credential/info" -) -api.add_resource( - ToolBuiltinProviderGetCredentialsApi, "/workspaces/current/tool-provider/builtin//credentials" -) -api.add_resource( - ToolBuiltinProviderCredentialsSchemaApi, - "/workspaces/current/tool-provider/builtin//credential/schema/", -) -api.add_resource( - ToolBuiltinProviderGetOauthClientSchemaApi, - "/workspaces/current/tool-provider/builtin//oauth/client-schema", -) -api.add_resource(ToolBuiltinProviderIconApi, "/workspaces/current/tool-provider/builtin//icon") - -# api tool provider -api.add_resource(ToolApiProviderAddApi, "/workspaces/current/tool-provider/api/add") -api.add_resource(ToolApiProviderGetRemoteSchemaApi, "/workspaces/current/tool-provider/api/remote") -api.add_resource(ToolApiProviderListToolsApi, "/workspaces/current/tool-provider/api/tools") -api.add_resource(ToolApiProviderUpdateApi, "/workspaces/current/tool-provider/api/update") -api.add_resource(ToolApiProviderDeleteApi, "/workspaces/current/tool-provider/api/delete") -api.add_resource(ToolApiProviderGetApi, "/workspaces/current/tool-provider/api/get") -api.add_resource(ToolApiProviderSchemaApi, "/workspaces/current/tool-provider/api/schema") -api.add_resource(ToolApiProviderPreviousTestApi, "/workspaces/current/tool-provider/api/test/pre") - -# workflow tool provider -api.add_resource(ToolWorkflowProviderCreateApi, "/workspaces/current/tool-provider/workflow/create") -api.add_resource(ToolWorkflowProviderUpdateApi, "/workspaces/current/tool-provider/workflow/update") -api.add_resource(ToolWorkflowProviderDeleteApi, "/workspaces/current/tool-provider/workflow/delete") -api.add_resource(ToolWorkflowProviderGetApi, "/workspaces/current/tool-provider/workflow/get") -api.add_resource(ToolWorkflowProviderListToolApi, "/workspaces/current/tool-provider/workflow/tools") - -# mcp tool provider -api.add_resource(ToolMCPDetailApi, "/workspaces/current/tool-provider/mcp/tools/") -api.add_resource(ToolProviderMCPApi, "/workspaces/current/tool-provider/mcp") -api.add_resource(ToolMCPUpdateApi, "/workspaces/current/tool-provider/mcp/update/") -api.add_resource(ToolMCPAuthApi, "/workspaces/current/tool-provider/mcp/auth") -api.add_resource(ToolMCPCallbackApi, "/mcp/oauth/callback") - -api.add_resource(ToolBuiltinListApi, "/workspaces/current/tools/builtin") -api.add_resource(ToolApiListApi, "/workspaces/current/tools/api") -api.add_resource(ToolMCPListAllApi, "/workspaces/current/tools/mcp") -api.add_resource(ToolWorkflowListApi, "/workspaces/current/tools/workflow") -api.add_resource(ToolLabelsApi, "/workspaces/current/tool-labels") diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 6bec70b5da..f9856df9ea 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -1,7 +1,6 @@ import logging from flask import request -from flask_login import current_user from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy import select from werkzeug.exceptions import Unauthorized @@ -14,7 +13,7 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) -from controllers.console import api +from controllers.console import console_ns from controllers.console.admin import admin_required from controllers.console.error import AccountNotLinkTenantError from controllers.console.wraps import ( @@ -24,8 +23,8 @@ from 
controllers.console.wraps import ( ) from extensions.ext_database import db from libs.helper import TimestampField -from libs.login import login_required -from models.account import Account, Tenant, TenantStatus +from libs.login import current_account_with_tenant, login_required +from models.account import Tenant, TenantStatus from services.account_service import TenantService from services.feature_service import FeatureService from services.file_service import FileService @@ -65,13 +64,13 @@ tenants_fields = { workspace_fields = {"id": fields.String, "name": fields.String, "status": fields.String, "created_at": TimestampField} +@console_ns.route("/workspaces") class TenantListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() tenants = TenantService.get_join_tenants(current_user) tenant_dicts = [] @@ -85,7 +84,7 @@ class TenantListApi(Resource): "status": tenant.status, "created_at": tenant.created_at, "plan": features.billing.subscription.plan if features.billing.enabled else "sandbox", - "current": tenant.id == current_user.current_tenant_id if current_user.current_tenant_id else False, + "current": tenant.id == current_tenant_id if current_tenant_id else False, } tenant_dicts.append(tenant_dict) @@ -93,13 +92,16 @@ class TenantListApi(Resource): return {"workspaces": marshal(tenant_dicts, tenants_fields)}, 200 +@console_ns.route("/all-workspaces") class WorkspaceListApi(Resource): @setup_required @admin_required def get(self): - parser = reqparse.RequestParser() - parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() stmt = select(Tenant).order_by(Tenant.created_at.desc()) @@ -118,6 +120,8 @@ class WorkspaceListApi(Resource): }, 200 +@console_ns.route("/workspaces/current", endpoint="workspaces_current") +@console_ns.route("/info", endpoint="info") # Deprecated class TenantApi(Resource): @setup_required @login_required @@ -127,8 +131,7 @@ class TenantApi(Resource): if request.path == "/info": logger.warning("Deprecated URL /info was used.") - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() tenant = current_user.current_tenant if not tenant: raise ValueError("No current tenant") @@ -143,20 +146,17 @@ class TenantApi(Resource): else: raise Unauthorized("workspace is archived") - if not tenant: - raise ValueError("No tenant available") return WorkspaceService.get_tenant_info(tenant), 200 +@console_ns.route("/workspaces/switch") class SwitchWorkspaceApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - parser = reqparse.RequestParser() - parser.add_argument("tenant_id", type=str, required=True, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("tenant_id", type=str, required=True, 
location="json") args = parser.parse_args() # check if tenant_id is valid, 403 if not @@ -172,22 +172,21 @@ class SwitchWorkspaceApi(Resource): return {"result": "success", "new_tenant": marshal(WorkspaceService.get_tenant_info(new_tenant), tenant_fields)} +@console_ns.route("/workspaces/custom-config") class CustomConfigWorkspaceApi(Resource): @setup_required @login_required @account_initialization_required @cloud_edition_billing_resource_check("workspace_custom") def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - parser = reqparse.RequestParser() - parser.add_argument("remove_webapp_brand", type=bool, location="json") - parser.add_argument("replace_webapp_logo", type=str, location="json") + _, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("remove_webapp_brand", type=bool, location="json") + .add_argument("replace_webapp_logo", type=str, location="json") + ) args = parser.parse_args() - - if not current_user.current_tenant_id: - raise ValueError("No current tenant") - tenant = db.get_or_404(Tenant, current_user.current_tenant_id) + tenant = db.get_or_404(Tenant, current_tenant_id) custom_config_dict = { "remove_webapp_brand": args["remove_webapp_brand"], @@ -202,14 +201,14 @@ class CustomConfigWorkspaceApi(Resource): return {"result": "success", "tenant": marshal(WorkspaceService.get_tenant_info(tenant), tenant_fields)} +@console_ns.route("/workspaces/custom-config/webapp-logo/upload") class WebappLogoWorkspaceApi(Resource): @setup_required @login_required @account_initialization_required @cloud_edition_billing_resource_check("workspace_custom") def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() # check file if "file" not in request.files: raise NoFileUploadedError() @@ -242,32 +241,21 @@ class WebappLogoWorkspaceApi(Resource): return {"id": upload_file.id}, 201 +@console_ns.route("/workspaces/info") class WorkspaceInfoApi(Resource): @setup_required @login_required @account_initialization_required # Change workspace name def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") + _, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json") args = parser.parse_args() - if not current_user.current_tenant_id: + if not current_tenant_id: raise ValueError("No current tenant") - tenant = db.get_or_404(Tenant, current_user.current_tenant_id) + tenant = db.get_or_404(Tenant, current_tenant_id) tenant.name = args["name"] db.session.commit() return {"result": "success", "tenant": marshal(WorkspaceService.get_tenant_info(tenant), tenant_fields)} - - -api.add_resource(TenantListApi, "/workspaces") # GET for getting all tenants -api.add_resource(WorkspaceListApi, "/all-workspaces") # GET for getting all tenants -api.add_resource(TenantApi, "/workspaces/current", endpoint="workspaces_current") # GET for getting current tenant info -api.add_resource(TenantApi, "/info", endpoint="info") # Deprecated -api.add_resource(SwitchWorkspaceApi, "/workspaces/switch") # POST for switching tenant -api.add_resource(CustomConfigWorkspaceApi, "/workspaces/custom-config") -api.add_resource(WebappLogoWorkspaceApi, "/workspaces/custom-config/webapp-logo/upload") 
-api.add_resource(WorkspaceInfoApi, "/workspaces/info") # POST for changing workspace info diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index 914d386c78..8572a6dc9b 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -7,12 +7,12 @@ from functools import wraps from typing import ParamSpec, TypeVar from flask import abort, request -from flask_login import current_user from configs import dify_config from controllers.console.workspace.error import AccountNotInitializedError from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.login import current_account_with_tenant from models.account import AccountStatus from models.dataset import RateLimitLog from models.model import DifySetup @@ -29,9 +29,8 @@ def account_initialization_required(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): # check account initialization - account = current_user - - if account.status == AccountStatus.UNINITIALIZED: + current_user, _ = current_account_with_tenant() + if current_user.status == AccountStatus.UNINITIALIZED: raise AccountNotInitializedError() return view(*args, **kwargs) @@ -75,7 +74,8 @@ def only_edition_self_hosted(view: Callable[P, R]): def cloud_edition_billing_enabled(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if not features.billing.enabled: abort(403, "Billing feature is not enabled.") return view(*args, **kwargs) @@ -87,7 +87,8 @@ def cloud_edition_billing_resource_check(resource: str): def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: members = features.members apps = features.apps @@ -128,7 +129,8 @@ def cloud_edition_billing_knowledge_limit_check(resource: str): def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: if resource == "add_segment": if features.billing.subscription.plan == "sandbox": @@ -151,10 +153,11 @@ def cloud_edition_billing_rate_limit_check(resource: str): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): if resource == "knowledge": - knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(current_tenant_id) if knowledge_rate_limit.enabled: current_time = int(time.time() * 1000) - key = f"rate_limit_{current_user.current_tenant_id}" + key = f"rate_limit_{current_tenant_id}" redis_client.zadd(key, {current_time: current_time}) @@ -165,7 +168,7 @@ def cloud_edition_billing_rate_limit_check(resource: str): if request_count > knowledge_rate_limit.limit: # add ratelimit record rate_limit_log = RateLimitLog( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, 
subscription_plan=knowledge_rate_limit.subscription_plan, operation="knowledge", ) @@ -185,14 +188,15 @@ def cloud_utm_record(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): with contextlib.suppress(Exception): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: utm_info = request.cookies.get("utm_info") if utm_info: utm_info_dict: dict = json.loads(utm_info) - OperationService.record_utm(current_user.current_tenant_id, utm_info_dict) + OperationService.record_utm(current_tenant_id, utm_info_dict) return view(*args, **kwargs) @@ -242,9 +246,9 @@ def email_password_login_enabled(view: Callable[P, R]): return decorated -def email_register_enabled(view): +def email_register_enabled(view: Callable[P, R]): @wraps(view) - def decorated(*args, **kwargs): + def decorated(*args: P.args, **kwargs: P.kwargs): features = FeatureService.get_system_features() if features.is_allow_register: return view(*args, **kwargs) @@ -271,7 +275,8 @@ def enable_change_email(view: Callable[P, R]): def is_allow_transfer_owner(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.is_allow_transfer_workspace: return view(*args, **kwargs) @@ -281,12 +286,31 @@ def is_allow_transfer_owner(view: Callable[P, R]): return decorated -def knowledge_pipeline_publish_enabled(view): +def knowledge_pipeline_publish_enabled(view: Callable[P, R]): @wraps(view) - def decorated(*args, **kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + def decorated(*args: P.args, **kwargs: P.kwargs): + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.knowledge_pipeline.publish_enabled: return view(*args, **kwargs) abort(403) return decorated + + +def edit_permission_required(f: Callable[P, R]): + @wraps(f) + def decorated_function(*args: P.args, **kwargs: P.kwargs): + from werkzeug.exceptions import Forbidden + + from libs.login import current_user + from models import Account + + user = current_user._get_current_object() # type: ignore + if not isinstance(user, Account): + raise Forbidden() + if not current_user.has_edit_permission: + raise Forbidden() + return f(*args, **kwargs) + + return decorated_function diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py index 0efee0c377..d320855f29 100644 --- a/api/controllers/files/image_preview.py +++ b/api/controllers/files/image_preview.py @@ -14,10 +14,25 @@ from services.file_service import FileService @files_ns.route("//image-preview") class ImagePreviewApi(Resource): - """ - Deprecated - """ + """Deprecated endpoint for retrieving image previews.""" + @files_ns.doc("get_image_preview") + @files_ns.doc(description="Retrieve a signed image preview for a file") + @files_ns.doc( + params={ + "file_id": "ID of the file to preview", + "timestamp": "Unix timestamp used in the signature", + "nonce": "Random string used in the signature", + "sign": "HMAC signature verifying the request", + } + ) + @files_ns.doc( + responses={ + 200: "Image preview returned successfully", + 400: "Missing or invalid signature parameters", + 415: 
"Unsupported file type", + } + ) def get(self, file_id): file_id = str(file_id) @@ -43,14 +58,35 @@ class ImagePreviewApi(Resource): @files_ns.route("//file-preview") class FilePreviewApi(Resource): + @files_ns.doc("get_file_preview") + @files_ns.doc(description="Download a file preview or attachment using signed parameters") + @files_ns.doc( + params={ + "file_id": "ID of the file to preview", + "timestamp": "Unix timestamp used in the signature", + "nonce": "Random string used in the signature", + "sign": "HMAC signature verifying the request", + "as_attachment": "Whether to download the file as an attachment", + } + ) + @files_ns.doc( + responses={ + 200: "File stream returned successfully", + 400: "Missing or invalid signature parameters", + 404: "File not found", + 415: "Unsupported file type", + } + ) def get(self, file_id): file_id = str(file_id) - parser = reqparse.RequestParser() - parser.add_argument("timestamp", type=str, required=True, location="args") - parser.add_argument("nonce", type=str, required=True, location="args") - parser.add_argument("sign", type=str, required=True, location="args") - parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("timestamp", type=str, required=True, location="args") + .add_argument("nonce", type=str, required=True, location="args") + .add_argument("sign", type=str, required=True, location="args") + .add_argument("as_attachment", type=bool, required=False, default=False, location="args") + ) args = parser.parse_args() @@ -99,6 +135,20 @@ class FilePreviewApi(Resource): @files_ns.route("/workspaces//webapp-logo") class WorkspaceWebappLogoApi(Resource): + @files_ns.doc("get_workspace_webapp_logo") + @files_ns.doc(description="Fetch the custom webapp logo for a workspace") + @files_ns.doc( + params={ + "workspace_id": "Workspace identifier", + } + ) + @files_ns.doc( + responses={ + 200: "Logo returned successfully", + 404: "Webapp logo not configured", + 415: "Unsupported file type", + } + ) def get(self, workspace_id): workspace_id = str(workspace_id) diff --git a/api/controllers/files/tool_files.py b/api/controllers/files/tool_files.py index 42207b878c..ecaeb85821 100644 --- a/api/controllers/files/tool_files.py +++ b/api/controllers/files/tool_files.py @@ -13,15 +13,36 @@ from extensions.ext_database import db as global_db @files_ns.route("/tools/.") class ToolFileApi(Resource): + @files_ns.doc("get_tool_file") + @files_ns.doc(description="Download a tool file by ID using signed parameters") + @files_ns.doc( + params={ + "file_id": "Tool file identifier", + "extension": "Expected file extension", + "timestamp": "Unix timestamp used in the signature", + "nonce": "Random string used in the signature", + "sign": "HMAC signature verifying the request", + "as_attachment": "Whether to download the file as an attachment", + } + ) + @files_ns.doc( + responses={ + 200: "Tool file stream returned successfully", + 403: "Forbidden - invalid signature", + 404: "File not found", + 415: "Unsupported file type", + } + ) def get(self, file_id, extension): file_id = str(file_id) - parser = reqparse.RequestParser() - - parser.add_argument("timestamp", type=str, required=True, location="args") - parser.add_argument("nonce", type=str, required=True, location="args") - parser.add_argument("sign", type=str, required=True, location="args") - parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args") + parser = ( + 
reqparse.RequestParser() + .add_argument("timestamp", type=str, required=True, location="args") + .add_argument("nonce", type=str, required=True, location="args") + .add_argument("sign", type=str, required=True, location="args") + .add_argument("as_attachment", type=bool, required=False, default=False, location="args") + ) args = parser.parse_args() if not verify_tool_file_signature( diff --git a/api/controllers/files/upload.py b/api/controllers/files/upload.py index 206a5d1cc2..a09e24e2d9 100644 --- a/api/controllers/files/upload.py +++ b/api/controllers/files/upload.py @@ -18,19 +18,17 @@ from core.tools.tool_file_manager import ToolFileManager from fields.file_fields import build_file_model # Define parser for both documentation and validation -upload_parser = reqparse.RequestParser() -upload_parser.add_argument("file", location="files", type=FileStorage, required=True, help="File to upload") -upload_parser.add_argument( - "timestamp", type=str, required=True, location="args", help="Unix timestamp for signature verification" +upload_parser = ( + reqparse.RequestParser() + .add_argument("file", location="files", type=FileStorage, required=True, help="File to upload") + .add_argument( + "timestamp", type=str, required=True, location="args", help="Unix timestamp for signature verification" + ) + .add_argument("nonce", type=str, required=True, location="args", help="Random string for signature verification") + .add_argument("sign", type=str, required=True, location="args", help="HMAC signature for request validation") + .add_argument("tenant_id", type=str, required=True, location="args", help="Tenant identifier") + .add_argument("user_id", type=str, required=False, location="args", help="User identifier") ) -upload_parser.add_argument( - "nonce", type=str, required=True, location="args", help="Random string for signature verification" -) -upload_parser.add_argument( - "sign", type=str, required=True, location="args", help="HMAC signature for request validation" -) -upload_parser.add_argument("tenant_id", type=str, required=True, location="args", help="Tenant identifier") -upload_parser.add_argument("user_id", type=str, required=False, location="args", help="User identifier") @files_ns.route("/upload/for-plugin") diff --git a/api/controllers/inner_api/mail.py b/api/controllers/inner_api/mail.py index 0b2be03e43..7e40d81706 100644 --- a/api/controllers/inner_api/mail.py +++ b/api/controllers/inner_api/mail.py @@ -5,11 +5,13 @@ from controllers.inner_api import inner_api_ns from controllers.inner_api.wraps import billing_inner_api_only, enterprise_inner_api_only from tasks.mail_inner_task import send_inner_email_task -_mail_parser = reqparse.RequestParser() -_mail_parser.add_argument("to", type=str, action="append", required=True) -_mail_parser.add_argument("subject", type=str, required=True) -_mail_parser.add_argument("body", type=str, required=True) -_mail_parser.add_argument("substitutions", type=dict, required=False) +_mail_parser = ( + reqparse.RequestParser() + .add_argument("to", type=str, action="append", required=True) + .add_argument("subject", type=str, required=True) + .add_argument("body", type=str, required=True) + .add_argument("substitutions", type=dict, required=False) +) class BaseMail(Resource): @@ -17,7 +19,7 @@ class BaseMail(Resource): def post(self): args = _mail_parser.parse_args() - send_inner_email_task.delay( + send_inner_email_task.delay( # type: ignore to=args["to"], subject=args["subject"], body=args["body"], diff --git a/api/controllers/inner_api/plugin/plugin.py 
b/api/controllers/inner_api/plugin/plugin.py index deab50076d..e4fe8d44bf 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -31,7 +31,7 @@ from core.plugin.entities.request import ( ) from core.tools.entities.tool_entities import ToolProviderType from libs.helper import length_prefixed_response -from models.account import Account, Tenant +from models import Account, Tenant from models.model import EndUser diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index b683aa3160..2a57bb745b 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -25,8 +25,8 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser: As a result, it could only be considered as an end user id. """ if not user_id: - user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value - is_anonymous = user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID + is_anonymous = user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID try: with Session(db.engine) as session: user_model = None @@ -72,9 +72,11 @@ def get_user_tenant(view: Callable[P, R] | None = None): @wraps(view_func) def decorated_view(*args: P.args, **kwargs: P.kwargs): # fetch json body - parser = reqparse.RequestParser() - parser.add_argument("tenant_id", type=str, required=True, location="json") - parser.add_argument("user_id", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tenant_id", type=str, required=True, location="json") + .add_argument("user_id", type=str, required=True, location="json") + ) p = parser.parse_args() @@ -85,7 +87,7 @@ def get_user_tenant(view: Callable[P, R] | None = None): raise ValueError("tenant_id is required") if not user_id: - user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID try: tenant_model = ( @@ -128,7 +130,7 @@ def plugin_data(view: Callable[P, R] | None = None, *, payload_type: type[BaseMo raise ValueError("invalid json") try: - payload = payload_type(**data) + payload = payload_type.model_validate(data) except Exception as e: raise ValueError(f"invalid payload: {str(e)}") diff --git a/api/controllers/inner_api/workspace/workspace.py b/api/controllers/inner_api/workspace/workspace.py index 47f0240cd2..8391a15919 100644 --- a/api/controllers/inner_api/workspace/workspace.py +++ b/api/controllers/inner_api/workspace/workspace.py @@ -7,7 +7,7 @@ from controllers.inner_api import inner_api_ns from controllers.inner_api.wraps import enterprise_inner_api_only from events.tenant_event import tenant_was_created from extensions.ext_database import db -from models.account import Account +from models import Account from services.account_service import TenantService @@ -25,9 +25,11 @@ class EnterpriseWorkspace(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") - parser.add_argument("owner_email", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, location="json") + .add_argument("owner_email", type=str, required=True, location="json") + ) args = parser.parse_args() account = db.session.query(Account).filter_by(email=args["owner_email"]).first() @@ -68,8 +70,7 @@ class EnterpriseWorkspaceNoOwnerEmail(Resource): } ) def post(self): - parser = 
reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json") args = parser.parse_args() tenant = TenantService.create_tenant(args["name"], is_from_dashboard=True) diff --git a/api/controllers/mcp/mcp.py b/api/controllers/mcp/mcp.py index a8629dca20..8d8fe6b3a8 100644 --- a/api/controllers/mcp/mcp.py +++ b/api/controllers/mcp/mcp.py @@ -33,14 +33,12 @@ def int_or_str(value): # Define parser for both documentation and validation -mcp_request_parser = reqparse.RequestParser() -mcp_request_parser.add_argument( - "jsonrpc", type=str, required=True, location="json", help="JSON-RPC version (should be '2.0')" -) -mcp_request_parser.add_argument("method", type=str, required=True, location="json", help="The method to invoke") -mcp_request_parser.add_argument("params", type=dict, required=False, location="json", help="Parameters for the method") -mcp_request_parser.add_argument( - "id", type=int_or_str, required=False, location="json", help="Request ID for tracking responses" +mcp_request_parser = ( + reqparse.RequestParser() + .add_argument("jsonrpc", type=str, required=True, location="json", help="JSON-RPC version (should be '2.0')") + .add_argument("method", type=str, required=True, location="json", help="The method to invoke") + .add_argument("params", type=dict, required=False, location="json", help="Parameters for the method") + .add_argument("id", type=int_or_str, required=False, location="json", help="Request ID for tracking responses") ) @@ -195,15 +193,16 @@ class MCPAppApi(Resource): except ValidationError as e: raise MCPRequestError(mcp_types.INVALID_PARAMS, f"Invalid MCP request: {str(e)}") - def _retrieve_end_user(self, tenant_id: str, mcp_server_id: str, session: Session) -> EndUser | None: - """Get end user from existing session - optimized query""" - return ( - session.query(EndUser) - .where(EndUser.tenant_id == tenant_id) - .where(EndUser.session_id == mcp_server_id) - .where(EndUser.type == "mcp") - .first() - ) + def _retrieve_end_user(self, tenant_id: str, mcp_server_id: str) -> EndUser | None: + """Get end user - manages its own database session""" + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + return ( + session.query(EndUser) + .where(EndUser.tenant_id == tenant_id) + .where(EndUser.session_id == mcp_server_id) + .where(EndUser.type == "mcp") + .first() + ) def _create_end_user( self, client_name: str, tenant_id: str, app_id: str, mcp_server_id: str, session: Session @@ -231,7 +230,7 @@ class MCPAppApi(Resource): request_id: Union[int, str], ) -> mcp_types.JSONRPCResponse | mcp_types.JSONRPCError | None: """Handle MCP request and return response""" - end_user = self._retrieve_end_user(mcp_server.tenant_id, mcp_server.id, session) + end_user = self._retrieve_end_user(mcp_server.tenant_id, mcp_server.id) if not end_user and isinstance(mcp_request.root, mcp_types.InitializeRequest): client_info = mcp_request.root.params.clientInfo diff --git a/api/controllers/service_api/app/annotation.py b/api/controllers/service_api/app/annotation.py index ad1bdc7334..ed013b1674 100644 --- a/api/controllers/service_api/app/annotation.py +++ b/api/controllers/service_api/app/annotation.py @@ -10,24 +10,24 @@ from controllers.service_api.wraps import validate_app_token from extensions.ext_redis import redis_client from fields.annotation_fields import annotation_fields, build_annotation_model from libs.login import current_user 
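The `_retrieve_end_user` rewrite in mcp.py above swaps a caller-supplied `Session` for one the method opens and closes itself. A minimal sketch of that pattern, assuming the surrounding `db.engine` and `EndUser` model from the codebase; `expire_on_commit=False` keeps the returned row usable after the transaction ends, and `session.begin()` commits on clean exit:

    from sqlalchemy.orm import Session

    # `db.engine` and EndUser are the application's own objects
    # (extensions.ext_database / models.model); only the pattern is the point.
    def find_mcp_end_user(tenant_id: str, mcp_server_id: str):
        # One self-contained unit of work per call: the outer context manager
        # closes the session, the inner one commits, or rolls back on error.
        with Session(db.engine, expire_on_commit=False) as session, session.begin():
            return (
                session.query(EndUser)
                .where(EndUser.tenant_id == tenant_id)
                .where(EndUser.session_id == mcp_server_id)
                .where(EndUser.type == "mcp")
                .first()
            )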
-from models.account import Account +from models import Account from models.model import App from services.annotation_service import AppAnnotationService # Define parsers for annotation API -annotation_create_parser = reqparse.RequestParser() -annotation_create_parser.add_argument("question", required=True, type=str, location="json", help="Annotation question") -annotation_create_parser.add_argument("answer", required=True, type=str, location="json", help="Annotation answer") +annotation_create_parser = ( + reqparse.RequestParser() + .add_argument("question", required=True, type=str, location="json", help="Annotation question") + .add_argument("answer", required=True, type=str, location="json", help="Annotation answer") +) -annotation_reply_action_parser = reqparse.RequestParser() -annotation_reply_action_parser.add_argument( - "score_threshold", required=True, type=float, location="json", help="Score threshold for annotation matching" -) -annotation_reply_action_parser.add_argument( - "embedding_provider_name", required=True, type=str, location="json", help="Embedding provider name" -) -annotation_reply_action_parser.add_argument( - "embedding_model_name", required=True, type=str, location="json", help="Embedding model name" +annotation_reply_action_parser = ( + reqparse.RequestParser() + .add_argument( + "score_threshold", required=True, type=float, location="json", help="Score threshold for annotation matching" + ) + .add_argument("embedding_provider_name", required=True, type=str, location="json", help="Embedding provider name") + .add_argument("embedding_model_name", required=True, type=str, location="json", help="Embedding model name") ) diff --git a/api/controllers/service_api/app/audio.py b/api/controllers/service_api/app/audio.py index 33035123d7..c069a7ddfb 100644 --- a/api/controllers/service_api/app/audio.py +++ b/api/controllers/service_api/app/audio.py @@ -85,11 +85,13 @@ class AudioApi(Resource): # Define parser for text-to-audio API -text_to_audio_parser = reqparse.RequestParser() -text_to_audio_parser.add_argument("message_id", type=str, required=False, location="json", help="Message ID") -text_to_audio_parser.add_argument("voice", type=str, location="json", help="Voice to use for TTS") -text_to_audio_parser.add_argument("text", type=str, location="json", help="Text to convert to audio") -text_to_audio_parser.add_argument("streaming", type=bool, location="json", help="Enable streaming response") +text_to_audio_parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=str, required=False, location="json", help="Message ID") + .add_argument("voice", type=str, location="json", help="Voice to use for TTS") + .add_argument("text", type=str, location="json", help="Text to convert to audio") + .add_argument("streaming", type=bool, location="json", help="Enable streaming response") +) @service_api_ns.route("/text-to-audio") diff --git a/api/controllers/service_api/app/completion.py b/api/controllers/service_api/app/completion.py index 22428ee0ab..915e7e9416 100644 --- a/api/controllers/service_api/app/completion.py +++ b/api/controllers/service_api/app/completion.py @@ -37,40 +37,34 @@ logger = logging.getLogger(__name__) # Define parser for completion API -completion_parser = reqparse.RequestParser() -completion_parser.add_argument( - "inputs", type=dict, required=True, location="json", help="Input parameters for completion" -) -completion_parser.add_argument("query", type=str, location="json", default="", help="The query string") 
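Nearly every file in this diff applies the same mechanical refactor: `RequestParser.add_argument` in flask-restx (inherited from flask-restful) returns the parser itself, so a parser can be declared as one chained expression instead of a sequence of mutating statements. A minimal before/after sketch with placeholder argument names:

    from flask_restx import reqparse

    # Statement style (before): each call mutates the parser in place.
    parser = reqparse.RequestParser()
    parser.add_argument("q", type=str, required=True, location="args")
    parser.add_argument("page", type=int, default=1, location="args")

    # Fluent style (after): add_argument returns self, so the whole parser
    # is a single expression and the name is bound exactly once.
    parser = (
        reqparse.RequestParser()
        .add_argument("q", type=str, required=True, location="args")
        .add_argument("page", type=int, default=1, location="args")
    )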
-completion_parser.add_argument("files", type=list, required=False, location="json", help="List of file attachments") -completion_parser.add_argument( - "response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode" -) -completion_parser.add_argument( - "retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source" +completion_parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json", help="Input parameters for completion") + .add_argument("query", type=str, location="json", default="", help="The query string") + .add_argument("files", type=list, required=False, location="json", help="List of file attachments") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source") ) # Define parser for chat API -chat_parser = reqparse.RequestParser() -chat_parser.add_argument("inputs", type=dict, required=True, location="json", help="Input parameters for chat") -chat_parser.add_argument("query", type=str, required=True, location="json", help="The chat query") -chat_parser.add_argument("files", type=list, required=False, location="json", help="List of file attachments") -chat_parser.add_argument( - "response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode" +chat_parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json", help="Input parameters for chat") + .add_argument("query", type=str, required=True, location="json", help="The chat query") + .add_argument("files", type=list, required=False, location="json", help="List of file attachments") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode") + .add_argument("conversation_id", type=uuid_value, location="json", help="Existing conversation ID") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source") + .add_argument( + "auto_generate_name", + type=bool, + required=False, + default=True, + location="json", + help="Auto generate conversation name", + ) + .add_argument("workflow_id", type=str, required=False, location="json", help="Workflow ID for advanced chat") ) -chat_parser.add_argument("conversation_id", type=uuid_value, location="json", help="Existing conversation ID") -chat_parser.add_argument( - "retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source" -) -chat_parser.add_argument( - "auto_generate_name", - type=bool, - required=False, - default=True, - location="json", - help="Auto generate conversation name", -) -chat_parser.add_argument("workflow_id", type=str, required=False, location="json", help="Workflow ID for advanced chat") @service_api_ns.route("/completion-messages") diff --git a/api/controllers/service_api/app/conversation.py b/api/controllers/service_api/app/conversation.py index 711dd5704c..c4e23dd2e7 100644 --- a/api/controllers/service_api/app/conversation.py +++ b/api/controllers/service_api/app/conversation.py @@ -24,48 +24,63 @@ from models.model import App, AppMode, EndUser from services.conversation_service import ConversationService # Define parsers for conversation APIs -conversation_list_parser = reqparse.RequestParser() -conversation_list_parser.add_argument( - "last_id", 
type=uuid_value, location="args", help="Last conversation ID for pagination" -) -conversation_list_parser.add_argument( - "limit", - type=int_range(1, 100), - required=False, - default=20, - location="args", - help="Number of conversations to return", -) -conversation_list_parser.add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", - help="Sort order for conversations", +conversation_list_parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args", help="Last conversation ID for pagination") + .add_argument( + "limit", + type=int_range(1, 100), + required=False, + default=20, + location="args", + help="Number of conversations to return", + ) + .add_argument( + "sort_by", + type=str, + choices=["created_at", "-created_at", "updated_at", "-updated_at"], + required=False, + default="-updated_at", + location="args", + help="Sort order for conversations", + ) ) -conversation_rename_parser = reqparse.RequestParser() -conversation_rename_parser.add_argument("name", type=str, required=False, location="json", help="New conversation name") -conversation_rename_parser.add_argument( - "auto_generate", type=bool, required=False, default=False, location="json", help="Auto-generate conversation name" +conversation_rename_parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, location="json", help="New conversation name") + .add_argument( + "auto_generate", + type=bool, + required=False, + default=False, + location="json", + help="Auto-generate conversation name", + ) ) -conversation_variables_parser = reqparse.RequestParser() -conversation_variables_parser.add_argument( - "last_id", type=uuid_value, location="args", help="Last variable ID for pagination" -) -conversation_variables_parser.add_argument( - "limit", type=int_range(1, 100), required=False, default=20, location="args", help="Number of variables to return" +conversation_variables_parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args", help="Last variable ID for pagination") + .add_argument( + "limit", + type=int_range(1, 100), + required=False, + default=20, + location="args", + help="Number of variables to return", + ) ) -conversation_variable_update_parser = reqparse.RequestParser() -# using lambda is for passing the already-typed value without modification -# if no lambda, it will be converted to string -# the string cannot be converted using json.loads -conversation_variable_update_parser.add_argument( - "value", required=True, location="json", type=lambda x: x, help="New value for the conversation variable" +conversation_variable_update_parser = reqparse.RequestParser().add_argument( + # using lambda is for passing the already-typed value without modification + # if no lambda, it will be converted to string + # the string cannot be converted using json.loads + "value", + required=True, + location="json", + type=lambda x: x, + help="New value for the conversation variable", ) diff --git a/api/controllers/service_api/app/file_preview.py b/api/controllers/service_api/app/file_preview.py index 63b46f49f2..b8e91f0657 100644 --- a/api/controllers/service_api/app/file_preview.py +++ b/api/controllers/service_api/app/file_preview.py @@ -18,8 +18,7 @@ logger = logging.getLogger(__name__) # Define parser for file preview API -file_preview_parser = reqparse.RequestParser() -file_preview_parser.add_argument( 
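The lambda comment on `conversation_variable_update_parser` above is worth a concrete illustration: reqparse's default `type` coerces values to `str`, and the `repr` of a dict or list is not valid JSON, so a later `json.loads` would fail; an identity lambda forwards the already-parsed JSON value untouched. A small demonstration with hypothetical values:

    import json

    payload_value = {"a": 1}            # already parsed from the JSON body
    stringified = str(payload_value)    # "{'a': 1}": single quotes, not JSON
    try:
        json.loads(stringified)
    except json.JSONDecodeError:
        print("the repr round-trip loses the value")

    identity = lambda x: x              # what the parser passes as type=
    assert identity(payload_value) is payload_value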
+file_preview_parser = reqparse.RequestParser().add_argument( "as_attachment", type=bool, required=False, default=False, location="args", help="Download as attachment" ) diff --git a/api/controllers/service_api/app/message.py b/api/controllers/service_api/app/message.py index fc506ef723..b8e5ed28e4 100644 --- a/api/controllers/service_api/app/message.py +++ b/api/controllers/service_api/app/message.py @@ -26,25 +26,37 @@ logger = logging.getLogger(__name__) # Define parsers for message APIs -message_list_parser = reqparse.RequestParser() -message_list_parser.add_argument( - "conversation_id", required=True, type=uuid_value, location="args", help="Conversation ID" -) -message_list_parser.add_argument("first_id", type=uuid_value, location="args", help="First message ID for pagination") -message_list_parser.add_argument( - "limit", type=int_range(1, 100), required=False, default=20, location="args", help="Number of messages to return" +message_list_parser = ( + reqparse.RequestParser() + .add_argument("conversation_id", required=True, type=uuid_value, location="args", help="Conversation ID") + .add_argument("first_id", type=uuid_value, location="args", help="First message ID for pagination") + .add_argument( + "limit", + type=int_range(1, 100), + required=False, + default=20, + location="args", + help="Number of messages to return", + ) ) -message_feedback_parser = reqparse.RequestParser() -message_feedback_parser.add_argument( - "rating", type=str, choices=["like", "dislike", None], location="json", help="Feedback rating" +message_feedback_parser = ( + reqparse.RequestParser() + .add_argument("rating", type=str, choices=["like", "dislike", None], location="json", help="Feedback rating") + .add_argument("content", type=str, location="json", help="Feedback content") ) -message_feedback_parser.add_argument("content", type=str, location="json", help="Feedback content") -feedback_list_parser = reqparse.RequestParser() -feedback_list_parser.add_argument("page", type=int, default=1, location="args", help="Page number") -feedback_list_parser.add_argument( - "limit", type=int_range(1, 101), required=False, default=20, location="args", help="Number of feedbacks per page" +feedback_list_parser = ( + reqparse.RequestParser() + .add_argument("page", type=int, default=1, location="args", help="Page number") + .add_argument( + "limit", + type=int_range(1, 101), + required=False, + default=20, + location="args", + help="Number of feedbacks per page", + ) ) diff --git a/api/controllers/service_api/app/workflow.py b/api/controllers/service_api/app/workflow.py index e912563bc6..af5eae463d 100644 --- a/api/controllers/service_api/app/workflow.py +++ b/api/controllers/service_api/app/workflow.py @@ -42,32 +42,36 @@ from services.workflow_app_service import WorkflowAppService logger = logging.getLogger(__name__) # Define parsers for workflow APIs -workflow_run_parser = reqparse.RequestParser() -workflow_run_parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") -workflow_run_parser.add_argument("files", type=list, required=False, location="json") -workflow_run_parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") +workflow_run_parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("files", type=list, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") +) -workflow_log_parser = 
reqparse.RequestParser() -workflow_log_parser.add_argument("keyword", type=str, location="args") -workflow_log_parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args") -workflow_log_parser.add_argument("created_at__before", type=str, location="args") -workflow_log_parser.add_argument("created_at__after", type=str, location="args") -workflow_log_parser.add_argument( - "created_by_end_user_session_id", - type=str, - location="args", - required=False, - default=None, +workflow_log_parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args") + .add_argument("created_at__before", type=str, location="args") + .add_argument("created_at__after", type=str, location="args") + .add_argument( + "created_by_end_user_session_id", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument( + "created_by_account", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument("page", type=int_range(1, 99999), default=1, location="args") + .add_argument("limit", type=int_range(1, 100), default=20, location="args") ) -workflow_log_parser.add_argument( - "created_by_account", - type=str, - location="args", - required=False, - default=None, -) -workflow_log_parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") -workflow_log_parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") workflow_run_fields = { "id": fields.String, diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 6a70345f7c..9d5566919b 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -1,10 +1,10 @@ -from typing import Literal +from typing import Any, Literal, cast from flask import request from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, NotFound -import services.dataset_service +import services from controllers.service_api import service_api_ns from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError from controllers.service_api.wraps import ( @@ -17,6 +17,7 @@ from core.provider_manager import ProviderManager from fields.dataset_fields import dataset_detail_fields from fields.tag_fields import build_dataset_tag_fields from libs.login import current_user +from libs.validators import validate_description_length from models.account import Account from models.dataset import Dataset, DatasetPermissionEnum from models.provider_ids import ModelProviderID @@ -31,126 +32,119 @@ def _validate_name(name): return name -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - # Define parsers for dataset operations -dataset_create_parser = reqparse.RequestParser() -dataset_create_parser.add_argument( - "name", - nullable=False, - required=True, - help="type is required. 
Name must be between 1 to 40 characters.", - type=_validate_name, -) -dataset_create_parser.add_argument( - "description", - type=_validate_description_length, - nullable=True, - required=False, - default="", -) -dataset_create_parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - help="Invalid indexing technique.", -) -dataset_create_parser.add_argument( - "permission", - type=str, - location="json", - choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), - help="Invalid permission.", - required=False, - nullable=False, -) -dataset_create_parser.add_argument( - "external_knowledge_api_id", - type=str, - nullable=True, - required=False, - default="_validate_name", -) -dataset_create_parser.add_argument( - "provider", - type=str, - nullable=True, - required=False, - default="vendor", -) -dataset_create_parser.add_argument( - "external_knowledge_id", - type=str, - nullable=True, - required=False, -) -dataset_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") -dataset_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") -dataset_create_parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") - -dataset_update_parser = reqparse.RequestParser() -dataset_update_parser.add_argument( - "name", - nullable=False, - help="type is required. Name must be between 1 to 40 characters.", - type=_validate_name, -) -dataset_update_parser.add_argument( - "description", location="json", store_missing=False, type=_validate_description_length -) -dataset_update_parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - nullable=True, - help="Invalid indexing technique.", -) -dataset_update_parser.add_argument( - "permission", - type=str, - location="json", - choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), - help="Invalid permission.", -) -dataset_update_parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") -dataset_update_parser.add_argument( - "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider." -) -dataset_update_parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") -dataset_update_parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") -dataset_update_parser.add_argument( - "external_retrieval_model", - type=dict, - required=False, - nullable=True, - location="json", - help="Invalid external retrieval model.", -) -dataset_update_parser.add_argument( - "external_knowledge_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge id.", -) -dataset_update_parser.add_argument( - "external_knowledge_api_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge api id.", +dataset_create_parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="type is required. 
Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument( + "description", + type=validate_description_length, + nullable=True, + required=False, + default="", + ) + .add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + help="Invalid indexing technique.", + ) + .add_argument( + "permission", + type=str, + location="json", + choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), + help="Invalid permission.", + required=False, + nullable=False, + ) + .add_argument( + "external_knowledge_api_id", + type=str, + nullable=True, + required=False, + default="_validate_name", + ) + .add_argument( + "provider", + type=str, + nullable=True, + required=False, + default="vendor", + ) + .add_argument( + "external_knowledge_id", + type=str, + nullable=True, + required=False, + ) + .add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") + .add_argument("embedding_model", type=str, required=False, nullable=True, location="json") + .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") ) -tag_create_parser = reqparse.RequestParser() -tag_create_parser.add_argument( +dataset_update_parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + help="type is required. Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument("description", location="json", store_missing=False, type=validate_description_length) + .add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + nullable=True, + help="Invalid indexing technique.", + ) + .add_argument( + "permission", + type=str, + location="json", + choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), + help="Invalid permission.", + ) + .add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") + .add_argument("embedding_model_provider", type=str, location="json", help="Invalid embedding model provider.") + .add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") + .add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") + .add_argument( + "external_retrieval_model", + type=dict, + required=False, + nullable=True, + location="json", + help="Invalid external retrieval model.", + ) + .add_argument( + "external_knowledge_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge id.", + ) + .add_argument( + "external_knowledge_api_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge api id.", + ) +) + +tag_create_parser = reqparse.RequestParser().add_argument( "name", nullable=False, required=True, @@ -160,32 +154,37 @@ tag_create_parser.add_argument( else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), ) -tag_update_parser = reqparse.RequestParser() -tag_update_parser.add_argument( - "name", - nullable=False, - required=True, - help="Name must be between 1 to 50 characters.", - type=lambda x: x - if x and 1 <= len(x) <= 50 - else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), -) -tag_update_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) - -tag_delete_parser = 
reqparse.RequestParser() -tag_delete_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) - -tag_binding_parser = reqparse.RequestParser() -tag_binding_parser.add_argument( - "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required." -) -tag_binding_parser.add_argument( - "target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required." +tag_update_parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="Name must be between 1 to 50 characters.", + type=lambda x: x + if x and 1 <= len(x) <= 50 + else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), + ) + .add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) ) -tag_unbinding_parser = reqparse.RequestParser() -tag_unbinding_parser.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") -tag_unbinding_parser.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") +tag_delete_parser = reqparse.RequestParser().add_argument( + "tag_id", nullable=False, required=True, help="Id of a tag.", type=str +) + +tag_binding_parser = ( + reqparse.RequestParser() + .add_argument("tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required.") + .add_argument( + "target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required." + ) +) + +tag_unbinding_parser = ( + reqparse.RequestParser() + .add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") + .add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") +) @service_api_ns.route("/datasets") @@ -254,19 +253,21 @@ class DatasetListApi(DatasetApiResource): """Resource for creating datasets.""" args = dataset_create_parser.parse_args() - if args.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") - ) + embedding_model_provider = args.get("embedding_model_provider") + embedding_model = args.get("embedding_model") + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model) + + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) try: @@ -283,7 +284,7 @@ class DatasetListApi(DatasetApiResource): external_knowledge_id=args["external_knowledge_id"], embedding_model_provider=args["embedding_model_provider"], embedding_model_name=args["embedding_model"], - retrieval_model=RetrievalModel(**args["retrieval_model"]) + retrieval_model=RetrievalModel.model_validate(args["retrieval_model"]) if 
args["retrieval_model"] is not None else None, ) @@ -317,7 +318,7 @@ class DatasetApi(DatasetApiResource): DatasetService.check_dataset_permission(dataset, current_user) except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - data = marshal(dataset, dataset_detail_fields) + data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) # check embedding setting provider_manager = ProviderManager() assert isinstance(current_user, Account) @@ -331,8 +332,8 @@ class DatasetApi(DatasetApiResource): for embedding_model in embedding_models: model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}") - if data["indexing_technique"] == "high_quality": - item_model = f"{data['embedding_model']}:{data['embedding_model_provider']}" + if data.get("indexing_technique") == "high_quality": + item_model = f"{data.get('embedding_model')}:{data.get('embedding_model_provider')}" if item_model in model_names: data["embedding_available"] = True else: @@ -341,7 +342,9 @@ class DatasetApi(DatasetApiResource): data["embedding_available"] = True # force update search method to keyword_search if indexing_technique is economic - data["retrieval_model_dict"]["search_method"] = "keyword_search" + retrieval_model_dict = data.get("retrieval_model_dict") + if retrieval_model_dict: + retrieval_model_dict["search_method"] = "keyword_search" if data.get("permission") == "partial_members": part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str) @@ -372,19 +375,24 @@ class DatasetApi(DatasetApiResource): data = request.get_json() # check embedding model setting - if data.get("indexing_technique") == "high_quality" or data.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - dataset.tenant_id, data.get("embedding_model_provider"), data.get("embedding_model") - ) + embedding_model_provider = data.get("embedding_model_provider") + embedding_model = data.get("embedding_model") + if data.get("indexing_technique") == "high_quality" or embedding_model_provider: + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting( + dataset.tenant_id, embedding_model_provider, embedding_model + ) + + retrieval_model = data.get("retrieval_model") if ( - data.get("retrieval_model") - and data.get("retrieval_model").get("reranking_model") - and data.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( dataset.tenant_id, - data.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - data.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator @@ -397,7 +405,7 @@ class DatasetApi(DatasetApiResource): if dataset is None: raise NotFound("Dataset not found.") - result_data = marshal(dataset, dataset_detail_fields) + result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) assert isinstance(current_user, Account) tenant_id = current_user.current_tenant_id @@ -591,9 +599,10 @@ class DatasetTagsApi(DatasetApiResource): args = tag_update_parser.parse_args() args["type"] = "knowledge" - tag = 
TagService.update_tags(args, args.get("tag_id")) + tag_id = args["tag_id"] + tag = TagService.update_tags(args, tag_id) - binding_count = TagService.get_tag_binding_count(args.get("tag_id")) + binding_count = TagService.get_tag_binding_count(tag_id) response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count} @@ -616,7 +625,7 @@ class DatasetTagsApi(DatasetApiResource): if not current_user.has_edit_permission: raise Forbidden() args = tag_delete_parser.parse_args() - TagService.delete_tag(args.get("tag_id")) + TagService.delete_tag(args["tag_id"]) return 204 diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index e01bc8940c..893cd7c923 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -35,37 +35,31 @@ from services.entities.knowledge_entities.knowledge_entities import KnowledgeCon from services.file_service import FileService # Define parsers for document operations -document_text_create_parser = reqparse.RequestParser() -document_text_create_parser.add_argument("name", type=str, required=True, nullable=False, location="json") -document_text_create_parser.add_argument("text", type=str, required=True, nullable=False, location="json") -document_text_create_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") -document_text_create_parser.add_argument("original_document_id", type=str, required=False, location="json") -document_text_create_parser.add_argument( - "doc_form", type=str, default="text_model", required=False, nullable=False, location="json" -) -document_text_create_parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" -) -document_text_create_parser.add_argument( - "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" -) -document_text_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") -document_text_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") -document_text_create_parser.add_argument( - "embedding_model_provider", type=str, required=False, nullable=True, location="json" +document_text_create_parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("text", type=str, required=True, nullable=False, location="json") + .add_argument("process_rule", type=dict, required=False, nullable=True, location="json") + .add_argument("original_document_id", type=str, required=False, location="json") + .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json") + .add_argument( + "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" + ) + .add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") + .add_argument("embedding_model", type=str, required=False, nullable=True, location="json") + .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") ) -document_text_update_parser = reqparse.RequestParser() -document_text_update_parser.add_argument("name", type=str, required=False, nullable=True, location="json") 
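Another pattern running through this file and the document/metadata controllers below is replacing `Model(**args)` with `Model.model_validate(args)`. In pydantic v2, `model_validate` is the canonical entry point for validating a mapping: it avoids the untyped `**` expansion (which static checkers cannot verify), accepts any mapping-like object such as reqparse's parse result, and raises the same `ValidationError` on bad input. A sketch against a hypothetical stand-in model:

    from pydantic import BaseModel, ValidationError

    class RetrievalSettings(BaseModel):
        # hypothetical stand-in; the diff validates models such as
        # RetrievalModel and KnowledgeConfig the same way
        top_k: int = 4
        score_threshold: float | None = None

    raw = {"top_k": "8"}                              # e.g. a parsed request payload
    settings = RetrievalSettings.model_validate(raw)  # lax mode coerces "8" -> 8
    assert settings.top_k == 8

    try:
        RetrievalSettings.model_validate({"top_k": "not-a-number"})
    except ValidationError as exc:
        print(exc.error_count())                      # 1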
-document_text_update_parser.add_argument("text", type=str, required=False, nullable=True, location="json") -document_text_update_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") -document_text_update_parser.add_argument( - "doc_form", type=str, default="text_model", required=False, nullable=False, location="json" +document_text_update_parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, nullable=True, location="json") + .add_argument("text", type=str, required=False, nullable=True, location="json") + .add_argument("process_rule", type=dict, required=False, nullable=True, location="json") + .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json") + .add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") ) -document_text_update_parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" -) -document_text_update_parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") @service_api_ns.route( @@ -108,19 +102,21 @@ class DocumentAddByTextApi(DatasetApiResource): if text is None or name is None: raise ValueError("Both 'text' and 'name' must be non-null values.") - if args.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") - ) + embedding_model_provider = args.get("embedding_model_provider") + embedding_model = args.get("embedding_model") + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model) + + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) if not current_user: @@ -134,7 +130,7 @@ class DocumentAddByTextApi(DatasetApiResource): "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}}, } args["data_source"] = data_source - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) # validate args DocumentService.document_create_args_validate(knowledge_config) @@ -187,15 +183,16 @@ class DocumentUpdateByTextApi(DatasetApiResource): if not dataset: raise ValueError("Dataset does not exist.") + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): 
DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) # indexing_technique is already set in dataset since this is an update @@ -218,7 +215,7 @@ class DocumentUpdateByTextApi(DatasetApiResource): args["data_source"] = data_source # validate args args["original_document_id"] = str(document_id) - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) DocumentService.document_create_args_validate(knowledge_config) try: @@ -325,7 +322,7 @@ class DocumentAddByFileApi(DatasetApiResource): } args["data_source"] = data_source # validate args - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) DocumentService.document_create_args_validate(knowledge_config) dataset_process_rule = dataset.latest_process_rule if "process_rule" not in args else None @@ -423,7 +420,7 @@ class DocumentUpdateByFileApi(DatasetApiResource): # validate args args["original_document_id"] = str(document_id) - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) DocumentService.document_create_args_validate(knowledge_config) try: diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py index c6032048e6..f646f1f4fa 100644 --- a/api/controllers/service_api/dataset/metadata.py +++ b/api/controllers/service_api/dataset/metadata.py @@ -15,21 +15,17 @@ from services.entities.knowledge_entities.knowledge_entities import ( from services.metadata_service import MetadataService # Define parsers for metadata APIs -metadata_create_parser = reqparse.RequestParser() -metadata_create_parser.add_argument( - "type", type=str, required=True, nullable=False, location="json", help="Metadata type" -) -metadata_create_parser.add_argument( - "name", type=str, required=True, nullable=False, location="json", help="Metadata name" +metadata_create_parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=False, location="json", help="Metadata type") + .add_argument("name", type=str, required=True, nullable=False, location="json", help="Metadata name") ) -metadata_update_parser = reqparse.RequestParser() -metadata_update_parser.add_argument( +metadata_update_parser = reqparse.RequestParser().add_argument( "name", type=str, required=True, nullable=False, location="json", help="New metadata name" ) -document_metadata_parser = reqparse.RequestParser() -document_metadata_parser.add_argument( +document_metadata_parser = reqparse.RequestParser().add_argument( "operation_data", type=list, required=True, nullable=False, location="json", help="Metadata operation data" ) @@ -51,7 +47,7 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource): def post(self, tenant_id, dataset_id): """Create metadata for a dataset.""" args = metadata_create_parser.parse_args() - metadata_args = MetadataArgs(**args) + metadata_args = MetadataArgs.model_validate(args) dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -106,7 +102,7 @@ class DatasetMetadataServiceApi(DatasetApiResource): raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - metadata = 
MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) + metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args["name"]) return marshal(metadata, dataset_metadata_fields), 200 @service_api_ns.doc("delete_dataset_metadata") @@ -200,7 +196,7 @@ class DocumentMetadataEditServiceApi(DatasetApiResource): DatasetService.check_dataset_permission(dataset, current_user) args = document_metadata_parser.parse_args() - metadata_args = MetadataOperationData(**args) + metadata_args = MetadataOperationData.model_validate(args) MetadataService.update_documents_metadata(dataset, metadata_args) diff --git a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py index f05325d711..c177e9180a 100644 --- a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py @@ -17,7 +17,7 @@ from core.app.apps.pipeline.pipeline_generator import PipelineGenerator from core.app.entities.app_invoke_entities import InvokeFrom from libs import helper from libs.login import current_user -from models.account import Account +from models import Account from models.dataset import Pipeline from models.engine import db from services.errors.file import FileTooLargeError, UnsupportedFileTypeError @@ -91,14 +91,16 @@ class DatasourceNodeRunApi(DatasetApiResource): def post(self, tenant_id: str, dataset_id: str, node_id: str): """Resource for getting datasource plugins.""" # Get query parameter to determine published or draft - parser: RequestParser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("credential_id", type=str, required=False, location="json") - parser.add_argument("is_published", type=bool, required=True, location="json") + parser: RequestParser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("credential_id", type=str, required=False, location="json") + .add_argument("is_published", type=bool, required=True, location="json") + ) args: ParseResult = parser.parse_args() - datasource_node_run_api_entity: DatasourceNodeRunApiEntity = DatasourceNodeRunApiEntity(**args) + datasource_node_run_api_entity = DatasourceNodeRunApiEntity.model_validate(args) assert isinstance(current_user, Account) rag_pipeline_service: RagPipelineService = RagPipelineService() pipeline: Pipeline = rag_pipeline_service.get_pipeline(tenant_id=tenant_id, dataset_id=dataset_id) @@ -147,19 +149,21 @@ class PipelineRunApi(DatasetApiResource): ) def post(self, tenant_id: str, dataset_id: str): """Resource for running a rag pipeline.""" - parser: RequestParser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("datasource_info_list", type=list, required=True, location="json") - parser.add_argument("start_node_id", type=str, required=True, location="json") - parser.add_argument("is_published", type=bool, required=True, default=True, location="json") - parser.add_argument( - "response_mode", - type=str, - 
required=True, - choices=["streaming", "blocking"], - default="blocking", - location="json", + parser: RequestParser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("datasource_info_list", type=list, required=True, location="json") + .add_argument("start_node_id", type=str, required=True, location="json") + .add_argument("is_published", type=bool, required=True, default=True, location="json") + .add_argument( + "response_mode", + type=str, + required=True, + choices=["streaming", "blocking"], + default="blocking", + location="json", + ) ) args: ParseResult = parser.parse_args() diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index a22155b07a..81abd19fed 100644 --- a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -1,5 +1,4 @@ from flask import request -from flask_login import current_user from flask_restx import marshal, reqparse from werkzeug.exceptions import NotFound @@ -16,6 +15,7 @@ from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType from extensions.ext_database import db from fields.segment_fields import child_chunk_fields, segment_fields +from libs.login import current_account_with_tenant from models.dataset import Dataset from services.dataset_service import DatasetService, DocumentService, SegmentService from services.entities.knowledge_entities.knowledge_entities import SegmentUpdateArgs @@ -24,26 +24,34 @@ from services.errors.chunk import ChildChunkDeleteIndexError as ChildChunkDelete from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingServiceError # Define parsers for segment operations -segment_create_parser = reqparse.RequestParser() -segment_create_parser.add_argument("segments", type=list, required=False, nullable=True, location="json") +segment_create_parser = reqparse.RequestParser().add_argument( + "segments", type=list, required=False, nullable=True, location="json" +) -segment_list_parser = reqparse.RequestParser() -segment_list_parser.add_argument("status", type=str, action="append", default=[], location="args") -segment_list_parser.add_argument("keyword", type=str, default=None, location="args") +segment_list_parser = ( + reqparse.RequestParser() + .add_argument("status", type=str, action="append", default=[], location="args") + .add_argument("keyword", type=str, default=None, location="args") +) -segment_update_parser = reqparse.RequestParser() -segment_update_parser.add_argument("segment", type=dict, required=False, nullable=True, location="json") +segment_update_parser = reqparse.RequestParser().add_argument( + "segment", type=dict, required=False, nullable=True, location="json" +) -child_chunk_create_parser = reqparse.RequestParser() -child_chunk_create_parser.add_argument("content", type=str, required=True, nullable=False, location="json") +child_chunk_create_parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" +) -child_chunk_list_parser = reqparse.RequestParser() -child_chunk_list_parser.add_argument("limit", type=int, default=20, location="args") -child_chunk_list_parser.add_argument("keyword", type=str, default=None, location="args") -child_chunk_list_parser.add_argument("page", type=int, default=1, location="args") 
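Reviewer note on the chained-parser refactor used throughout this diff: it is safe because flask-restx's `RequestParser.add_argument` returns the parser instance itself. A sketch of the two equivalent forms:

```python
from flask_restx import reqparse

# Fluent form adopted in this PR; add_argument returns the parser,
# so calls can be chained inside one expression.
segment_list_parser = (
    reqparse.RequestParser()
    .add_argument("status", type=str, action="append", default=[], location="args")
    .add_argument("keyword", type=str, default=None, location="args")
)

# Behaviorally identical to the imperative form it replaces:
legacy_parser = reqparse.RequestParser()
legacy_parser.add_argument("status", type=str, action="append", default=[], location="args")
legacy_parser.add_argument("keyword", type=str, default=None, location="args")
```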
+child_chunk_list_parser = ( + reqparse.RequestParser() + .add_argument("limit", type=int, default=20, location="args") + .add_argument("keyword", type=str, default=None, location="args") + .add_argument("page", type=int, default=1, location="args") +) -child_chunk_update_parser = reqparse.RequestParser() -child_chunk_update_parser.add_argument("content", type=str, required=True, nullable=False, location="json") +child_chunk_update_parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" +) @service_api_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments") @@ -66,6 +74,7 @@ class SegmentApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str): + _, current_tenant_id = current_account_with_tenant() """Create single segment.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -84,7 +93,7 @@ class SegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -117,6 +126,7 @@ class SegmentApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str): + _, current_tenant_id = current_account_with_tenant() """Get segments.""" # check dataset page = request.args.get("page", default=1, type=int) @@ -133,7 +143,7 @@ class SegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -149,7 +159,7 @@ class SegmentApi(DatasetApiResource): segments, total = SegmentService.get_segments( document_id=document_id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, status_list=args["status"], keyword=args["keyword"], page=page, @@ -184,6 +194,7 @@ class DatasetSegmentApi(DatasetApiResource): ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -195,7 +206,7 @@ class DatasetSegmentApi(DatasetApiResource): if not document: raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") SegmentService.delete_segment(segment, document, dataset) @@ -217,6 +228,7 @@ class DatasetSegmentApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id ==
dataset_id).first() if not dataset: @@ -232,7 +244,7 @@ class DatasetSegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -244,7 +256,7 @@ class DatasetSegmentApi(DatasetApiResource): except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -252,7 +264,7 @@ class DatasetSegmentApi(DatasetApiResource): args = segment_update_parser.parse_args() updated_segment = SegmentService.update_segment( - SegmentUpdateArgs(**args["segment"]), segment, document, dataset + SegmentUpdateArgs.model_validate(args["segment"]), segment, document, dataset ) return {"data": marshal(updated_segment, segment_fields), "doc_form": document.doc_form}, 200 @@ -266,6 +278,7 @@ class DatasetSegmentApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -277,7 +290,7 @@ class DatasetSegmentApi(DatasetApiResource): if not document: raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -307,6 +320,7 @@ class ChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() """Create child chunk.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -319,7 +333,7 @@ class ChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -328,7 +342,7 @@ class ChildChunkApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -364,6 +378,7 @@ class ChildChunkApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() """Get child chunks.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -376,7 +391,7 @@ class ChildChunkApi(DatasetApiResource): raise NotFound("Document not 
found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -423,6 +438,7 @@ class DatasetChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, child_chunk_id: str): + _, current_tenant_id = current_account_with_tenant() """Delete child chunk.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -435,7 +451,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -444,9 +460,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # check child chunk - child_chunk = SegmentService.get_child_chunk_by_id( - child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id - ) + child_chunk = SegmentService.get_child_chunk_by_id(child_chunk_id=child_chunk_id, tenant_id=current_tenant_id) if not child_chunk: raise NotFound("Child chunk not found.") @@ -483,6 +497,7 @@ class DatasetChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, child_chunk_id: str): + _, current_tenant_id = current_account_with_tenant() """Update child chunk.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -495,7 +510,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # get segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -504,9 +519,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Segment not found.") # get child chunk - child_chunk = SegmentService.get_child_chunk_by_id( - child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id - ) + child_chunk = SegmentService.get_child_chunk_by_id(child_chunk_id=child_chunk_id, tenant_id=current_tenant_id) if not child_chunk: raise NotFound("Child chunk not found.") diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index ee8e1d105b..638ab528f3 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -17,7 +17,7 @@ from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now from libs.login import current_user -from models.account import Account, Tenant, TenantAccountJoin, TenantStatus +from models import Account, Tenant, TenantAccountJoin, TenantStatus from models.dataset import Dataset, RateLimitLog from models.model import 
ApiToken, App, DefaultEndUserSessionID, EndUser from services.feature_service import FeatureService @@ -313,7 +313,7 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: str | None = Create or update session terminal based on user ID. """ if not user_id: - user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID with Session(db.engine, expire_on_commit=False) as session: end_user = ( @@ -332,7 +332,7 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: str | None = tenant_id=app_model.tenant_id, app_id=app_model.id, type="service_api", - is_anonymous=user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID.value, + is_anonymous=user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID, session_id=user_id, ) session.add(end_user) diff --git a/api/controllers/web/app.py b/api/controllers/web/app.py index 2bc068ec75..60193f5f15 100644 --- a/api/controllers/web/app.py +++ b/api/controllers/web/app.py @@ -4,12 +4,14 @@ from flask import request from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import Unauthorized +from constants import HEADER_NAME_APP_CODE from controllers.common import fields from controllers.web import web_ns from controllers.web.error import AppUnavailableError from controllers.web.wraps import WebApiResource from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict from libs.passport import PassportService +from libs.token import extract_webapp_passport from models.model import App, AppMode from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -94,9 +96,11 @@ class AppAccessMode(Resource): } ) def get(self): - parser = reqparse.RequestParser() - parser.add_argument("appId", type=str, required=False, location="args") - parser.add_argument("appCode", type=str, required=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("appId", type=str, required=False, location="args") + .add_argument("appCode", type=str, required=False, location="args") + ) args = parser.parse_args() features = FeatureService.get_system_features() @@ -131,18 +135,19 @@ class AppWebAuthPermission(Resource): ) def get(self): user_id = "visitor" + app_code = request.headers.get(HEADER_NAME_APP_CODE) + app_id = request.args.get("appId") + if not app_id or not app_code: + raise ValueError("appId and appCode must be provided") + + require_permission_check = WebAppAuthService.is_app_require_permission_check(app_id=app_id) + if not require_permission_check: + return {"result": True} + try: - auth_header = request.headers.get("Authorization") - if auth_header is None: - raise Unauthorized("Authorization header is missing.") - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format.
Expected 'Bearer ' format.") - - auth_scheme, tk = auth_header.split(None, 1) - auth_scheme = auth_scheme.lower() - if auth_scheme != "bearer": - raise Unauthorized("Authorization scheme must be 'Bearer'") - + tk = extract_webapp_passport(app_code, request) + if not tk: + raise Unauthorized("Access token is missing.") decoded = PassportService().verify(tk) user_id = decoded.get("user_id", "visitor") except Unauthorized: @@ -155,14 +160,7 @@ class AppWebAuthPermission(Resource): if not features.webapp_auth.enabled: return {"result": True} - parser = reqparse.RequestParser() - parser.add_argument("appId", type=str, required=True, location="args") - args = parser.parse_args() - - app_id = args["appId"] - app_code = AppService.get_app_code_by_id(app_id) - res = True if WebAppAuthService.is_app_require_permission_check(app_id=app_id): - res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(str(user_id), app_code) + res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(str(user_id), app_id) return {"result": res} diff --git a/api/controllers/web/audio.py b/api/controllers/web/audio.py index c1c46891b6..3103851088 100644 --- a/api/controllers/web/audio.py +++ b/api/controllers/web/audio.py @@ -108,11 +108,13 @@ class TextApi(WebApiResource): def post(self, app_model: App, end_user): """Convert text to audio""" try: - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=str, required=False, location="json") - parser.add_argument("voice", type=str, location="json") - parser.add_argument("text", type=str, location="json") - parser.add_argument("streaming", type=bool, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=str, required=False, location="json") + .add_argument("voice", type=str, location="json") + .add_argument("text", type=str, location="json") + .add_argument("streaming", type=bool, location="json") + ) args = parser.parse_args() message_id = args.get("message_id", None) diff --git a/api/controllers/web/completion.py b/api/controllers/web/completion.py index 67ae970388..5e45beffc0 100644 --- a/api/controllers/web/completion.py +++ b/api/controllers/web/completion.py @@ -67,12 +67,14 @@ class CompletionApi(WebApiResource): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, location="json", default="") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from", type=str, required=False, default="web_app", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, location="json", default="") + .add_argument("files", type=list, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("retriever_from", type=str, required=False, default="web_app", location="json") + ) args = parser.parse_args() @@ -166,14 +168,16 @@ class ChatApi(WebApiResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, 
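Reviewer note on the reordered `AppWebAuthPermission.get` above: public apps are now short-circuited before any token work, so a missing or expired passport no longer fails them. A condensed sketch of the flow with the collaborators injected (not the exact handler):

```python
def web_auth_permission_sketch(app_id, app_code, auth_service, passport, token):
    # Validate identifiers first; both are required by the new flow.
    if not app_id or not app_code:
        raise ValueError("appId and appCode must be provided")
    # Public app: skip token verification entirely.
    if not auth_service.is_app_require_permission_check(app_id=app_id):
        return {"result": True}
    user_id = "visitor"
    if token:
        user_id = passport.verify(token).get("user_id", "visitor")
    # Permission checks are now keyed by app_id rather than app_code.
    return {"result": auth_service.is_user_allowed_to_access_webapp(str(user_id), app_id)}
```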
required=True, location="json") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json") - parser.add_argument("retriever_from", type=str, required=False, default="web_app", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, required=True, location="json") + .add_argument("files", type=list, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("conversation_id", type=uuid_value, location="json") + .add_argument("parent_message_id", type=uuid_value, required=False, location="json") + .add_argument("retriever_from", type=str, required=False, default="web_app", location="json") + ) args = parser.parse_args() diff --git a/api/controllers/web/conversation.py b/api/controllers/web/conversation.py index 03dd986aed..86e19423e5 100644 --- a/api/controllers/web/conversation.py +++ b/api/controllers/web/conversation.py @@ -60,17 +60,19 @@ class ConversationListApi(WebApiResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - parser.add_argument("pinned", type=str, choices=["true", "false", None], location="args") - parser.add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", + parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + .add_argument("pinned", type=str, choices=["true", "false", None], location="args") + .add_argument( + "sort_by", + type=str, + choices=["created_at", "-created_at", "updated_at", "-updated_at"], + required=False, + default="-updated_at", + location="args", + ) ) args = parser.parse_args() @@ -161,9 +163,11 @@ class ConversationRenameApi(WebApiResource): conversation_id = str(c_id) - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=False, location="json") - parser.add_argument("auto_generate", type=bool, required=False, default=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, location="json") + .add_argument("auto_generate", type=bool, required=False, default=False, location="json") + ) args = parser.parse_args() try: diff --git a/api/controllers/web/forgot_password.py b/api/controllers/web/forgot_password.py index c743d0f52b..b9e391e049 100644 --- a/api/controllers/web/forgot_password.py +++ b/api/controllers/web/forgot_password.py @@ -20,7 +20,7 @@ from controllers.web import web_ns from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password -from models.account import Account +from models import Account from services.account_service import AccountService @@ -40,9 +40,11 @@ class 
ForgotPasswordSendEmailApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() ip_address = extract_remote_ip(request) @@ -76,10 +78,12 @@ class ForgotPasswordCheckApi(Resource): responses={200: "Token is valid", 400: "Bad request - invalid token format", 401: "Invalid or expired token"} ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, location="json") - parser.add_argument("token", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() user_email = args["email"] @@ -127,10 +131,12 @@ class ForgotPasswordResetApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("token", type=str, required=True, nullable=False, location="json") - parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") - parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("token", type=str, required=True, nullable=False, location="json") + .add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") + .add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + ) args = parser.parse_args() # Validate passwords match diff --git a/api/controllers/web/login.py b/api/controllers/web/login.py index a489101cc9..244ef47982 100644 --- a/api/controllers/web/login.py +++ b/api/controllers/web/login.py @@ -1,7 +1,9 @@ +from flask import make_response, request from flask_restx import Resource, reqparse from jwt import InvalidTokenError import services +from configs import dify_config from controllers.console.auth.error import ( AuthenticationFailedError, EmailCodeError, @@ -10,9 +12,16 @@ from controllers.console.auth.error import ( from controllers.console.error import AccountBannedError from controllers.console.wraps import only_edition_enterprise, setup_required from controllers.web import web_ns +from controllers.web.wraps import decode_jwt_token from libs.helper import email +from libs.passport import PassportService from libs.password import valid_password +from libs.token import ( + clear_webapp_access_token_from_cookie, + extract_webapp_access_token, +) from services.account_service import AccountService +from services.app_service import AppService from services.webapp_auth_service import WebAppAuthService @@ -35,9 +44,11 @@ class LoginApi(Resource): ) def post(self): """Authenticate user and login.""" - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("password", type=valid_password, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", 
type=email, required=True, location="json") + .add_argument("password", type=valid_password, required=True, location="json") + ) args = parser.parse_args() try: @@ -50,17 +61,75 @@ class LoginApi(Resource): raise AuthenticationFailedError() token = WebAppAuthService.login(account=account) - return {"result": "success", "data": {"access_token": token}} + response = make_response({"result": "success", "data": {"access_token": token}}) + # set_access_token_to_cookie(request, response, token, samesite="None", httponly=False) + return response -# class LogoutApi(Resource): -# @setup_required -# def get(self): -# account = cast(Account, flask_login.current_user) -# if isinstance(account, flask_login.AnonymousUserMixin): -# return {"result": "success"} -# flask_login.logout_user() -# return {"result": "success"} +# this api helps frontend to check whether user is authenticated +# TODO: remove in the future. frontend should redirect to login page by catching 401 status +@web_ns.route("/login/status") +class LoginStatusApi(Resource): + @setup_required + @web_ns.doc("web_app_login_status") + @web_ns.doc(description="Check login status") + @web_ns.doc( + responses={ + 200: "Login status", + 401: "Login status", + } + ) + def get(self): + app_code = request.args.get("app_code") + token = extract_webapp_access_token(request) + if not app_code: + return { + "logged_in": bool(token), + "app_logged_in": False, + } + app_id = AppService.get_app_id_by_code(app_code) + is_public = not dify_config.ENTERPRISE_ENABLED or not WebAppAuthService.is_app_require_permission_check( + app_id=app_id + ) + user_logged_in = False + + if is_public: + user_logged_in = True + else: + try: + PassportService().verify(token=token) + user_logged_in = True + except Exception: + user_logged_in = False + + try: + _ = decode_jwt_token(app_code=app_code) + app_logged_in = True + except Exception: + app_logged_in = False + + return { + "logged_in": user_logged_in, + "app_logged_in": app_logged_in, + } + + +@web_ns.route("/logout") +class LogoutApi(Resource): + @setup_required + @web_ns.doc("web_app_logout") + @web_ns.doc(description="Logout user from web application") + @web_ns.doc( + responses={ + 200: "Logout successful", + } + ) + def post(self): + response = make_response({"result": "success"}) + # enterprise SSO sets same site to None in https deployment + # so we need to logout by calling api + clear_webapp_access_token_from_cookie(response, samesite="None") + return response @web_ns.route("/email-code-login") @@ -77,9 +146,11 @@ class EmailCodeLoginSendEmailApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() if args["language"] is not None and args["language"] == "zh-Hans": @@ -92,7 +163,6 @@ class EmailCodeLoginSendEmailApi(Resource): raise AuthenticationFailedError() else: token = WebAppAuthService.send_email_code_login_email(account=account, language=language) - return {"result": "success", "data": token} @@ -111,10 +181,12 @@ class EmailCodeLoginApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, 
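Reviewer note on the new `LogoutApi` above: it depends on `libs.token.clear_webapp_access_token_from_cookie`, which is not shown in this diff, so the cookie name and body below are assumptions. The point is that cross-site SSO cookies need `SameSite=None; Secure` to be deletable from an API response:

```python
from flask import Response

WEBAPP_ACCESS_TOKEN_COOKIE = "webapp_access_token"  # assumed cookie name

def clear_webapp_access_token_from_cookie_sketch(response: Response, samesite: str = "None") -> Response:
    # An empty value with max_age=0 tells the browser to drop the cookie.
    # SameSite=None cookies must also be Secure in modern browsers, which is
    # why the enterprise SSO deployment logs out via this API call.
    response.set_cookie(
        WEBAPP_ACCESS_TOKEN_COOKIE,
        "",
        max_age=0,
        httponly=True,
        secure=(samesite == "None"),
        samesite=samesite,
    )
    return response
```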
location="json") - parser.add_argument("token", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, location="json") + ) args = parser.parse_args() user_email = args["email"] @@ -136,4 +208,6 @@ class EmailCodeLoginApi(Resource): token = WebAppAuthService.login(account=account) AccountService.reset_login_error_rate_limit(args["email"]) - return {"result": "success", "data": {"access_token": token}} + response = make_response({"result": "success", "data": {"access_token": token}}) + # set_access_token_to_cookie(request, response, token, samesite="None", httponly=False) + return response diff --git a/api/controllers/web/message.py b/api/controllers/web/message.py index a52cccac13..9f9aa4838c 100644 --- a/api/controllers/web/message.py +++ b/api/controllers/web/message.py @@ -93,10 +93,12 @@ class MessageListApi(WebApiResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", required=True, type=uuid_value, location="args") - parser.add_argument("first_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("conversation_id", required=True, type=uuid_value, location="args") + .add_argument("first_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() try: @@ -143,9 +145,11 @@ class MessageFeedbackApi(WebApiResource): def post(self, app_model, end_user, message_id): message_id = str(message_id) - parser = reqparse.RequestParser() - parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json") - parser.add_argument("content", type=str, location="json", default=None) + parser = ( + reqparse.RequestParser() + .add_argument("rating", type=str, choices=["like", "dislike", None], location="json") + .add_argument("content", type=str, location="json", default=None) + ) args = parser.parse_args() try: @@ -193,8 +197,7 @@ class MessageMoreLikeThisApi(WebApiResource): message_id = str(message_id) - parser = reqparse.RequestParser() - parser.add_argument( + parser = reqparse.RequestParser().add_argument( "response_mode", type=str, required=True, choices=["blocking", "streaming"], location="args" ) args = parser.parse_args() diff --git a/api/controllers/web/passport.py b/api/controllers/web/passport.py index 6f7105a724..6a2e0b65fb 100644 --- a/api/controllers/web/passport.py +++ b/api/controllers/web/passport.py @@ -1,18 +1,19 @@ import uuid from datetime import UTC, datetime, timedelta -from flask import request +from flask import make_response, request from flask_restx import Resource from sqlalchemy import func, select from werkzeug.exceptions import NotFound, Unauthorized from configs import dify_config +from constants import HEADER_NAME_APP_CODE from controllers.web import web_ns from controllers.web.error import WebAppAuthRequiredError from extensions.ext_database import db from libs.passport import PassportService +from libs.token import extract_webapp_access_token from models.model import App, EndUser, Site -from services.enterprise.enterprise_service import EnterpriseService from 
services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService, WebAppAuthType @@ -32,25 +33,20 @@ class PassportResource(Resource): ) def get(self): system_features = FeatureService.get_system_features() - app_code = request.headers.get("X-App-Code") + app_code = request.headers.get(HEADER_NAME_APP_CODE) user_id = request.args.get("user_id") - web_app_access_token = request.args.get("web_app_access_token") - + access_token = extract_webapp_access_token(request) if app_code is None: raise Unauthorized("X-App-Code header is missing.") - - # exchange token for enterprise logined web user - enterprise_user_decoded = decode_enterprise_webapp_user_id(web_app_access_token) - if enterprise_user_decoded: - # a web user has already logged in, exchange a token for this app without redirecting to the login page - return exchange_token_for_existing_web_user( - app_code=app_code, enterprise_user_decoded=enterprise_user_decoded - ) - if system_features.webapp_auth.enabled: - app_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code=app_code) - if not app_settings or not app_settings.access_mode == "public": - raise WebAppAuthRequiredError() + enterprise_user_decoded = decode_enterprise_webapp_user_id(access_token) + app_auth_type = WebAppAuthService.get_app_auth_type(app_code=app_code) + if app_auth_type != WebAppAuthType.PUBLIC: + if not enterprise_user_decoded: + raise WebAppAuthRequiredError() + return exchange_token_for_existing_web_user( + app_code=app_code, enterprise_user_decoded=enterprise_user_decoded, auth_type=app_auth_type + ) # get site from db and check if it is normal site = db.session.scalar(select(Site).where(Site.code == app_code, Site.status == "normal")) @@ -99,9 +95,12 @@ class PassportResource(Resource): tk = PassportService().issue(payload) - return { - "access_token": tk, - } + response = make_response( + { + "access_token": tk, + } + ) + return response def decode_enterprise_webapp_user_id(jwt_token: str | None): @@ -118,7 +117,7 @@ def decode_enterprise_webapp_user_id(jwt_token: str | None): return decoded -def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: dict): +def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: dict, auth_type: WebAppAuthType): """ Exchange a token for an existing web user session. 
""" @@ -126,6 +125,8 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: end_user_id = enterprise_user_decoded.get("end_user_id") session_id = enterprise_user_decoded.get("session_id") user_auth_type = enterprise_user_decoded.get("auth_type") + exchanged_token_expires_unix = enterprise_user_decoded.get("exp") + if not user_auth_type: raise Unauthorized("Missing auth_type in the token.") @@ -137,13 +138,11 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: if not app_model or app_model.status != "normal" or not app_model.enable_site: raise NotFound() - app_auth_type = WebAppAuthService.get_app_auth_type(app_code=app_code) - - if app_auth_type == WebAppAuthType.PUBLIC: + if auth_type == WebAppAuthType.PUBLIC: return _exchange_for_public_app_token(app_model, site, enterprise_user_decoded) - elif app_auth_type == WebAppAuthType.EXTERNAL and user_auth_type != "external": + elif auth_type == WebAppAuthType.EXTERNAL and user_auth_type != "external": raise WebAppAuthRequiredError("Please login as external user.") - elif app_auth_type == WebAppAuthType.INTERNAL and user_auth_type != "internal": + elif auth_type == WebAppAuthType.INTERNAL and user_auth_type != "internal": raise WebAppAuthRequiredError("Please login as internal user.") end_user = None @@ -169,8 +168,11 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: ) db.session.add(end_user) db.session.commit() - exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES) - exp = int(exp_dt.timestamp()) + + exp = int((datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)).timestamp()) + if exchanged_token_expires_unix: + exp = int(exchanged_token_expires_unix) + payload = { "iss": site.id, "sub": "Web API Passport", @@ -184,9 +186,12 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: "exp": exp, } token: str = PassportService().issue(payload) - return { - "access_token": token, - } + resp = make_response( + { + "access_token": token, + } + ) + return resp def _exchange_for_public_app_token(app_model, site, token_decoded): @@ -219,9 +224,12 @@ def _exchange_for_public_app_token(app_model, site, token_decoded): tk = PassportService().issue(payload) - return { - "access_token": tk, - } + resp = make_response( + { + "access_token": tk, + } + ) + return resp def generate_session_id(): diff --git a/api/controllers/web/remote_files.py b/api/controllers/web/remote_files.py index 0983e30b9d..dac4b3da94 100644 --- a/api/controllers/web/remote_files.py +++ b/api/controllers/web/remote_files.py @@ -97,8 +97,7 @@ class RemoteFileUploadApi(WebApiResource): FileTooLargeError: File exceeds size limit UnsupportedFileTypeError: File type not supported """ - parser = reqparse.RequestParser() - parser.add_argument("url", type=str, required=True, help="URL is required") + parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required") args = parser.parse_args() url = args["url"] diff --git a/api/controllers/web/saved_message.py b/api/controllers/web/saved_message.py index 96f09c8d3c..865f3610a7 100644 --- a/api/controllers/web/saved_message.py +++ b/api/controllers/web/saved_message.py @@ -63,9 +63,11 @@ class SavedMessageListApi(WebApiResource): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", 
type=int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() return SavedMessageService.pagination_by_last_id(app_model, end_user, args["last_id"], args["limit"]) @@ -92,8 +94,7 @@ class SavedMessageListApi(WebApiResource): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=uuid_value, required=True, location="json") + parser = reqparse.RequestParser().add_argument("message_id", type=uuid_value, required=True, location="json") args = parser.parse_args() try: diff --git a/api/controllers/web/workflow.py b/api/controllers/web/workflow.py index 9a980148d9..3cbb07a296 100644 --- a/api/controllers/web/workflow.py +++ b/api/controllers/web/workflow.py @@ -58,9 +58,11 @@ class WorkflowRunApi(WebApiResource): if app_mode != AppMode.WORKFLOW: raise NotWorkflowAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("files", type=list, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("files", type=list, required=False, location="json") + ) args = parser.parse_args() try: diff --git a/api/controllers/web/wraps.py b/api/controllers/web/wraps.py index ba03c4eae4..9efd9f25d1 100644 --- a/api/controllers/web/wraps.py +++ b/api/controllers/web/wraps.py @@ -9,10 +9,13 @@ from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, NotFound, Unauthorized +from constants import HEADER_NAME_APP_CODE from controllers.web.error import WebAppAuthAccessDeniedError, WebAppAuthRequiredError from extensions.ext_database import db from libs.passport import PassportService +from libs.token import extract_webapp_passport from models.model import App, EndUser, Site +from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService, WebAppSettings from services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService @@ -35,22 +38,14 @@ def validate_jwt_token(view: Callable[Concatenate[App, EndUser, P], R] | None = return decorator -def decode_jwt_token(): +def decode_jwt_token(app_code: str | None = None): system_features = FeatureService.get_system_features() - app_code = str(request.headers.get("X-App-Code")) + if not app_code: + app_code = str(request.headers.get(HEADER_NAME_APP_CODE)) try: - auth_header = request.headers.get("Authorization") - if auth_header is None: - raise Unauthorized("Authorization header is missing.") - - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - - auth_scheme, tk = auth_header.split(None, 1) - auth_scheme = auth_scheme.lower() - - if auth_scheme != "bearer": - raise Unauthorized("Invalid Authorization header format. 
Expected 'Bearer ' format.") + tk = extract_webapp_passport(app_code, request) + if not tk: + raise Unauthorized("App token is missing.") decoded = PassportService().verify(tk) app_code = decoded.get("app_code") app_id = decoded.get("app_id") @@ -72,7 +67,8 @@ def decode_jwt_token(): app_web_auth_enabled = False webapp_settings = None if system_features.webapp_auth.enabled: - webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code=app_code) + app_id = AppService.get_app_id_by_code(app_code) + webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id) if not webapp_settings: raise NotFound("Web app settings not found.") app_web_auth_enabled = webapp_settings.access_mode != "public" @@ -87,8 +83,9 @@ def decode_jwt_token(): if system_features.webapp_auth.enabled: if not app_code: raise Unauthorized("Please re-login to access the web app.") + app_id = AppService.get_app_id_by_code(app_code) app_web_auth_enabled = ( - EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code=str(app_code)).access_mode != "public" + EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id).access_mode != "public" ) if app_web_auth_enabled: raise WebAppAuthRequiredError() @@ -129,7 +126,8 @@ def _validate_user_accessibility( raise WebAppAuthRequiredError("Web app settings not found.") if WebAppAuthService.is_app_require_permission_check(access_mode=webapp_settings.access_mode): - if not EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(user_id, app_code=app_code): + app_id = AppService.get_app_id_by_code(app_code) + if not EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(user_id, app_id): raise WebAppAuthAccessDeniedError() auth_type = decoded.get("auth_type") diff --git a/api/core/app/app_config/easy_ui_based_app/agent/manager.py b/api/core/app/app_config/easy_ui_based_app/agent/manager.py index eab26e5af9..c1f336fdde 100644 --- a/api/core/app/app_config/easy_ui_based_app/agent/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/agent/manager.py @@ -40,7 +40,7 @@ class AgentConfigManager: "credential_id": tool.get("credential_id", None), } - agent_tools.append(AgentToolEntity(**agent_tool_properties)) + agent_tools.append(AgentToolEntity.model_validate(agent_tool_properties)) if "strategy" in config["agent_mode"] and config["agent_mode"]["strategy"] not in { "react_router", diff --git a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py index 4b824bde76..aacafb2dad 100644 --- a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py @@ -1,4 +1,5 @@ import uuid +from typing import Literal, cast from core.app.app_config.entities import ( DatasetEntity, @@ -74,6 +75,9 @@ class DatasetConfigManager: return None query_variable = config.get("dataset_query_variable") + metadata_model_config_dict = dataset_configs.get("metadata_model_config") + metadata_filtering_conditions_dict = dataset_configs.get("metadata_filtering_conditions") + if dataset_configs["retrieval_model"] == "single": return DatasetEntity( dataset_ids=dataset_ids, @@ -82,18 +86,23 @@ class DatasetConfigManager: retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of( dataset_configs["retrieval_model"] ), - metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), - metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) - if 
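Reviewer note on the `get_app_access_mode_by_code` to `get_app_access_mode_by_id` migration above: the diff standardizes on resolving the app id from the public app code once, then keying enterprise lookups by id. `AppService.get_app_id_by_code` and `get_app_access_mode_by_id` are taken from the diff; the wiring below is illustrative only:

```python
def resolve_webapp_settings(app_code, app_service, enterprise_webapp_auth):
    # App ids are stable while app codes can rotate, so settings and
    # permission checks are keyed by id everywhere after this change.
    app_id = app_service.get_app_id_by_code(app_code)
    if not app_id:
        raise ValueError("Unknown app code")
    return enterprise_webapp_auth.get_app_access_mode_by_id(app_id)
```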
dataset_configs.get("metadata_model_config") + metadata_filtering_mode=cast( + Literal["disabled", "automatic", "manual"], + dataset_configs.get("metadata_filtering_mode", "disabled"), + ), + metadata_model_config=ModelConfig(**metadata_model_config_dict) + if isinstance(metadata_model_config_dict, dict) else None, - metadata_filtering_conditions=MetadataFilteringCondition( - **dataset_configs.get("metadata_filtering_conditions", {}) - ) - if dataset_configs.get("metadata_filtering_conditions") + metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict) + if isinstance(metadata_filtering_conditions_dict, dict) else None, ), ) else: + score_threshold_val = dataset_configs.get("score_threshold") + reranking_model_val = dataset_configs.get("reranking_model") + weights_val = dataset_configs.get("weights") + return DatasetEntity( dataset_ids=dataset_ids, retrieve_config=DatasetRetrieveConfigEntity( @@ -101,22 +110,23 @@ class DatasetConfigManager: retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of( dataset_configs["retrieval_model"] ), - top_k=dataset_configs.get("top_k", 4), - score_threshold=dataset_configs.get("score_threshold") - if dataset_configs.get("score_threshold_enabled", False) + top_k=int(dataset_configs.get("top_k", 4)), + score_threshold=float(score_threshold_val) + if dataset_configs.get("score_threshold_enabled", False) and score_threshold_val is not None else None, - reranking_model=dataset_configs.get("reranking_model"), - weights=dataset_configs.get("weights"), - reranking_enabled=dataset_configs.get("reranking_enabled", True), + reranking_model=reranking_model_val if isinstance(reranking_model_val, dict) else None, + weights=weights_val if isinstance(weights_val, dict) else None, + reranking_enabled=bool(dataset_configs.get("reranking_enabled", True)), rerank_mode=dataset_configs.get("reranking_mode", "reranking_model"), - metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), - metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) - if dataset_configs.get("metadata_model_config") + metadata_filtering_mode=cast( + Literal["disabled", "automatic", "manual"], + dataset_configs.get("metadata_filtering_mode", "disabled"), + ), + metadata_model_config=ModelConfig(**metadata_model_config_dict) + if isinstance(metadata_model_config_dict, dict) else None, - metadata_filtering_conditions=MetadataFilteringCondition( - **dataset_configs.get("metadata_filtering_conditions", {}) - ) - if dataset_configs.get("metadata_filtering_conditions") + metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict) + if isinstance(metadata_filtering_conditions_dict, dict) else None, ), ) @@ -134,18 +144,17 @@ class DatasetConfigManager: config = cls.extract_dataset_config_for_legacy_compatibility(tenant_id, app_mode, config) # dataset_configs - if not config.get("dataset_configs"): - config["dataset_configs"] = {"retrieval_model": "single"} + if "dataset_configs" not in config or not config.get("dataset_configs"): + config["dataset_configs"] = {} + config["dataset_configs"]["retrieval_model"] = config["dataset_configs"].get("retrieval_model", "single") if not isinstance(config["dataset_configs"], dict): raise ValueError("dataset_configs must be of object type") - if not config["dataset_configs"].get("datasets"): + if "datasets" not in config["dataset_configs"] or not config["dataset_configs"].get("datasets"): config["dataset_configs"]["datasets"] = 
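Reviewer note on the `isinstance` guards introduced in `DatasetConfigManager` above: truthiness alone let a stray non-dict value (e.g. a string left by an old config) crash on `**raw`; checking the type makes the `None` fallback explicit. A sketch with a stand-in model:

```python
from pydantic import BaseModel

class MetadataFilteringConditionSketch(BaseModel):  # stand-in model
    logical_operator: str = "and"

def parse_filtering_conditions(dataset_configs: dict):
    raw = dataset_configs.get("metadata_filtering_conditions")
    # Build the sub-model only when the raw value is actually a dict;
    # any other truthy value falls through to None instead of raising.
    return MetadataFilteringConditionSketch(**raw) if isinstance(raw, dict) else None

assert parse_filtering_conditions({}) is None
assert parse_filtering_conditions({"metadata_filtering_conditions": "oops"}) is None
assert parse_filtering_conditions({"metadata_filtering_conditions": {}}) is not None
```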
{"strategy": "router", "datasets": []} - need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get( - "datasets", {} - ).get("datasets") + need_manual_query_datasets = config.get("dataset_configs", {}).get("datasets", {}).get("datasets") if need_manual_query_datasets and app_mode == AppMode.COMPLETION: # Only check when mode is completion @@ -166,8 +175,8 @@ class DatasetConfigManager: :param config: app model config args """ # Extract dataset config for legacy compatibility - if not config.get("agent_mode"): - config["agent_mode"] = {"enabled": False, "tools": []} + if "agent_mode" not in config or not config.get("agent_mode"): + config["agent_mode"] = {} if not isinstance(config["agent_mode"], dict): raise ValueError("agent_mode must be of object type") @@ -180,19 +189,22 @@ class DatasetConfigManager: raise ValueError("enabled in agent_mode must be of boolean type") # tools - if not config["agent_mode"].get("tools"): + if "tools" not in config["agent_mode"] or not config["agent_mode"].get("tools"): config["agent_mode"]["tools"] = [] if not isinstance(config["agent_mode"]["tools"], list): raise ValueError("tools in agent_mode must be a list of objects") # strategy - if not config["agent_mode"].get("strategy"): - config["agent_mode"]["strategy"] = PlanningStrategy.ROUTER.value + if "strategy" not in config["agent_mode"] or not config["agent_mode"].get("strategy"): + config["agent_mode"]["strategy"] = PlanningStrategy.ROUTER has_datasets = False - if config["agent_mode"]["strategy"] in {PlanningStrategy.ROUTER.value, PlanningStrategy.REACT_ROUTER.value}: - for tool in config["agent_mode"]["tools"]: + if config.get("agent_mode", {}).get("strategy") in { + PlanningStrategy.ROUTER, + PlanningStrategy.REACT_ROUTER, + }: + for tool in config.get("agent_mode", {}).get("tools", []): key = list(tool.keys())[0] if key == "dataset": # old style, use tool name as key @@ -217,7 +229,7 @@ class DatasetConfigManager: has_datasets = True - need_manual_query_datasets = has_datasets and config["agent_mode"]["enabled"] + need_manual_query_datasets = has_datasets and config.get("agent_mode", {}).get("enabled") if need_manual_query_datasets and app_mode == AppMode.COMPLETION: # Only check when mode is completion diff --git a/api/core/app/app_config/easy_ui_based_app/model_config/converter.py b/api/core/app/app_config/easy_ui_based_app/model_config/converter.py index 5b5eefe315..b816c8d7d0 100644 --- a/api/core/app/app_config/easy_ui_based_app/model_config/converter.py +++ b/api/core/app/app_config/easy_ui_based_app/model_config/converter.py @@ -68,9 +68,13 @@ class ModelConfigConverter: # get model mode model_mode = model_config.mode if not model_mode: - model_mode = LLMMode.CHAT.value + model_mode = LLMMode.CHAT if model_schema and model_schema.model_properties.get(ModelPropertyKey.MODE): - model_mode = LLMMode(model_schema.model_properties[ModelPropertyKey.MODE]).value + try: + model_mode = LLMMode(model_schema.model_properties[ModelPropertyKey.MODE]) + except ValueError: + # Fall back to CHAT mode if the stored value is invalid + model_mode = LLMMode.CHAT if not model_schema: raise ValueError(f"Model {model_name} not exist.") diff --git a/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py b/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py index ec4f6074ab..21614c010c 100644 --- a/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py @@ -100,7 
+100,7 @@ class PromptTemplateConfigManager: if config["model"]["mode"] not in model_mode_vals: raise ValueError(f"model.mode must be in {model_mode_vals} when prompt_type is advanced") - if app_mode == AppMode.CHAT and config["model"]["mode"] == ModelMode.COMPLETION.value: + if app_mode == AppMode.CHAT and config["model"]["mode"] == ModelMode.COMPLETION: user_prefix = config["completion_prompt_config"]["conversation_histories_role"]["user_prefix"] assistant_prefix = config["completion_prompt_config"]["conversation_histories_role"]["assistant_prefix"] @@ -110,7 +110,7 @@ class PromptTemplateConfigManager: if not assistant_prefix: config["completion_prompt_config"]["conversation_histories_role"]["assistant_prefix"] = "Assistant" - if config["model"]["mode"] == ModelMode.CHAT.value: + if config["model"]["mode"] == ModelMode.CHAT: prompt_list = config["chat_prompt_config"]["prompt"] if len(prompt_list) > 10: diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index b6234491c5..feb0d3358c 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -447,6 +447,8 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): "message_id": message.id, "context": context, "variable_loader": variable_loader, + "workflow_execution_repository": workflow_execution_repository, + "workflow_node_execution_repository": workflow_node_execution_repository, }, ) @@ -466,8 +468,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation=conversation, message=message, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, stream=stream, draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from, account=user), ) @@ -483,6 +483,8 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): message_id: str, context: contextvars.Context, variable_loader: VariableLoader, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ): """ Generate worker in a new thread. 
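Reviewer note on the enum changes running through this diff (`ModelMode.COMPLETION`, `LLMMode.CHAT`, `DefaultEndUserSessionID.DEFAULT_SESSION_ID` compared without `.value`, plus the new `try/except ValueError` in `ModelConfigConverter`): both only work if these enums derive from `str`, which the direct comparisons imply but this diff does not show. A sketch under that assumption:

```python
from enum import StrEnum

class LLMModeSketch(StrEnum):  # stand-in for the real enum
    CHAT = "chat"
    COMPLETION = "completion"

# str-based enums compare equal to their raw value, so ".value" is redundant:
assert LLMModeSketch.COMPLETION == "completion"

def resolve_model_mode(stored: str) -> LLMModeSketch:
    # Defensive coercion as in ModelConfigConverter: fall back to CHAT
    # when a provider schema advertises an unknown mode string.
    try:
        return LLMModeSketch(stored)
    except ValueError:
        return LLMModeSketch.CHAT

assert resolve_model_mode("completion") is LLMModeSketch.COMPLETION
assert resolve_model_mode("bogus") is LLMModeSketch.CHAT
```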
@@ -538,6 +540,8 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): workflow=workflow, system_user_id=system_user_id, app=app, + workflow_execution_repository=workflow_execution_repository, + workflow_node_execution_repository=workflow_node_execution_repository, ) try: @@ -570,8 +574,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation: Conversation, message: Message, user: Union[Account, EndUser], - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: @@ -584,7 +586,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): :param message: message :param user: account or end user :param stream: is stream - :param workflow_node_execution_repository: optional repository for workflow node execution :return: """ # init generate task pipeline @@ -596,8 +597,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): message=message, user=user, dialogue_count=self._dialogue_count, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, stream=stream, draft_var_saver_factory=draft_var_saver_factory, ) diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index 919b135ec9..587c663482 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -23,8 +23,12 @@ from core.app.features.annotation_reply.annotation_reply import AnnotationReplyF from core.moderation.base import ModerationError from core.moderation.input_moderation import InputModeration from core.variables.variables import VariableUnion -from core.workflow.entities import GraphRuntimeState, VariablePool +from core.workflow.enums import WorkflowType from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel +from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry @@ -55,6 +59,8 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): workflow: Workflow, system_user_id: str, app: App, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ): super().__init__( queue_manager=queue_manager, @@ -68,11 +74,24 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): self._workflow = workflow self.system_user_id = system_user_id self._app = app + self._workflow_execution_repository = workflow_execution_repository + self._workflow_node_execution_repository = workflow_node_execution_repository def run(self): app_config = self.application_generate_entity.app_config app_config = cast(AdvancedChatAppConfig, app_config) + system_inputs = SystemVariable( + query=self.application_generate_entity.query, + files=self.application_generate_entity.files, + conversation_id=self.conversation.id, + 
user_id=self.system_user_id, + dialogue_count=self._dialogue_count, + app_id=app_config.app_id, + workflow_id=app_config.workflow_id, + workflow_execution_id=self.application_generate_entity.workflow_run_id, + ) + with Session(db.engine, expire_on_commit=False) as session: app_record = session.scalar(select(App).where(App.id == app_config.app_id)) @@ -89,7 +108,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): else: inputs = self.application_generate_entity.inputs query = self.application_generate_entity.query - files = self.application_generate_entity.files # moderation if self.handle_input_moderation( @@ -114,17 +132,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): conversation_variables = self._initialize_conversation_variables() # Create a variable pool. - system_inputs = SystemVariable( - query=query, - files=files, - conversation_id=self.conversation.id, - user_id=self.system_user_id, - dialogue_count=self._dialogue_count, - app_id=app_config.app_id, - workflow_id=app_config.workflow_id, - workflow_execution_id=self.application_generate_entity.workflow_run_id, - ) - # init variable pool variable_pool = VariablePool( system_variables=system_inputs, @@ -172,6 +179,23 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): command_channel=command_channel, ) + self._queue_manager.graph_runtime_state = graph_runtime_state + + persistence_layer = WorkflowPersistenceLayer( + application_generate_entity=self.application_generate_entity, + workflow_info=PersistenceWorkflowInfo( + workflow_id=self._workflow.id, + workflow_type=WorkflowType(self._workflow.type), + version=self._workflow.version, + graph_data=self._workflow.graph_dict, + ), + workflow_execution_repository=self._workflow_execution_repository, + workflow_node_execution_repository=self._workflow_node_execution_repository, + trace_manager=self.application_generate_entity.trace_manager, + ) + + workflow_entry.graph_engine.layer(persistence_layer) + generator = workflow_entry.run() for event in generator: diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index e021b0aca7..8c0102d9bd 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -11,6 +11,7 @@ from sqlalchemy.orm import Session from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom +from core.app.apps.common.graph_runtime_state_support import GraphRuntimeStateSupport from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter from core.app.entities.app_invoke_entities import ( AdvancedChatAppGenerateEntity, @@ -60,25 +61,21 @@ from core.app.task_pipeline.message_cycle_manager import MessageCycleManager from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk from core.model_runtime.entities.llm_entities import LLMUsage from core.ops.ops_trace_manager import TraceQueueManager -from core.workflow.entities import GraphRuntimeState -from core.workflow.enums import WorkflowExecutionStatus, WorkflowType +from core.workflow.enums import WorkflowExecutionStatus from core.workflow.nodes import NodeType from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository 
import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState from core.workflow.system_variable import SystemVariable -from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from models import Conversation, EndUser, Message, MessageFile -from models.account import Account +from models import Account, Conversation, EndUser, Message, MessageFile from models.enums import CreatorUserRole from models.workflow import Workflow logger = logging.getLogger(__name__) -class AdvancedChatAppGenerateTaskPipeline: +class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport): """ AdvancedChatAppGenerateTaskPipeline is a class that generate stream output and state management for Application. """ @@ -93,8 +90,6 @@ class AdvancedChatAppGenerateTaskPipeline: user: Union[Account, EndUser], stream: bool, dialogue_count: int, - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, ): self._base_task_pipeline = BasedGenerateTaskPipeline( @@ -114,31 +109,20 @@ class AdvancedChatAppGenerateTaskPipeline: else: raise NotImplementedError(f"User type not supported: {type(user)}") - self._workflow_cycle_manager = WorkflowCycleManager( - application_generate_entity=application_generate_entity, - workflow_system_variables=SystemVariable( - query=message.query, - files=application_generate_entity.files, - conversation_id=conversation.id, - user_id=user_session_id, - dialogue_count=dialogue_count, - app_id=application_generate_entity.app_config.app_id, - workflow_id=workflow.id, - workflow_execution_id=application_generate_entity.workflow_run_id, - ), - workflow_info=CycleManagerWorkflowInfo( - workflow_id=workflow.id, - workflow_type=WorkflowType(workflow.type), - version=workflow.version, - graph_data=workflow.graph_dict, - ), - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, + self._workflow_system_variables = SystemVariable( + query=message.query, + files=application_generate_entity.files, + conversation_id=conversation.id, + user_id=user_session_id, + dialogue_count=dialogue_count, + app_id=application_generate_entity.app_config.app_id, + workflow_id=workflow.id, + workflow_execution_id=application_generate_entity.workflow_run_id, ) - self._workflow_response_converter = WorkflowResponseConverter( application_generate_entity=application_generate_entity, user=user, + system_variables=self._workflow_system_variables, ) self._task_state = WorkflowTaskState() @@ -157,6 +141,8 @@ class AdvancedChatAppGenerateTaskPipeline: self._recorded_files: list[Mapping[str, Any]] = [] self._workflow_run_id: str = "" self._draft_var_saver_factory = draft_var_saver_factory + self._graph_runtime_state: GraphRuntimeState | None = None + self._seed_graph_runtime_state_from_queue_manager() def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: """ @@ -289,12 +275,6 @@ class AdvancedChatAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState: - """Fluent validation for graph runtime state.""" - if not graph_runtime_state: - raise ValueError("graph runtime state 
not initialized.") - return graph_runtime_state - def _handle_ping_event(self, event: QueuePingEvent, **kwargs) -> Generator[PingStreamResponse, None, None]: """Handle ping events.""" yield self._base_task_pipeline.ping_stream_response() @@ -305,21 +285,28 @@ class AdvancedChatAppGenerateTaskPipeline: err = self._base_task_pipeline.handle_error(event=event, session=session, message_id=self._message_id) yield self._base_task_pipeline.error_to_stream_response(err) - def _handle_workflow_started_event(self, *args, **kwargs) -> Generator[StreamResponse, None, None]: + def _handle_workflow_started_event( + self, + event: QueueWorkflowStartedEvent, + **kwargs, + ) -> Generator[StreamResponse, None, None]: """Handle workflow started events.""" - with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start() - self._workflow_run_id = workflow_execution.id_ + runtime_state = self._resolve_graph_runtime_state() + run_id = self._extract_workflow_run_id(runtime_state) + self._workflow_run_id = run_id + with self._database_session() as session: message = self._get_message(session=session) if not message: raise ValueError(f"Message not found: {self._message_id}") - message.workflow_run_id = workflow_execution.id_ - workflow_start_resp = self._workflow_response_converter.workflow_start_to_stream_response( - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + message.workflow_run_id = run_id + + workflow_start_resp = self._workflow_response_converter.workflow_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_run_id=run_id, + workflow_id=self._workflow_id, + ) yield workflow_start_resp @@ -327,13 +314,9 @@ class AdvancedChatAppGenerateTaskPipeline: """Handle node retry events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried( - workflow_execution_id=self._workflow_run_id, event=event - ) node_retry_resp = self._workflow_response_converter.workflow_node_retry_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if node_retry_resp: @@ -345,14 +328,9 @@ class AdvancedChatAppGenerateTaskPipeline: """Handle node started events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start( - workflow_execution_id=self._workflow_run_id, event=event - ) - node_start_resp = self._workflow_response_converter.workflow_node_start_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if node_start_resp: @@ -368,14 +346,12 @@ class AdvancedChatAppGenerateTaskPipeline: self._workflow_response_converter.fetch_files_from_node_outputs(event.outputs or {}) ) - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(event=event) node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_finish_resp: yield node_finish_resp @@ -386,16 +362,13 @@ class AdvancedChatAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle 
various node failure events.""" - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed(event=event) - node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if isinstance(event, QueueNodeExceptionEvent): - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_finish_resp: yield node_finish_resp @@ -505,29 +478,19 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: QueueWorkflowSucceededEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow succeeded events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) - - with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - conversation_id=self._conversation_id, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + status=WorkflowExecutionStatus.SUCCEEDED, + graph_runtime_state=validated_state, + ) yield workflow_finish_resp self._base_task_pipeline.queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE) @@ -536,30 +499,20 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: QueueWorkflowPartialSuccessEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow partial success events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) - - with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - exceptions_count=event.exceptions_count, - conversation_id=self._conversation_id, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + 
status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED, + graph_runtime_state=validated_state, + exceptions_count=event.exceptions_count, + ) yield workflow_finish_resp self._base_task_pipeline.queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE) @@ -568,32 +521,25 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: QueueWorkflowFailedEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow failed events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) + validated_state = self._ensure_graph_runtime_initialized() + + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + status=WorkflowExecutionStatus.FAILED, + graph_runtime_state=validated_state, + error=event.error, + exceptions_count=event.exceptions_count, + ) with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - status=WorkflowExecutionStatus.FAILED, - error_message=event.error, - conversation_id=self._conversation_id, - trace_manager=trace_manager, - exceptions_count=event.exceptions_count, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) - err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_execution.error_message}")) + err_event = QueueErrorEvent(error=ValueError(f"Run failed: {event.error}")) err = self._base_task_pipeline.handle_error(event=err_event, session=session, message_id=self._message_id) yield workflow_finish_resp @@ -608,25 +554,23 @@ class AdvancedChatAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle stop events.""" - if self._workflow_run_id and graph_runtime_state: + _ = trace_manager + resolved_state = None + if self._workflow_run_id: + resolved_state = self._resolve_graph_runtime_state(graph_runtime_state) + + if self._workflow_run_id and resolved_state: + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + status=WorkflowExecutionStatus.STOPPED, + graph_runtime_state=resolved_state, + error=event.get_stop_reason(), + ) + with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id=self._workflow_run_id, - total_tokens=graph_runtime_state.total_tokens, - total_steps=graph_runtime_state.node_run_steps, - status=WorkflowExecutionStatus.STOPPED, - error_message=event.get_stop_reason(), - conversation_id=self._conversation_id, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - 
workflow_execution=workflow_execution, - ) # Save message - self._save_message(session=session, graph_runtime_state=graph_runtime_state) + self._save_message(session=session, graph_runtime_state=resolved_state) yield workflow_finish_resp elif event.stopped_by in ( @@ -648,7 +592,7 @@ class AdvancedChatAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle advanced chat message end events.""" - self._ensure_graph_runtime_initialized(graph_runtime_state) + resolved_state = self._ensure_graph_runtime_initialized(graph_runtime_state) output_moderation_answer = self._base_task_pipeline.handle_output_moderation_when_task_finished( self._task_state.answer @@ -662,7 +606,7 @@ class AdvancedChatAppGenerateTaskPipeline: # Save message with self._database_session() as session: - self._save_message(session=session, graph_runtime_state=graph_runtime_state) + self._save_message(session=session, graph_runtime_state=resolved_state) yield self._message_end_to_stream_response() @@ -671,10 +615,6 @@ class AdvancedChatAppGenerateTaskPipeline: ) -> Generator[StreamResponse, None, None]: """Handle retriever resources events.""" self._message_cycle_manager.handle_retriever_resources(event) - - with self._database_session() as session: - message = self._get_message(session=session) - message.message_metadata = self._task_state.metadata.model_dump_json() return yield # Make this a generator @@ -683,10 +623,6 @@ class AdvancedChatAppGenerateTaskPipeline: ) -> Generator[StreamResponse, None, None]: """Handle annotation reply events.""" self._message_cycle_manager.handle_annotation_reply(event) - - with self._database_session() as session: - message = self._get_message(session=session) - message.message_metadata = self._task_state.metadata.model_dump_json() return yield # Make this a generator @@ -740,7 +676,6 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: Any, *, - graph_runtime_state: GraphRuntimeState | None = None, tts_publisher: AppGeneratorTTSPublisher | None = None, trace_manager: TraceQueueManager | None = None, queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None, @@ -753,7 +688,6 @@ class AdvancedChatAppGenerateTaskPipeline: if handler := handlers.get(event_type): yield from handler( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -770,7 +704,6 @@ class AdvancedChatAppGenerateTaskPipeline: ): yield from self._handle_node_failed_events( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -789,15 +722,12 @@ class AdvancedChatAppGenerateTaskPipeline: Process stream response using elegant Fluent Python patterns. Maintains exact same functionality as original 57-if-statement version. 
""" - # Initialize graph runtime state - graph_runtime_state: GraphRuntimeState | None = None - for queue_message in self._base_task_pipeline.queue_manager.listen(): event = queue_message.event match event: case QueueWorkflowStartedEvent(): - graph_runtime_state = event.graph_runtime_state + self._resolve_graph_runtime_state() yield from self._handle_workflow_started_event(event) case QueueErrorEvent(): @@ -805,15 +735,11 @@ class AdvancedChatAppGenerateTaskPipeline: break case QueueWorkflowFailedEvent(): - yield from self._handle_workflow_failed_event( - event, graph_runtime_state=graph_runtime_state, trace_manager=trace_manager - ) + yield from self._handle_workflow_failed_event(event, trace_manager=trace_manager) break case QueueStopEvent(): - yield from self._handle_stop_event( - event, graph_runtime_state=graph_runtime_state, trace_manager=trace_manager - ) + yield from self._handle_stop_event(event, graph_runtime_state=None, trace_manager=trace_manager) break # Handle all other events through elegant dispatch @@ -821,7 +747,6 @@ class AdvancedChatAppGenerateTaskPipeline: if responses := list( self._dispatch_event( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -879,6 +804,12 @@ class AdvancedChatAppGenerateTaskPipeline: else: self._task_state.metadata.usage = LLMUsage.empty_usage() + def _seed_graph_runtime_state_from_queue_manager(self) -> None: + """Bootstrap the cached runtime state from the queue manager when present.""" + candidate = self._base_task_pipeline.queue_manager.graph_runtime_state + if candidate is not None: + self._graph_runtime_state = candidate + def _message_end_to_stream_response(self) -> MessageEndStreamResponse: """ Message end to stream response. 
diff --git a/api/core/app/apps/agent_chat/app_config_manager.py b/api/core/app/apps/agent_chat/app_config_manager.py index 9ce841f432..801619ddbc 100644 --- a/api/core/app/apps/agent_chat/app_config_manager.py +++ b/api/core/app/apps/agent_chat/app_config_manager.py @@ -186,7 +186,7 @@ class AgentChatAppConfigManager(BaseAppConfigManager): raise ValueError("enabled in agent_mode must be of boolean type") if not agent_mode.get("strategy"): - agent_mode["strategy"] = PlanningStrategy.ROUTER.value + agent_mode["strategy"] = PlanningStrategy.ROUTER if agent_mode["strategy"] not in [member.value for member in list(PlanningStrategy.__members__.values())]: raise ValueError("strategy in agent_mode must be in the specified strategy list") diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py index c6d98374c1..7bd3b8a56e 100644 --- a/api/core/app/apps/agent_chat/app_generator.py +++ b/api/core/app/apps/agent_chat/app_generator.py @@ -211,8 +211,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): user=user, stream=streaming, ) - # FIXME: Type hinting issue here, ignore it for now, will fix it later - return AgentChatAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from) # type: ignore + return AgentChatAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from) def _generate_worker( self, diff --git a/api/core/app/apps/agent_chat/app_runner.py b/api/core/app/apps/agent_chat/app_runner.py index 388bed5255..759398b556 100644 --- a/api/core/app/apps/agent_chat/app_runner.py +++ b/api/core/app/apps/agent_chat/app_runner.py @@ -198,9 +198,9 @@ class AgentChatAppRunner(AppRunner): # start agent runner if agent_entity.strategy == AgentEntity.Strategy.CHAIN_OF_THOUGHT: # check LLM mode - if model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT.value: + if model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT: runner_cls = CotChatAgentRunner - elif model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.COMPLETION.value: + elif model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.COMPLETION: runner_cls = CotCompletionAgentRunner else: raise ValueError(f"Invalid LLM mode: {model_schema.model_properties.get(ModelPropertyKey.MODE)}") diff --git a/api/core/app/apps/base_app_queue_manager.py b/api/core/app/apps/base_app_queue_manager.py index fdba952eeb..698eee9894 100644 --- a/api/core/app/apps/base_app_queue_manager.py +++ b/api/core/app/apps/base_app_queue_manager.py @@ -1,9 +1,13 @@ +import logging import queue +import threading import time from abc import abstractmethod from enum import IntEnum, auto from typing import Any +from cachetools import TTLCache, cachedmethod +from redis.exceptions import RedisError from sqlalchemy.orm import DeclarativeMeta from configs import dify_config @@ -16,8 +20,11 @@ from core.app.entities.queue_entities import ( QueueStopEvent, WorkflowQueueMessage, ) +from core.workflow.runtime import GraphRuntimeState from extensions.ext_redis import redis_client +logger = logging.getLogger(__name__) + class PublishFrom(IntEnum): APPLICATION_MANAGER = auto() @@ -35,13 +42,15 @@ class AppQueueManager: self.invoke_from = invoke_from # Public accessor for invoke_from user_prefix = "account" if self._invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end-user" - redis_client.setex( - AppQueueManager._generate_task_belong_cache_key(self._task_id), 1800, f"{user_prefix}-{self._user_id}" - ) + 
self._task_belong_cache_key = AppQueueManager._generate_task_belong_cache_key(self._task_id) + redis_client.setex(self._task_belong_cache_key, 1800, f"{user_prefix}-{self._user_id}") q: queue.Queue[WorkflowQueueMessage | MessageQueueMessage | None] = queue.Queue() self._q = q + self._graph_runtime_state: GraphRuntimeState | None = None + self._stopped_cache: TTLCache[tuple, bool] = TTLCache(maxsize=1, ttl=1) + self._cache_lock = threading.Lock() def listen(self): """ @@ -79,9 +88,21 @@ class AppQueueManager: Stop listen to queue :return: """ + self._clear_task_belong_cache() self._q.put(None) - def publish_error(self, e, pub_from: PublishFrom): + def _clear_task_belong_cache(self) -> None: + """ + Remove the task belong cache key once listening is finished. + """ + try: + redis_client.delete(self._task_belong_cache_key) + except RedisError: + logger.exception( + "Failed to clear task belong cache for task %s (key: %s)", self._task_id, self._task_belong_cache_key + ) + + def publish_error(self, e, pub_from: PublishFrom) -> None: """ Publish error :param e: error @@ -90,6 +111,16 @@ class AppQueueManager: """ self.publish(QueueErrorEvent(error=e), pub_from) + @property + def graph_runtime_state(self) -> GraphRuntimeState | None: + """Retrieve the attached graph runtime state, if available.""" + return self._graph_runtime_state + + @graph_runtime_state.setter + def graph_runtime_state(self, graph_runtime_state: GraphRuntimeState | None) -> None: + """Attach the live graph runtime state reference for downstream consumers.""" + self._graph_runtime_state = graph_runtime_state + def publish(self, event: AppQueueEvent, pub_from: PublishFrom): """ Publish event to queue @@ -142,6 +173,7 @@ class AppQueueManager: stopped_cache_key = cls._generate_stopped_cache_key(task_id) redis_client.setex(stopped_cache_key, 600, 1) + @cachedmethod(lambda self: self._stopped_cache, lock=lambda self: self._cache_lock) def _is_stopped(self) -> bool: """ Check if task is stopped diff --git a/api/core/app/apps/base_app_runner.py b/api/core/app/apps/base_app_runner.py index e7db3bc41b..61ac040c05 100644 --- a/api/core/app/apps/base_app_runner.py +++ b/api/core/app/apps/base_app_runner.py @@ -61,9 +61,6 @@ class AppRunner: if model_context_tokens is None: return -1 - if max_tokens is None: - max_tokens = 0 - prompt_tokens = model_instance.get_llm_num_tokens(prompt_messages) if prompt_tokens + max_tokens > model_context_tokens: diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index 8bd956b314..c1251d2feb 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -23,7 +23,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from extensions.ext_database import db from factories import file_factory -from models.account import Account +from models import Account from models.model import App, EndUser from services.conversation_service import ConversationService diff --git a/api/core/app/apps/common/graph_runtime_state_support.py b/api/core/app/apps/common/graph_runtime_state_support.py new file mode 100644 index 0000000000..0b03149665 --- /dev/null +++ b/api/core/app/apps/common/graph_runtime_state_support.py @@ -0,0 +1,55 @@ +"""Shared helpers for managing GraphRuntimeState across task pipelines.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from core.workflow.runtime import GraphRuntimeState + +if TYPE_CHECKING: + from 
core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline + + +class GraphRuntimeStateSupport: + """ + Mixin that centralises common GraphRuntimeState access patterns used by task pipelines. + + Subclasses are expected to provide: + * `_base_task_pipeline` – exposing the queue manager with an optional cached runtime state. + * `_graph_runtime_state` attribute used as the local cache for the runtime state. + """ + + _base_task_pipeline: BasedGenerateTaskPipeline + _graph_runtime_state: GraphRuntimeState | None = None + + def _ensure_graph_runtime_initialized( + self, + graph_runtime_state: GraphRuntimeState | None = None, + ) -> GraphRuntimeState: + """Validate and return the active graph runtime state.""" + return self._resolve_graph_runtime_state(graph_runtime_state) + + def _extract_workflow_run_id(self, graph_runtime_state: GraphRuntimeState) -> str: + system_variables = graph_runtime_state.variable_pool.system_variables + if not system_variables or not system_variables.workflow_execution_id: + raise ValueError("workflow_execution_id missing from runtime state") + return str(system_variables.workflow_execution_id) + + def _resolve_graph_runtime_state( + self, + graph_runtime_state: GraphRuntimeState | None = None, + ) -> GraphRuntimeState: + """Return the cached runtime state or bootstrap it from the queue manager.""" + if graph_runtime_state is not None: + self._graph_runtime_state = graph_runtime_state + return graph_runtime_state + + if self._graph_runtime_state is None: + candidate = self._base_task_pipeline.queue_manager.graph_runtime_state + if candidate is not None: + self._graph_runtime_state = candidate + + if self._graph_runtime_state is None: + raise ValueError("graph runtime state not initialized.") + + return self._graph_runtime_state diff --git a/api/core/app/apps/common/workflow_response_converter.py b/api/core/app/apps/common/workflow_response_converter.py index 7c7a4fd6ac..eebaaaff80 100644 --- a/api/core/app/apps/common/workflow_response_converter.py +++ b/api/core/app/apps/common/workflow_response_converter.py @@ -1,11 +1,10 @@ import time from collections.abc import Mapping, Sequence -from datetime import UTC, datetime -from typing import Any, Union +from dataclasses import dataclass +from datetime import datetime +from typing import Any, NewType, Union -from sqlalchemy.orm import Session - -from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity +from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity from core.app.entities.queue_entities import ( QueueAgentLogEvent, QueueIterationCompletedEvent, @@ -39,54 +38,196 @@ from core.plugin.impl.datasource import PluginDatasourceManager from core.tools.entities.tool_entities import ToolProviderType from core.tools.tool_manager import ToolManager from core.variables.segments import ArrayFileSegment, FileSegment, Segment -from core.workflow.entities import WorkflowExecution, WorkflowNodeExecution -from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus +from core.workflow.enums import ( + NodeType, + SystemVariableKey, + WorkflowExecutionStatus, + WorkflowNodeExecutionMetadataKey, + WorkflowNodeExecutionStatus, +) +from core.workflow.runtime import GraphRuntimeState +from core.workflow.system_variable import SystemVariable +from core.workflow.workflow_entry import WorkflowEntry from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter from 
libs.datetime_utils import naive_utc_now -from models import ( - Account, - EndUser, -) -from services.variable_truncator import VariableTruncator +from models import Account, EndUser +from services.variable_truncator import BaseTruncator, DummyVariableTruncator, VariableTruncator + +NodeExecutionId = NewType("NodeExecutionId", str) + + +@dataclass(slots=True) +class _NodeSnapshot: + """In-memory cache for node metadata between start and completion events.""" + + title: str + index: int + start_at: datetime + iteration_id: str = "" + """Empty string means the node is not executing inside an iteration.""" + loop_id: str = "" + """Empty string means the node is not executing inside a loop.""" class WorkflowResponseConverter: + _truncator: BaseTruncator + def __init__( self, *, application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity], user: Union[Account, EndUser], + system_variables: SystemVariable, ): self._application_generate_entity = application_generate_entity self._user = user - self._truncator = VariableTruncator.default() + self._system_variables = system_variables + self._workflow_inputs = self._prepare_workflow_inputs() + + # Disable truncation for SERVICE_API calls to keep backward compatibility. + if application_generate_entity.invoke_from == InvokeFrom.SERVICE_API: + self._truncator = DummyVariableTruncator() + else: + self._truncator = VariableTruncator.default() + + self._node_snapshots: dict[NodeExecutionId, _NodeSnapshot] = {} + self._workflow_execution_id: str | None = None + self._workflow_started_at: datetime | None = None + + # ------------------------------------------------------------------ + # Workflow lifecycle helpers + # ------------------------------------------------------------------ + def _prepare_workflow_inputs(self) -> Mapping[str, Any]: + inputs = dict(self._application_generate_entity.inputs) + for field_name, value in self._system_variables.to_dict().items(): + # TODO(@future-refactor): store system variables separately from user inputs so we don't + # need to flatten `sys.*` entries into the input payload just for rerun/export tooling. + if field_name == SystemVariableKey.CONVERSATION_ID: + # Conversation IDs are session-scoped; omitting them keeps workflow inputs + # reusable without pinning new runs to a prior conversation. 
+ continue + inputs[f"sys.{field_name}"] = value + handled = WorkflowEntry.handle_special_values(inputs) + return dict(handled or {}) + + def _ensure_workflow_run_id(self, workflow_run_id: str | None = None) -> str: + """Return the memoized workflow run id, optionally seeding it during start events.""" + if workflow_run_id is not None: + self._workflow_execution_id = workflow_run_id + if not self._workflow_execution_id: + raise ValueError("workflow_run_id missing before streaming workflow events") + return self._workflow_execution_id + + # ------------------------------------------------------------------ + # Node snapshot helpers + # ------------------------------------------------------------------ + def _store_snapshot(self, event: QueueNodeStartedEvent) -> _NodeSnapshot: + snapshot = _NodeSnapshot( + title=event.node_title, + index=event.node_run_index, + start_at=event.start_at, + iteration_id=event.in_iteration_id or "", + loop_id=event.in_loop_id or "", + ) + node_execution_id = NodeExecutionId(event.node_execution_id) + self._node_snapshots[node_execution_id] = snapshot + return snapshot + + def _get_snapshot(self, node_execution_id: str) -> _NodeSnapshot | None: + return self._node_snapshots.get(NodeExecutionId(node_execution_id)) + + def _pop_snapshot(self, node_execution_id: str) -> _NodeSnapshot | None: + return self._node_snapshots.pop(NodeExecutionId(node_execution_id), None) + + @staticmethod + def _merge_metadata( + base_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None, + snapshot: _NodeSnapshot | None, + ) -> Mapping[WorkflowNodeExecutionMetadataKey, Any] | None: + if not base_metadata and not snapshot: + return base_metadata + + merged: dict[WorkflowNodeExecutionMetadataKey, Any] = {} + if base_metadata: + merged.update(base_metadata) + + if snapshot: + if snapshot.iteration_id: + merged[WorkflowNodeExecutionMetadataKey.ITERATION_ID] = snapshot.iteration_id + if snapshot.loop_id: + merged[WorkflowNodeExecutionMetadataKey.LOOP_ID] = snapshot.loop_id + + return merged or None + + def _truncate_mapping( + self, + mapping: Mapping[str, Any] | None, + ) -> tuple[Mapping[str, Any] | None, bool]: + if mapping is None: + return None, False + if not mapping: + return {}, False + + normalized = WorkflowEntry.handle_special_values(dict(mapping)) + if normalized is None: + return None, False + + truncated, is_truncated = self._truncator.truncate_variable_mapping(dict(normalized)) + return truncated, is_truncated + + @staticmethod + def _encode_outputs(outputs: Mapping[str, Any] | None) -> Mapping[str, Any] | None: + if outputs is None: + return None + converter = WorkflowRuntimeTypeConverter() + return converter.to_json_encodable(outputs) def workflow_start_to_stream_response( self, *, task_id: str, - workflow_execution: WorkflowExecution, + workflow_run_id: str, + workflow_id: str, ) -> WorkflowStartStreamResponse: + run_id = self._ensure_workflow_run_id(workflow_run_id) + started_at = naive_utc_now() + self._workflow_started_at = started_at + return WorkflowStartStreamResponse( task_id=task_id, - workflow_run_id=workflow_execution.id_, + workflow_run_id=run_id, data=WorkflowStartStreamResponse.Data( - id=workflow_execution.id_, - workflow_id=workflow_execution.workflow_id, - inputs=workflow_execution.inputs, - created_at=int(workflow_execution.started_at.timestamp()), + id=run_id, + workflow_id=workflow_id, + inputs=self._workflow_inputs, + created_at=int(started_at.timestamp()), ), ) def workflow_finish_to_stream_response( self, *, - session: Session, task_id: str, 
- workflow_execution: WorkflowExecution, + workflow_id: str, + status: WorkflowExecutionStatus, + graph_runtime_state: GraphRuntimeState, + error: str | None = None, + exceptions_count: int = 0, ) -> WorkflowFinishStreamResponse: - created_by = None + run_id = self._ensure_workflow_run_id() + started_at = self._workflow_started_at + if started_at is None: + raise ValueError( + "workflow_finish_to_stream_response called before workflow_start_to_stream_response", + ) + finished_at = naive_utc_now() + elapsed_time = (finished_at - started_at).total_seconds() + + outputs_mapping = graph_runtime_state.outputs or {} + encoded_outputs = WorkflowRuntimeTypeConverter().to_json_encodable(outputs_mapping) + + created_by: Mapping[str, object] | None user = self._user if isinstance(user, Account): created_by = { @@ -94,38 +235,29 @@ class WorkflowResponseConverter: "name": user.name, "email": user.email, } - elif isinstance(user, EndUser): + else: created_by = { "id": user.id, "user": user.session_id, } - else: - raise NotImplementedError(f"User type not supported: {type(user)}") - - # Handle the case where finished_at is None by using current time as default - finished_at_timestamp = ( - int(workflow_execution.finished_at.timestamp()) - if workflow_execution.finished_at - else int(datetime.now(UTC).timestamp()) - ) return WorkflowFinishStreamResponse( task_id=task_id, - workflow_run_id=workflow_execution.id_, + workflow_run_id=run_id, data=WorkflowFinishStreamResponse.Data( - id=workflow_execution.id_, - workflow_id=workflow_execution.workflow_id, - status=workflow_execution.status, - outputs=WorkflowRuntimeTypeConverter().to_json_encodable(workflow_execution.outputs), - error=workflow_execution.error_message, - elapsed_time=workflow_execution.elapsed_time, - total_tokens=workflow_execution.total_tokens, - total_steps=workflow_execution.total_steps, + id=run_id, + workflow_id=workflow_id, + status=status.value, + outputs=encoded_outputs, + error=error, + elapsed_time=elapsed_time, + total_tokens=graph_runtime_state.total_tokens, + total_steps=graph_runtime_state.node_run_steps, created_by=created_by, - created_at=int(workflow_execution.started_at.timestamp()), - finished_at=finished_at_timestamp, - files=self.fetch_files_from_node_outputs(workflow_execution.outputs), - exceptions_count=workflow_execution.exceptions_count, + created_at=int(started_at.timestamp()), + finished_at=int(finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(outputs_mapping), + exceptions_count=exceptions_count, ), ) @@ -134,38 +266,28 @@ class WorkflowResponseConverter: *, event: QueueNodeStartedEvent, task_id: str, - workflow_node_execution: WorkflowNodeExecution, ) -> NodeStartStreamResponse | None: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_execution_id: + if event.node_type in {NodeType.ITERATION, NodeType.LOOP}: return None + run_id = self._ensure_workflow_run_id() + snapshot = self._store_snapshot(event) response = NodeStartStreamResponse( task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_execution_id, + workflow_run_id=run_id, data=NodeStartStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - title=workflow_node_execution.title, - index=workflow_node_execution.index, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.get_response_inputs(), - 
inputs_truncated=workflow_node_execution.inputs_truncated, - created_at=int(workflow_node_execution.created_at.timestamp()), - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, + id=event.node_execution_id, + node_id=event.node_id, + node_type=event.node_type, + title=snapshot.title, + index=snapshot.index, + created_at=int(snapshot.start_at.timestamp()), iteration_id=event.in_iteration_id, loop_id=event.in_loop_id, - parallel_run_id=event.parallel_mode_run_id, agent_strategy=event.agent_strategy, ), ) - # extras logic if event.node_type == NodeType.TOOL: response.data.extras["icon"] = ToolManager.get_tool_icon( tenant_id=self._application_generate_entity.app_config.tenant_id, @@ -189,41 +311,54 @@ class WorkflowResponseConverter: *, event: QueueNodeSucceededEvent | QueueNodeFailedEvent | QueueNodeExceptionEvent, task_id: str, - workflow_node_execution: WorkflowNodeExecution, ) -> NodeFinishStreamResponse | None: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_execution_id: - return None - if not workflow_node_execution.finished_at: + if event.node_type in {NodeType.ITERATION, NodeType.LOOP}: return None + run_id = self._ensure_workflow_run_id() + snapshot = self._pop_snapshot(event.node_execution_id) - json_converter = WorkflowRuntimeTypeConverter() + start_at = snapshot.start_at if snapshot else event.start_at + finished_at = naive_utc_now() + elapsed_time = (finished_at - start_at).total_seconds() + + inputs, inputs_truncated = self._truncate_mapping(event.inputs) + process_data, process_data_truncated = self._truncate_mapping(event.process_data) + encoded_outputs = self._encode_outputs(event.outputs) + outputs, outputs_truncated = self._truncate_mapping(encoded_outputs) + metadata = self._merge_metadata(event.execution_metadata, snapshot) + + if isinstance(event, QueueNodeSucceededEvent): + status = WorkflowNodeExecutionStatus.SUCCEEDED.value + error_message = event.error + elif isinstance(event, QueueNodeFailedEvent): + status = WorkflowNodeExecutionStatus.FAILED.value + error_message = event.error + else: + status = WorkflowNodeExecutionStatus.EXCEPTION.value + error_message = event.error return NodeFinishStreamResponse( task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_execution_id, + workflow_run_id=run_id, data=NodeFinishStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - index=workflow_node_execution.index, - title=workflow_node_execution.title, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.get_response_inputs(), - inputs_truncated=workflow_node_execution.inputs_truncated, - process_data=workflow_node_execution.get_response_process_data(), - process_data_truncated=workflow_node_execution.process_data_truncated, - outputs=json_converter.to_json_encodable(workflow_node_execution.get_response_outputs()), - outputs_truncated=workflow_node_execution.outputs_truncated, - status=workflow_node_execution.status, - error=workflow_node_execution.error, - elapsed_time=workflow_node_execution.elapsed_time, - execution_metadata=workflow_node_execution.metadata, - created_at=int(workflow_node_execution.created_at.timestamp()), - finished_at=int(workflow_node_execution.finished_at.timestamp()), - 
files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), - parallel_id=event.parallel_id, + id=event.node_execution_id, + node_id=event.node_id, + node_type=event.node_type, + index=snapshot.index if snapshot else 0, + title=snapshot.title if snapshot else "", + inputs=inputs, + inputs_truncated=inputs_truncated, + process_data=process_data, + process_data_truncated=process_data_truncated, + outputs=outputs, + outputs_truncated=outputs_truncated, + status=status, + error=error_message, + elapsed_time=elapsed_time, + execution_metadata=metadata, + created_at=int(start_at.timestamp()), + finished_at=int(finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(event.outputs or {}), iteration_id=event.in_iteration_id, loop_id=event.in_loop_id, ), @@ -234,44 +369,45 @@ class WorkflowResponseConverter: *, event: QueueNodeRetryEvent, task_id: str, - workflow_node_execution: WorkflowNodeExecution, - ) -> Union[NodeRetryStreamResponse, NodeFinishStreamResponse] | None: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_execution_id: - return None - if not workflow_node_execution.finished_at: + ) -> NodeRetryStreamResponse | None: + if event.node_type in {NodeType.ITERATION, NodeType.LOOP}: return None + run_id = self._ensure_workflow_run_id() - json_converter = WorkflowRuntimeTypeConverter() + snapshot = self._get_snapshot(event.node_execution_id) + if snapshot is None: + raise AssertionError("node retry event arrived without a stored snapshot") + finished_at = naive_utc_now() + elapsed_time = (finished_at - event.start_at).total_seconds() + + inputs, inputs_truncated = self._truncate_mapping(event.inputs) + process_data, process_data_truncated = self._truncate_mapping(event.process_data) + encoded_outputs = self._encode_outputs(event.outputs) + outputs, outputs_truncated = self._truncate_mapping(encoded_outputs) + metadata = self._merge_metadata(event.execution_metadata, snapshot) return NodeRetryStreamResponse( task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_execution_id, + workflow_run_id=run_id, data=NodeRetryStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - index=workflow_node_execution.index, - title=workflow_node_execution.title, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.get_response_inputs(), - inputs_truncated=workflow_node_execution.inputs_truncated, - process_data=workflow_node_execution.get_response_process_data(), - process_data_truncated=workflow_node_execution.process_data_truncated, - outputs=json_converter.to_json_encodable(workflow_node_execution.get_response_outputs()), - outputs_truncated=workflow_node_execution.outputs_truncated, - status=workflow_node_execution.status, - error=workflow_node_execution.error, - elapsed_time=workflow_node_execution.elapsed_time, - execution_metadata=workflow_node_execution.metadata, - created_at=int(workflow_node_execution.created_at.timestamp()), - finished_at=int(workflow_node_execution.finished_at.timestamp()), - files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, + id=event.node_execution_id, + node_id=event.node_id, + 
node_type=event.node_type, + index=snapshot.index, + title=snapshot.title, + inputs=inputs, + inputs_truncated=inputs_truncated, + process_data=process_data, + process_data_truncated=process_data_truncated, + outputs=outputs, + outputs_truncated=outputs_truncated, + status=WorkflowNodeExecutionStatus.RETRY.value, + error=event.error, + elapsed_time=elapsed_time, + execution_metadata=metadata, + created_at=int(snapshot.start_at.timestamp()), + finished_at=int(finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(event.outputs or {}), iteration_id=event.in_iteration_id, loop_id=event.in_loop_id, retry_index=event.retry_index, @@ -379,8 +515,6 @@ class WorkflowResponseConverter: inputs=new_inputs, inputs_truncated=truncated, metadata=event.metadata or {}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, ), ) @@ -405,9 +539,6 @@ class WorkflowResponseConverter: pre_loop_output={}, created_at=int(time.time()), extras={}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parallel_mode_run_id=event.parallel_mode_run_id, ), ) @@ -446,8 +577,6 @@ class WorkflowResponseConverter: execution_metadata=event.metadata, finished_at=int(time.time()), steps=event.steps, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, ), ) diff --git a/api/core/app/apps/completion/generate_response_converter.py b/api/core/app/apps/completion/generate_response_converter.py index d7e9ebdf24..a4f574642d 100644 --- a/api/core/app/apps/completion/generate_response_converter.py +++ b/api/core/app/apps/completion/generate_response_converter.py @@ -112,7 +112,7 @@ class CompletionAppGenerateResponseConverter(AppGenerateResponseConverter): metadata = {} sub_stream_response_dict["metadata"] = cls._get_simple_metadata(metadata) response_chunk.update(sub_stream_response_dict) - if isinstance(sub_stream_response, ErrorStreamResponse): + elif isinstance(sub_stream_response, ErrorStreamResponse): data = cls._error_to_stream_response(sub_stream_response.err) response_chunk.update(data) else: diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index 170c6a274b..7a51b8f3a5 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -207,6 +207,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): from_source=from_source, from_end_user_id=end_user_id, from_account_id=account_id, + app_mode=app_config.app_mode, ) db.session.add(message) diff --git a/api/core/app/apps/pipeline/pipeline_generator.py b/api/core/app/apps/pipeline/pipeline_generator.py index bd077c4cb8..f8bfbce37a 100644 --- a/api/core/app/apps/pipeline/pipeline_generator.py +++ b/api/core/app/apps/pipeline/pipeline_generator.py @@ -255,7 +255,7 @@ class PipelineGenerator(BaseAppGenerator): json_text = json.dumps(text) upload_file = FileService(db.engine).upload_text(json_text, name, user.id, dataset.tenant_id) features = FeatureService.get_features(dataset.tenant_id) - if features.billing.subscription.plan == "sandbox": + if features.billing.enabled and features.billing.subscription.plan == "sandbox": tenant_pipeline_task_key = f"tenant_pipeline_task:{dataset.tenant_id}" tenant_self_pipeline_task_queue = f"tenant_self_pipeline_task_queue:{dataset.tenant_id}" @@ -352,6 +352,8 @@ class PipelineGenerator(BaseAppGenerator): "application_generate_entity": application_generate_entity, "workflow_thread_pool_id": workflow_thread_pool_id, 
"variable_loader": variable_loader, + "workflow_execution_repository": workflow_execution_repository, + "workflow_node_execution_repository": workflow_node_execution_repository, }, ) @@ -367,8 +369,6 @@ class PipelineGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, stream=streaming, draft_var_saver_factory=draft_var_saver_factory, ) @@ -573,6 +573,8 @@ class PipelineGenerator(BaseAppGenerator): queue_manager: AppQueueManager, context: contextvars.Context, variable_loader: VariableLoader, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, workflow_thread_pool_id: str | None = None, ) -> None: """ @@ -620,6 +622,8 @@ class PipelineGenerator(BaseAppGenerator): variable_loader=variable_loader, workflow=workflow, system_user_id=system_user_id, + workflow_execution_repository=workflow_execution_repository, + workflow_node_execution_repository=workflow_node_execution_repository, ) runner.run() @@ -648,8 +652,6 @@ class PipelineGenerator(BaseAppGenerator): workflow: Workflow, queue_manager: AppQueueManager, user: Union[Account, EndUser], - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: @@ -660,7 +662,6 @@ class PipelineGenerator(BaseAppGenerator): :param queue_manager: queue manager :param user: account or end user :param stream: is stream - :param workflow_node_execution_repository: optional repository for workflow node execution :return: """ # init generate task pipeline @@ -670,8 +671,6 @@ class PipelineGenerator(BaseAppGenerator): queue_manager=queue_manager, user=user, stream=stream, - workflow_node_execution_repository=workflow_node_execution_repository, - workflow_execution_repository=workflow_execution_repository, draft_var_saver_factory=draft_var_saver_factory, ) diff --git a/api/core/app/apps/pipeline/pipeline_runner.py b/api/core/app/apps/pipeline/pipeline_runner.py index 145f629c4d..4be9e01fbf 100644 --- a/api/core/app/apps/pipeline/pipeline_runner.py +++ b/api/core/app/apps/pipeline/pipeline_runner.py @@ -11,11 +11,14 @@ from core.app.entities.app_invoke_entities import ( ) from core.variables.variables import RAGPipelineVariable, RAGPipelineVariableInput from core.workflow.entities.graph_init_params import GraphInitParams -from core.workflow.entities.graph_runtime_state import GraphRuntimeState -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.enums import WorkflowType from core.workflow.graph import Graph +from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer from core.workflow.graph_events import GraphEngineEvent, GraphRunFailedEvent from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import VariableLoader from 
core.workflow.workflow_entry import WorkflowEntry @@ -40,6 +43,8 @@ class PipelineRunner(WorkflowBasedAppRunner): variable_loader: VariableLoader, workflow: Workflow, system_user_id: str, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, workflow_thread_pool_id: str | None = None, ) -> None: """ @@ -56,6 +61,8 @@ class PipelineRunner(WorkflowBasedAppRunner): self.workflow_thread_pool_id = workflow_thread_pool_id self._workflow = workflow self._sys_user_id = system_user_id + self._workflow_execution_repository = workflow_execution_repository + self._workflow_node_execution_repository = workflow_node_execution_repository def _get_app_id(self) -> str: return self.application_generate_entity.app_config.app_id @@ -116,7 +123,7 @@ class PipelineRunner(WorkflowBasedAppRunner): rag_pipeline_variables = [] if workflow.rag_pipeline_variables: for v in workflow.rag_pipeline_variables: - rag_pipeline_variable = RAGPipelineVariable(**v) + rag_pipeline_variable = RAGPipelineVariable.model_validate(v) if ( rag_pipeline_variable.belong_to_node_id in (self.application_generate_entity.start_node_id, "shared") @@ -163,6 +170,23 @@ class PipelineRunner(WorkflowBasedAppRunner): variable_pool=variable_pool, ) + self._queue_manager.graph_runtime_state = graph_runtime_state + + persistence_layer = WorkflowPersistenceLayer( + application_generate_entity=self.application_generate_entity, + workflow_info=PersistenceWorkflowInfo( + workflow_id=workflow.id, + workflow_type=WorkflowType(workflow.type), + version=workflow.version, + graph_data=workflow.graph_dict, + ), + workflow_execution_repository=self._workflow_execution_repository, + workflow_node_execution_repository=self._workflow_node_execution_repository, + trace_manager=self.application_generate_entity.trace_manager, + ) + + workflow_entry.graph_engine.layer(persistence_layer) + generator = workflow_entry.run() for event in generator: @@ -229,8 +253,8 @@ class PipelineRunner(WorkflowBasedAppRunner): workflow_id=workflow.id, graph_config=graph_config, user_id=self.application_generate_entity.user_id, - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 45d047434b..f22ef5431e 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -231,6 +231,8 @@ class WorkflowAppGenerator(BaseAppGenerator): "queue_manager": queue_manager, "context": context, "variable_loader": variable_loader, + "workflow_execution_repository": workflow_execution_repository, + "workflow_node_execution_repository": workflow_node_execution_repository, }, ) @@ -244,8 +246,6 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, draft_var_saver_factory=draft_var_saver_factory, stream=streaming, ) @@ -424,6 +424,8 @@ class WorkflowAppGenerator(BaseAppGenerator): queue_manager: AppQueueManager, context: contextvars.Context, variable_loader: VariableLoader, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ) -> None: """ Generate worker in a new thread. 
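Aside on the repository plumbing in this section: instead of handing WorkflowExecutionRepository and WorkflowNodeExecutionRepository to the task pipelines, each runner (advanced chat, pipeline, workflow) now wraps them in a WorkflowPersistenceLayer and registers it via workflow_entry.graph_engine.layer(...). A minimal sketch of that observer-style layering, with a toy engine and protocol standing in for the real graph-engine API, whose exact hooks are not shown in this diff:

from typing import Iterable, Iterator, Protocol


class GraphEngineLayer(Protocol):
    # Hypothetical observer interface; the real layer contract lives in
    # core.workflow.graph_engine and may expose richer hooks.
    def on_event(self, event: object) -> None: ...


class RecordingPersistenceLayer:
    """Stand-in for WorkflowPersistenceLayer: records the events it observes."""

    def __init__(self, sink: list[object]) -> None:
        self._sink = sink

    def on_event(self, event: object) -> None:
        self._sink.append(event)


class GraphEngine:
    """Toy engine: layers registered via layer() observe the event stream."""

    def __init__(self) -> None:
        self._layers: list[GraphEngineLayer] = []

    def layer(self, layer: GraphEngineLayer) -> "GraphEngine":
        self._layers.append(layer)
        return self

    def run(self, events: Iterable[object]) -> Iterator[object]:
        for event in events:
            for observer in self._layers:
                observer.on_event(event)  # persistence happens as a side effect
            yield event


sink: list[object] = []
engine = GraphEngine()
engine.layer(RecordingPersistenceLayer(sink))
assert list(engine.run(["node_started", "node_finished"])) == ["node_started", "node_finished"]
assert sink == ["node_started", "node_finished"]

Because persistence now rides along as an engine layer, the pipelines only need read access to the shared GraphRuntimeState (exposed by the queue manager), which is exactly what the GraphRuntimeStateSupport mixin introduced earlier in this diff provides.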
@@ -465,6 +467,8 @@ class WorkflowAppGenerator(BaseAppGenerator): variable_loader=variable_loader, workflow=workflow, system_user_id=system_user_id, + workflow_execution_repository=workflow_execution_repository, + workflow_node_execution_repository=workflow_node_execution_repository, ) try: @@ -493,8 +497,6 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow: Workflow, queue_manager: AppQueueManager, user: Union[Account, EndUser], - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: @@ -514,8 +516,6 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, draft_var_saver_factory=draft_var_saver_factory, stream=stream, ) diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index 943ae8ab4e..3c9bf176b5 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -5,12 +5,13 @@ from typing import cast from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfig from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner -from core.app.entities.app_invoke_entities import ( - InvokeFrom, - WorkflowAppGenerateEntity, -) -from core.workflow.entities import GraphRuntimeState, VariablePool +from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity +from core.workflow.enums import WorkflowType from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel +from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry @@ -34,6 +35,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): variable_loader: VariableLoader, workflow: Workflow, system_user_id: str, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ): super().__init__( queue_manager=queue_manager, @@ -43,6 +46,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): self.application_generate_entity = application_generate_entity self._workflow = workflow self._sys_user_id = system_user_id + self._workflow_execution_repository = workflow_execution_repository + self._workflow_node_execution_repository = workflow_node_execution_repository def run(self): """ @@ -51,6 +56,14 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): app_config = self.application_generate_entity.app_config app_config = cast(WorkflowAppConfig, app_config) + system_inputs = SystemVariable( + files=self.application_generate_entity.files, + user_id=self._sys_user_id, + app_id=app_config.app_id, + workflow_id=app_config.workflow_id, + 
workflow_execution_id=self.application_generate_entity.workflow_execution_id, + ) + # if only single iteration or single loop run is requested if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: graph, variable_pool, graph_runtime_state = self._prepare_single_node_execution( @@ -60,18 +73,9 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): ) else: inputs = self.application_generate_entity.inputs - files = self.application_generate_entity.files # Create a variable pool. - system_inputs = SystemVariable( - files=files, - user_id=self._sys_user_id, - app_id=app_config.app_id, - workflow_id=app_config.workflow_id, - workflow_execution_id=self.application_generate_entity.workflow_execution_id, - ) - variable_pool = VariablePool( system_variables=system_inputs, user_inputs=inputs, @@ -96,6 +100,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): channel_key = f"workflow:{task_id}:commands" command_channel = RedisChannel(redis_client, channel_key) + self._queue_manager.graph_runtime_state = graph_runtime_state + workflow_entry = WorkflowEntry( tenant_id=self._workflow.tenant_id, app_id=self._workflow.app_id, @@ -115,6 +121,21 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): command_channel=command_channel, ) + persistence_layer = WorkflowPersistenceLayer( + application_generate_entity=self.application_generate_entity, + workflow_info=PersistenceWorkflowInfo( + workflow_id=self._workflow.id, + workflow_type=WorkflowType(self._workflow.type), + version=self._workflow.version, + graph_data=self._workflow.graph_dict, + ), + workflow_execution_repository=self._workflow_execution_repository, + workflow_node_execution_repository=self._workflow_node_execution_repository, + trace_manager=self.application_generate_entity.trace_manager, + ) + + workflow_entry.graph_engine.layer(persistence_layer) + generator = workflow_entry.run() for event in generator: diff --git a/api/core/app/apps/workflow/generate_response_converter.py b/api/core/app/apps/workflow/generate_response_converter.py index 01ecf0298f..c64f44a603 100644 --- a/api/core/app/apps/workflow/generate_response_converter.py +++ b/api/core/app/apps/workflow/generate_response_converter.py @@ -89,7 +89,7 @@ class WorkflowAppGenerateResponseConverter(AppGenerateResponseConverter): data = cls._error_to_stream_response(sub_stream_response.err) response_chunk.update(data) elif isinstance(sub_stream_response, NodeStartStreamResponse | NodeFinishStreamResponse): - response_chunk.update(sub_stream_response.to_ignore_detail_dict()) # ty: ignore [unresolved-attribute] + response_chunk.update(sub_stream_response.to_ignore_detail_dict()) else: response_chunk.update(sub_stream_response.model_dump(mode="json")) yield response_chunk diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 56b0d91141..08e2fce48c 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -8,11 +8,9 @@ from sqlalchemy.orm import Session from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME from core.app.apps.base_app_queue_manager import AppQueueManager +from core.app.apps.common.graph_runtime_state_support import GraphRuntimeStateSupport from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter -from core.app.entities.app_invoke_entities import ( - InvokeFrom, - WorkflowAppGenerateEntity, -) +from 
core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity from core.app.entities.queue_entities import ( AppQueueEvent, MessageQueueMessage, @@ -53,27 +51,20 @@ from core.app.entities.task_entities import ( from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk from core.ops.ops_trace_manager import TraceQueueManager -from core.workflow.entities import GraphRuntimeState, WorkflowExecution -from core.workflow.enums import WorkflowExecutionStatus, WorkflowType +from core.workflow.enums import WorkflowExecutionStatus from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState from core.workflow.system_variable import SystemVariable -from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager from extensions.ext_database import db -from models.account import Account +from models import Account from models.enums import CreatorUserRole from models.model import EndUser -from models.workflow import ( - Workflow, - WorkflowAppLog, - WorkflowAppLogCreatedFrom, -) +from models.workflow import Workflow, WorkflowAppLog, WorkflowAppLogCreatedFrom logger = logging.getLogger(__name__) -class WorkflowAppGenerateTaskPipeline: +class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport): """ WorkflowAppGenerateTaskPipeline is a class that generate stream output and state management for Application. """ @@ -85,8 +76,6 @@ class WorkflowAppGenerateTaskPipeline: queue_manager: AppQueueManager, user: Union[Account, EndUser], stream: bool, - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, ): self._base_task_pipeline = BasedGenerateTaskPipeline( @@ -99,42 +88,30 @@ class WorkflowAppGenerateTaskPipeline: self._user_id = user.id user_session_id = user.session_id self._created_by_role = CreatorUserRole.END_USER - elif isinstance(user, Account): + else: self._user_id = user.id user_session_id = user.id self._created_by_role = CreatorUserRole.ACCOUNT - else: - raise ValueError(f"Invalid user type: {type(user)}") - - self._workflow_cycle_manager = WorkflowCycleManager( - application_generate_entity=application_generate_entity, - workflow_system_variables=SystemVariable( - files=application_generate_entity.files, - user_id=user_session_id, - app_id=application_generate_entity.app_config.app_id, - workflow_id=workflow.id, - workflow_execution_id=application_generate_entity.workflow_execution_id, - ), - workflow_info=CycleManagerWorkflowInfo( - workflow_id=workflow.id, - workflow_type=WorkflowType(workflow.type), - version=workflow.version, - graph_data=workflow.graph_dict, - ), - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, - ) - - self._workflow_response_converter = WorkflowResponseConverter( - application_generate_entity=application_generate_entity, - user=user, - ) self._application_generate_entity = application_generate_entity self._workflow_features_dict = workflow.features_dict - self._workflow_run_id = "" + self._workflow_execution_id = "" 
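The hunks above and below replace per-event WorkflowCycleManager bookkeeping with a GraphRuntimeState that the app runner publishes on the queue manager (self._queue_manager.graph_runtime_state = graph_runtime_state) and that the task pipeline resolves lazily through the new GraphRuntimeStateSupport mixin. A minimal sketch of that hand-off, assuming the mixin behaves roughly as the call sites here suggest; the real implementation may differ:

from typing import Any


class QueueManagerStub:
    """Stand-in for AppQueueManager: the worker thread sets graph_runtime_state."""

    graph_runtime_state: Any | None = None


class GraphRuntimeStateSupportSketch:
    """Hypothetical mixin sketch: resolve the state the runner published."""

    def __init__(self, queue_manager: QueueManagerStub) -> None:
        self._queue_manager = queue_manager
        self._graph_runtime_state: Any | None = None

    def _resolve_graph_runtime_state(self) -> Any | None:
        # Lazily read the state published before the graph started running.
        if self._graph_runtime_state is None:
            self._graph_runtime_state = self._queue_manager.graph_runtime_state
        return self._graph_runtime_state

    def _ensure_graph_runtime_initialized(self) -> Any:
        # Same guard the removed per-event helper provided, minus the parameter.
        state = self._resolve_graph_runtime_state()
        if state is None:
            raise ValueError("graph runtime state not initialized.")
        return state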
self._invoke_from = queue_manager.invoke_from self._draft_var_saver_factory = draft_var_saver_factory + self._workflow = workflow + self._workflow_system_variables = SystemVariable( + files=application_generate_entity.files, + user_id=user_session_id, + app_id=application_generate_entity.app_config.app_id, + workflow_id=workflow.id, + workflow_execution_id=application_generate_entity.workflow_execution_id, + ) + self._workflow_response_converter = WorkflowResponseConverter( + application_generate_entity=application_generate_entity, + user=user, + system_variables=self._workflow_system_variables, + ) + self._graph_runtime_state: GraphRuntimeState | None = self._base_task_pipeline.queue_manager.graph_runtime_state def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: """ @@ -261,15 +238,9 @@ class WorkflowAppGenerateTaskPipeline: def _ensure_workflow_initialized(self): """Fluent validation for workflow state.""" - if not self._workflow_run_id: + if not self._workflow_execution_id: raise ValueError("workflow run not initialized.") - def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState: - """Fluent validation for graph runtime state.""" - if not graph_runtime_state: - raise ValueError("graph runtime state not initialized.") - return graph_runtime_state - def _handle_ping_event(self, event: QueuePingEvent, **kwargs) -> Generator[PingStreamResponse, None, None]: """Handle ping events.""" yield self._base_task_pipeline.ping_stream_response() @@ -283,12 +254,14 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueWorkflowStartedEvent, **kwargs ) -> Generator[StreamResponse, None, None]: """Handle workflow started events.""" - # init workflow run - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start() - self._workflow_run_id = workflow_execution.id_ + runtime_state = self._resolve_graph_runtime_state() + + run_id = self._extract_workflow_run_id(runtime_state) + self._workflow_execution_id = run_id start_resp = self._workflow_response_converter.workflow_start_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, + workflow_run_id=run_id, + workflow_id=self._workflow.id, ) yield start_resp @@ -296,14 +269,9 @@ class WorkflowAppGenerateTaskPipeline: """Handle node retry events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried( - workflow_execution_id=self._workflow_run_id, - event=event, - ) response = self._workflow_response_converter.workflow_node_retry_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if response: @@ -315,13 +283,9 @@ class WorkflowAppGenerateTaskPipeline: """Handle node started events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start( - workflow_execution_id=self._workflow_run_id, event=event - ) node_start_response = self._workflow_response_converter.workflow_node_start_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if node_start_response: @@ -331,14 +295,12 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueNodeSucceededEvent, **kwargs ) -> Generator[StreamResponse, None, None]: """Handle node succeeded events.""" - workflow_node_execution = 
self._workflow_cycle_manager.handle_workflow_node_execution_success(event=event) node_success_response = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_success_response: yield node_success_response @@ -349,17 +311,13 @@ class WorkflowAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle various node failure events.""" - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed( - event=event, - ) node_failed_response = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if isinstance(event, QueueNodeExceptionEvent): - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_failed_response: yield node_failed_response @@ -372,7 +330,7 @@ class WorkflowAppGenerateTaskPipeline: iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield iter_start_resp @@ -385,7 +343,7 @@ class WorkflowAppGenerateTaskPipeline: iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield iter_next_resp @@ -398,7 +356,7 @@ class WorkflowAppGenerateTaskPipeline: iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield iter_finish_resp @@ -409,7 +367,7 @@ class WorkflowAppGenerateTaskPipeline: loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield loop_start_resp @@ -420,7 +378,7 @@ class WorkflowAppGenerateTaskPipeline: loop_next_resp = self._workflow_response_converter.workflow_loop_next_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield loop_next_resp @@ -433,7 +391,7 @@ class WorkflowAppGenerateTaskPipeline: loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield loop_finish_resp @@ -442,33 +400,22 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueWorkflowSucceededEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow succeeded events.""" + _ 
= trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow.id, + status=WorkflowExecutionStatus.SUCCEEDED, + graph_runtime_state=validated_state, + ) with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - conversation_id=None, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - - # save workflow app log - self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + self._save_workflow_app_log(session=session, workflow_run_id=self._workflow_execution_id) yield workflow_finish_resp @@ -476,34 +423,23 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueWorkflowPartialSuccessEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow partial success events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow.id, + status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED, + graph_runtime_state=validated_state, + exceptions_count=event.exceptions_count, + ) with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - exceptions_count=event.exceptions_count, - conversation_id=None, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - - # save workflow app log - self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + self._save_workflow_app_log(session=session, workflow_run_id=self._workflow_execution_id) yield workflow_finish_resp @@ -511,37 +447,33 @@ class WorkflowAppGenerateTaskPipeline: self, event: Union[QueueWorkflowFailedEvent, QueueStopEvent], *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow failed and stop events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) 
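The "_ = trace_manager" assignments added in these handlers are explicit discards: the keyword parameter stays in the signatures so existing call sites keep working, while tracing itself now happens inside WorkflowPersistenceLayer. A small illustration of the idiom:

def handle_event(event: object, *, trace_manager: object | None = None) -> str:
    # Explicitly discard the now-unused argument so linters do not flag it;
    # callers that still pass trace_manager remain source-compatible.
    _ = trace_manager
    return f"handled {type(event).__name__}"


print(handle_event(object()))  # handled object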
+ validated_state = self._ensure_graph_runtime_initialized() + + if isinstance(event, QueueWorkflowFailedEvent): + status = WorkflowExecutionStatus.FAILED + error = event.error + exceptions_count = event.exceptions_count + else: + status = WorkflowExecutionStatus.STOPPED + error = event.get_stop_reason() + exceptions_count = 0 + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow.id, + status=status, + graph_runtime_state=validated_state, + error=error, + exceptions_count=exceptions_count, + ) with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - status=WorkflowExecutionStatus.FAILED - if isinstance(event, QueueWorkflowFailedEvent) - else WorkflowExecutionStatus.STOPPED, - error_message=event.error if isinstance(event, QueueWorkflowFailedEvent) else event.get_stop_reason(), - conversation_id=None, - trace_manager=trace_manager, - exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - - # save workflow app log - self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + self._save_workflow_app_log(session=session, workflow_run_id=self._workflow_execution_id) yield workflow_finish_resp @@ -601,7 +533,6 @@ class WorkflowAppGenerateTaskPipeline: self, event: AppQueueEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, tts_publisher: AppGeneratorTTSPublisher | None = None, trace_manager: TraceQueueManager | None = None, queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None, @@ -614,7 +545,6 @@ class WorkflowAppGenerateTaskPipeline: if handler := handlers.get(event_type): yield from handler( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -631,7 +561,6 @@ class WorkflowAppGenerateTaskPipeline: ): yield from self._handle_node_failed_events( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -642,7 +571,6 @@ class WorkflowAppGenerateTaskPipeline: if isinstance(event, (QueueWorkflowFailedEvent, QueueStopEvent)): yield from self._handle_workflow_failed_and_stop_events( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -661,15 +589,12 @@ class WorkflowAppGenerateTaskPipeline: Process stream response using elegant Fluent Python patterns. Maintains exact same functionality as original 44-if-statement version. 
""" - # Initialize graph runtime state - graph_runtime_state = None - for queue_message in self._base_task_pipeline.queue_manager.listen(): event = queue_message.event match event: case QueueWorkflowStartedEvent(): - graph_runtime_state = event.graph_runtime_state + self._resolve_graph_runtime_state() yield from self._handle_workflow_started_event(event) case QueueTextChunkEvent(): @@ -681,12 +606,19 @@ class WorkflowAppGenerateTaskPipeline: yield from self._handle_error_event(event) break + case QueueWorkflowFailedEvent(): + yield from self._handle_workflow_failed_and_stop_events(event) + break + + case QueueStopEvent(): + yield from self._handle_workflow_failed_and_stop_events(event) + break + # Handle all other events through elegant dispatch case _: if responses := list( self._dispatch_event( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -697,7 +629,7 @@ class WorkflowAppGenerateTaskPipeline: if tts_publisher: tts_publisher.publish(None) - def _save_workflow_app_log(self, *, session: Session, workflow_execution: WorkflowExecution): + def _save_workflow_app_log(self, *, session: Session, workflow_run_id: str | None): invoke_from = self._application_generate_entity.invoke_from if invoke_from == InvokeFrom.SERVICE_API: created_from = WorkflowAppLogCreatedFrom.SERVICE_API @@ -709,11 +641,14 @@ class WorkflowAppGenerateTaskPipeline: # not save log for debugging return + if not workflow_run_id: + return + workflow_app_log = WorkflowAppLog() workflow_app_log.tenant_id = self._application_generate_entity.app_config.tenant_id workflow_app_log.app_id = self._application_generate_entity.app_config.app_id - workflow_app_log.workflow_id = workflow_execution.workflow_id - workflow_app_log.workflow_run_id = workflow_execution.id_ + workflow_app_log.workflow_id = self._workflow.id + workflow_app_log.workflow_run_id = workflow_run_id workflow_app_log.created_from = created_from.value workflow_app_log.created_by_role = self._created_by_role workflow_app_log.created_by = self._user_id diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 564daba86d..5e2bd17f8c 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -25,7 +25,7 @@ from core.app.entities.queue_entities import ( QueueWorkflowStartedEvent, QueueWorkflowSucceededEvent, ) -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_events import ( GraphEngineEvent, @@ -54,6 +54,7 @@ from core.workflow.graph_events.graph import GraphRunAbortedEvent from core.workflow.nodes import NodeType from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool from core.workflow.workflow_entry import WorkflowEntry @@ -100,8 +101,8 @@ class WorkflowBasedAppRunner: workflow_id=workflow_id, graph_config=graph_config, user_id=user_id, - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) @@ -244,8 +245,8 @@ class 
WorkflowBasedAppRunner: workflow_id=workflow.id, graph_config=graph_config, user_id="", - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) @@ -346,9 +347,7 @@ class WorkflowBasedAppRunner: :param event: event """ if isinstance(event, GraphRunStartedEvent): - self._publish_event( - QueueWorkflowStartedEvent(graph_runtime_state=workflow_entry.graph_engine.graph_runtime_state) - ) + self._publish_event(QueueWorkflowStartedEvent()) elif isinstance(event, GraphRunSucceededEvent): self._publish_event(QueueWorkflowSucceededEvent(outputs=event.outputs)) elif isinstance(event, GraphRunPartialSucceededEvent): @@ -372,7 +371,6 @@ class WorkflowBasedAppRunner: node_title=event.node_title, node_type=event.node_type, start_at=event.start_at, - predecessor_node_id=event.predecessor_node_id, in_iteration_id=event.in_iteration_id, in_loop_id=event.in_loop_id, inputs=inputs, @@ -393,7 +391,6 @@ class WorkflowBasedAppRunner: node_title=event.node_title, node_type=event.node_type, start_at=event.start_at, - predecessor_node_id=event.predecessor_node_id, in_iteration_id=event.in_iteration_id, in_loop_id=event.in_loop_id, agent_strategy=event.agent_strategy, @@ -494,7 +491,6 @@ class WorkflowBasedAppRunner: start_at=event.start_at, node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps, inputs=event.inputs, - predecessor_node_id=event.predecessor_node_id, metadata=event.metadata, ) ) @@ -536,7 +532,6 @@ class WorkflowBasedAppRunner: start_at=event.start_at, node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps, inputs=event.inputs, - predecessor_node_id=event.predecessor_node_id, metadata=event.metadata, ) ) diff --git a/api/core/app/entities/queue_entities.py b/api/core/app/entities/queue_entities.py index 76d22d8ac3..77d6bf03b4 100644 --- a/api/core/app/entities/queue_entities.py +++ b/api/core/app/entities/queue_entities.py @@ -3,11 +3,11 @@ from datetime import datetime from enum import StrEnum, auto from typing import Any -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk from core.rag.entities.citation_metadata import RetrievalSourceMetadata -from core.workflow.entities import AgentNodeStrategyInit, GraphRuntimeState +from core.workflow.entities import AgentNodeStrategyInit from core.workflow.enums import WorkflowNodeExecutionMetadataKey from core.workflow.nodes import NodeType @@ -54,6 +54,7 @@ class AppQueueEvent(BaseModel): """ event: QueueEvent + model_config = ConfigDict(arbitrary_types_allowed=True) class QueueLLMChunkEvent(AppQueueEvent): @@ -80,7 +81,6 @@ class QueueIterationStartEvent(AppQueueEvent): node_run_index: int inputs: Mapping[str, object] = Field(default_factory=dict) - predecessor_node_id: str | None = None metadata: Mapping[str, object] = Field(default_factory=dict) @@ -132,19 +132,10 @@ class QueueLoopStartEvent(AppQueueEvent): node_id: str node_type: NodeType node_title: str - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" start_at: datetime node_run_index: int inputs: Mapping[str, object] 
= Field(default_factory=dict) - predecessor_node_id: str | None = None metadata: Mapping[str, object] = Field(default_factory=dict) @@ -160,16 +151,6 @@ class QueueLoopNextEvent(AppQueueEvent): node_id: str node_type: NodeType node_title: str - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" - parallel_mode_run_id: str | None = None - """iteration run in parallel mode run id""" node_run_index: int output: Any = None # output for the current loop @@ -185,14 +166,6 @@ class QueueLoopCompletedEvent(AppQueueEvent): node_id: str node_type: NodeType node_title: str - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" start_at: datetime node_run_index: int @@ -285,12 +258,9 @@ class QueueAdvancedChatMessageEndEvent(AppQueueEvent): class QueueWorkflowStartedEvent(AppQueueEvent): - """ - QueueWorkflowStartedEvent entity - """ + """QueueWorkflowStartedEvent entity.""" event: QueueEvent = QueueEvent.WORKFLOW_STARTED - graph_runtime_state: GraphRuntimeState class QueueWorkflowSucceededEvent(AppQueueEvent): @@ -334,15 +304,9 @@ class QueueNodeStartedEvent(AppQueueEvent): node_title: str node_type: NodeType node_run_index: int = 1 # FIXME(-LAN-): may not used - predecessor_node_id: str | None = None - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None in_iteration_id: str | None = None in_loop_id: str | None = None start_at: datetime - parallel_mode_run_id: str | None = None agent_strategy: AgentNodeStrategyInit | None = None # FIXME(-LAN-): only for ToolNode, need to refactor @@ -360,14 +324,6 @@ class QueueNodeSucceededEvent(AppQueueEvent): node_execution_id: str node_id: str node_type: NodeType - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" in_iteration_id: str | None = None """iteration id if node is in iteration""" in_loop_id: str | None = None @@ -423,14 +379,6 @@ class QueueNodeExceptionEvent(AppQueueEvent): node_execution_id: str node_id: str node_type: NodeType - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" in_iteration_id: str | None = None """iteration id if node is in iteration""" in_loop_id: str | None = None @@ -455,7 +403,6 @@ class QueueNodeFailedEvent(AppQueueEvent): node_execution_id: str 
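AppQueueEvent above now sets model_config = ConfigDict(arbitrary_types_allowed=True), which lets subclasses declare fields of plain, non-Pydantic classes; Pydantic v2 then validates such fields with an isinstance check instead of a generated schema. A self-contained illustration (the Clock type is invented for the example):

from pydantic import BaseModel, ConfigDict


class Clock:
    """A plain class for which Pydantic cannot generate a schema."""


class Event(BaseModel):
    # Without arbitrary_types_allowed=True, declaring a Clock field raises
    # PydanticSchemaGenerationError at class-definition time.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    name: str
    clock: Clock | None = None


event = Event(name="tick", clock=Clock())  # clock is checked via isinstance()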
node_id: str node_type: NodeType - parallel_id: str | None = None in_iteration_id: str | None = None """iteration id if node is in iteration""" in_loop_id: str | None = None diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py index 31dc1eea89..72a92add04 100644 --- a/api/core/app/entities/task_entities.py +++ b/api/core/app/entities/task_entities.py @@ -257,13 +257,8 @@ class NodeStartStreamResponse(StreamResponse): inputs_truncated: bool = False created_at: int extras: dict[str, object] = Field(default_factory=dict) - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None iteration_id: str | None = None loop_id: str | None = None - parallel_run_id: str | None = None agent_strategy: AgentNodeStrategyInit | None = None event: StreamEvent = StreamEvent.NODE_STARTED @@ -285,10 +280,6 @@ class NodeStartStreamResponse(StreamResponse): "inputs": None, "created_at": self.data.created_at, "extras": {}, - "parallel_id": self.data.parallel_id, - "parallel_start_node_id": self.data.parallel_start_node_id, - "parent_parallel_id": self.data.parent_parallel_id, - "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id, "iteration_id": self.data.iteration_id, "loop_id": self.data.loop_id, }, @@ -324,10 +315,6 @@ class NodeFinishStreamResponse(StreamResponse): created_at: int finished_at: int files: Sequence[Mapping[str, Any]] | None = [] - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None iteration_id: str | None = None loop_id: str | None = None @@ -357,10 +344,6 @@ class NodeFinishStreamResponse(StreamResponse): "created_at": self.data.created_at, "finished_at": self.data.finished_at, "files": [], - "parallel_id": self.data.parallel_id, - "parallel_start_node_id": self.data.parallel_start_node_id, - "parent_parallel_id": self.data.parent_parallel_id, - "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id, "iteration_id": self.data.iteration_id, "loop_id": self.data.loop_id, }, @@ -396,10 +379,6 @@ class NodeRetryStreamResponse(StreamResponse): created_at: int finished_at: int files: Sequence[Mapping[str, Any]] | None = [] - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None iteration_id: str | None = None loop_id: str | None = None retry_index: int = 0 @@ -430,10 +409,6 @@ class NodeRetryStreamResponse(StreamResponse): "created_at": self.data.created_at, "finished_at": self.data.finished_at, "files": [], - "parallel_id": self.data.parallel_id, - "parallel_start_node_id": self.data.parallel_start_node_id, - "parent_parallel_id": self.data.parent_parallel_id, - "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id, "iteration_id": self.data.iteration_id, "loop_id": self.data.loop_id, "retry_index": self.data.retry_index, @@ -541,8 +516,6 @@ class LoopNodeStartStreamResponse(StreamResponse): metadata: Mapping = {} inputs: Mapping = {} inputs_truncated: bool = False - parallel_id: str | None = None - parallel_start_node_id: str | None = None event: StreamEvent = StreamEvent.LOOP_STARTED workflow_run_id: str @@ -567,9 +540,6 @@ class LoopNodeNextStreamResponse(StreamResponse): created_at: int pre_loop_output: Any = None extras: Mapping[str, object] = 
Field(default_factory=dict) - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parallel_mode_run_id: str | None = None event: StreamEvent = StreamEvent.LOOP_NEXT workflow_run_id: str @@ -603,8 +573,6 @@ class LoopNodeCompletedStreamResponse(StreamResponse): execution_metadata: Mapping[str, object] = Field(default_factory=dict) finished_at: int steps: int - parallel_id: str | None = None - parallel_start_node_id: str | None = None event: StreamEvent = StreamEvent.LOOP_COMPLETED workflow_run_id: str diff --git a/api/core/app/features/rate_limiting/rate_limit.py b/api/core/app/features/rate_limiting/rate_limit.py index ffa10cd43c..565905be0d 100644 --- a/api/core/app/features/rate_limiting/rate_limit.py +++ b/api/core/app/features/rate_limiting/rate_limit.py @@ -98,7 +98,7 @@ class RateLimit: else: return RateLimitGenerator( rate_limit=self, - generator=generator, # ty: ignore [invalid-argument-type] + generator=generator, request_id=request_id, ) diff --git a/api/core/app/task_pipeline/based_generate_task_pipeline.py b/api/core/app/task_pipeline/based_generate_task_pipeline.py index 45e3c0006b..26c7e60a4c 100644 --- a/api/core/app/task_pipeline/based_generate_task_pipeline.py +++ b/api/core/app/task_pipeline/based_generate_task_pipeline.py @@ -49,7 +49,7 @@ class BasedGenerateTaskPipeline: if isinstance(e, InvokeAuthorizationError): err = InvokeAuthorizationError("Incorrect API key provided") elif isinstance(e, InvokeError | ValueError): - err = e # ty: ignore [invalid-assignment] + err = e else: description = getattr(e, "description", None) err = Exception(description if description is not None else str(e)) diff --git a/api/core/app/task_pipeline/message_cycle_manager.py b/api/core/app/task_pipeline/message_cycle_manager.py index 0004fb592e..7a384e5c92 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -107,7 +107,6 @@ class MessageCycleManager: if dify_config.DEBUG: logger.exception("generate conversation name failed, conversation_id: %s", conversation_id) - db.session.merge(conversation) db.session.commit() db.session.close() diff --git a/api/core/datasource/__base/datasource_runtime.py b/api/core/datasource/__base/datasource_runtime.py index b7f280208a..c5d6c1d771 100644 --- a/api/core/datasource/__base/datasource_runtime.py +++ b/api/core/datasource/__base/datasource_runtime.py @@ -1,7 +1,6 @@ from typing import TYPE_CHECKING, Any, Optional -from openai import BaseModel -from pydantic import Field +from pydantic import BaseModel, Field # Import InvokeFrom locally to avoid circular import from core.app.entities.app_invoke_entities import InvokeFrom diff --git a/api/core/datasource/datasource_manager.py b/api/core/datasource/datasource_manager.py index 47d297e194..002415a7db 100644 --- a/api/core/datasource/datasource_manager.py +++ b/api/core/datasource/datasource_manager.py @@ -1,11 +1,9 @@ import logging from threading import Lock -from typing import Union import contexts from core.datasource.__base.datasource_plugin import DatasourcePlugin from core.datasource.__base.datasource_provider import DatasourcePluginProviderController -from core.datasource.entities.common_entities import I18nObject from core.datasource.entities.datasource_entities import DatasourceProviderType from core.datasource.errors import DatasourceProviderNotFoundError from core.datasource.local_file.local_file_provider import LocalFileDatasourcePluginProviderController @@ -18,11 +16,6 @@ logger = 
logging.getLogger(__name__) class DatasourceManager: - _builtin_provider_lock = Lock() - _hardcoded_providers: dict[str, DatasourcePluginProviderController] = {} - _builtin_providers_loaded = False - _builtin_tools_labels: dict[str, Union[I18nObject, None]] = {} - @classmethod def get_datasource_plugin_provider( cls, provider_id: str, tenant_id: str, datasource_type: DatasourceProviderType diff --git a/api/core/datasource/entities/api_entities.py b/api/core/datasource/entities/api_entities.py index cdefcc4506..1179537570 100644 --- a/api/core/datasource/entities/api_entities.py +++ b/api/core/datasource/entities/api_entities.py @@ -49,7 +49,7 @@ class DatasourceProviderApiEntity(BaseModel): for datasource in datasources: if datasource.get("parameters"): for parameter in datasource.get("parameters"): - if parameter.get("type") == DatasourceParameter.DatasourceParameterType.SYSTEM_FILES.value: + if parameter.get("type") == DatasourceParameter.DatasourceParameterType.SYSTEM_FILES: parameter["type"] = "files" # ------------- diff --git a/api/core/datasource/entities/common_entities.py b/api/core/datasource/entities/common_entities.py index ac36d83ae3..3c64632dbb 100644 --- a/api/core/datasource/entities/common_entities.py +++ b/api/core/datasource/entities/common_entities.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, model_validator class I18nObject(BaseModel): @@ -11,11 +11,12 @@ class I18nObject(BaseModel): pt_BR: str | None = Field(default=None) ja_JP: str | None = Field(default=None) - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _(self): self.zh_Hans = self.zh_Hans or self.en_US self.pt_BR = self.pt_BR or self.en_US self.ja_JP = self.ja_JP or self.en_US + return self def to_dict(self) -> dict: return {"zh_Hans": self.zh_Hans, "en_US": self.en_US, "pt_BR": self.pt_BR, "ja_JP": self.ja_JP} diff --git a/api/core/datasource/entities/datasource_entities.py b/api/core/datasource/entities/datasource_entities.py index ac4f51ac75..260dcf04f5 100644 --- a/api/core/datasource/entities/datasource_entities.py +++ b/api/core/datasource/entities/datasource_entities.py @@ -1,5 +1,5 @@ import enum -from enum import Enum +from enum import StrEnum from typing import Any from pydantic import BaseModel, Field, ValidationInfo, field_validator @@ -54,16 +54,16 @@ class DatasourceParameter(PluginParameter): removes TOOLS_SELECTOR from PluginParameterType """ - STRING = PluginParameterType.STRING.value - NUMBER = PluginParameterType.NUMBER.value - BOOLEAN = PluginParameterType.BOOLEAN.value - SELECT = PluginParameterType.SELECT.value - SECRET_INPUT = PluginParameterType.SECRET_INPUT.value - FILE = PluginParameterType.FILE.value - FILES = PluginParameterType.FILES.value + STRING = PluginParameterType.STRING + NUMBER = PluginParameterType.NUMBER + BOOLEAN = PluginParameterType.BOOLEAN + SELECT = PluginParameterType.SELECT + SECRET_INPUT = PluginParameterType.SECRET_INPUT + FILE = PluginParameterType.FILE + FILES = PluginParameterType.FILES # deprecated, should not use. 
- SYSTEM_FILES = PluginParameterType.SYSTEM_FILES.value + SYSTEM_FILES = PluginParameterType.SYSTEM_FILES def as_normal_type(self): return as_normal_type(self) @@ -218,7 +218,7 @@ class DatasourceLabel(BaseModel): icon: str = Field(..., description="The icon of the tool") -class DatasourceInvokeFrom(Enum): +class DatasourceInvokeFrom(StrEnum): """ Enum class for datasource invoke """ diff --git a/api/core/entities/mcp_provider.py b/api/core/entities/mcp_provider.py new file mode 100644 index 0000000000..4ac39cef02 --- /dev/null +++ b/api/core/entities/mcp_provider.py @@ -0,0 +1,328 @@ +import json +from datetime import datetime +from enum import StrEnum +from typing import TYPE_CHECKING, Any +from urllib.parse import urlparse + +from pydantic import BaseModel + +from configs import dify_config +from core.entities.provider_entities import BasicProviderConfig +from core.file import helpers as file_helpers +from core.helper import encrypter +from core.helper.provider_cache import NoOpProviderCredentialCache +from core.mcp.types import OAuthClientInformation, OAuthClientMetadata, OAuthTokens +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_entities import ToolProviderType +from core.tools.utils.encryption import create_provider_encrypter + +if TYPE_CHECKING: + from models.tools import MCPToolProvider + +# Constants +CLIENT_NAME = "Dify" +CLIENT_URI = "https://github.com/langgenius/dify" +DEFAULT_TOKEN_TYPE = "Bearer" +DEFAULT_EXPIRES_IN = 3600 +MASK_CHAR = "*" +MIN_UNMASK_LENGTH = 6 + + +class MCPSupportGrantType(StrEnum): + """The supported grant types for MCP""" + + AUTHORIZATION_CODE = "authorization_code" + CLIENT_CREDENTIALS = "client_credentials" + REFRESH_TOKEN = "refresh_token" + + +class MCPAuthentication(BaseModel): + client_id: str + client_secret: str | None = None + + +class MCPConfiguration(BaseModel): + timeout: float = 30 + sse_read_timeout: float = 300 + + +class MCPProviderEntity(BaseModel): + """MCP Provider domain entity for business logic operations""" + + # Basic identification + id: str + provider_id: str # server_identifier + name: str + tenant_id: str + user_id: str + + # Server connection info + server_url: str # encrypted URL + headers: dict[str, str] # encrypted headers + timeout: float + sse_read_timeout: float + + # Authentication related + authed: bool + credentials: dict[str, Any] # encrypted credentials + code_verifier: str | None = None # for OAuth + + # Tools and display info + tools: list[dict[str, Any]] # parsed tools list + icon: str | dict[str, str] # parsed icon + + # Timestamps + created_at: datetime + updated_at: datetime + + @classmethod + def from_db_model(cls, db_provider: "MCPToolProvider") -> "MCPProviderEntity": + """Create entity from database model with decryption""" + + return cls( + id=db_provider.id, + provider_id=db_provider.server_identifier, + name=db_provider.name, + tenant_id=db_provider.tenant_id, + user_id=db_provider.user_id, + server_url=db_provider.server_url, + headers=db_provider.headers, + timeout=db_provider.timeout, + sse_read_timeout=db_provider.sse_read_timeout, + authed=db_provider.authed, + credentials=db_provider.credentials, + tools=db_provider.tool_dict, + icon=db_provider.icon or "", + created_at=db_provider.created_at, + updated_at=db_provider.updated_at, + ) + + @property + def redirect_url(self) -> str: + """OAuth redirect URL""" + return dify_config.CONSOLE_API_URL + "/console/api/mcp/oauth/callback" + + @property + def client_metadata(self) -> OAuthClientMetadata: + 
"""Metadata about this OAuth client.""" + # Get grant type from credentials + credentials = self.decrypt_credentials() + + # Try to get grant_type from different locations + grant_type = credentials.get("grant_type", MCPSupportGrantType.AUTHORIZATION_CODE) + + # For nested structure, check if client_information has grant_types + if "client_information" in credentials and isinstance(credentials["client_information"], dict): + client_info = credentials["client_information"] + # If grant_types is specified in client_information, use it to determine grant_type + if "grant_types" in client_info and isinstance(client_info["grant_types"], list): + if "client_credentials" in client_info["grant_types"]: + grant_type = MCPSupportGrantType.CLIENT_CREDENTIALS + elif "authorization_code" in client_info["grant_types"]: + grant_type = MCPSupportGrantType.AUTHORIZATION_CODE + + # Configure based on grant type + is_client_credentials = grant_type == MCPSupportGrantType.CLIENT_CREDENTIALS + + grant_types = ["refresh_token"] + grant_types.append("client_credentials" if is_client_credentials else "authorization_code") + + response_types = [] if is_client_credentials else ["code"] + redirect_uris = [] if is_client_credentials else [self.redirect_url] + + return OAuthClientMetadata( + redirect_uris=redirect_uris, + token_endpoint_auth_method="none", + grant_types=grant_types, + response_types=response_types, + client_name=CLIENT_NAME, + client_uri=CLIENT_URI, + ) + + @property + def provider_icon(self) -> dict[str, str] | str: + """Get provider icon, handling both dict and string formats""" + if isinstance(self.icon, dict): + return self.icon + try: + return json.loads(self.icon) + except (json.JSONDecodeError, TypeError): + # If not JSON, assume it's a file path + return file_helpers.get_signed_file_url(self.icon) + + def to_api_response(self, user_name: str | None = None, include_sensitive: bool = True) -> dict[str, Any]: + """Convert to API response format + + Args: + user_name: User name to display + include_sensitive: If False, skip expensive decryption operations (for list view optimization) + """ + response = { + "id": self.id, + "author": user_name or "Anonymous", + "name": self.name, + "icon": self.provider_icon, + "type": ToolProviderType.MCP.value, + "is_team_authorization": self.authed, + "server_url": self.masked_server_url(), + "server_identifier": self.provider_id, + "updated_at": int(self.updated_at.timestamp()), + "label": I18nObject(en_US=self.name, zh_Hans=self.name).to_dict(), + "description": I18nObject(en_US="", zh_Hans="").to_dict(), + } + + # Add configuration + response["configuration"] = { + "timeout": str(self.timeout), + "sse_read_timeout": str(self.sse_read_timeout), + } + + # Skip expensive operations when sensitive data is not needed (e.g., list view) + if not include_sensitive: + response["masked_headers"] = {} + response["is_dynamic_registration"] = True + else: + # Add masked headers + response["masked_headers"] = self.masked_headers() + + # Add authentication info if available + masked_creds = self.masked_credentials() + if masked_creds: + response["authentication"] = masked_creds + response["is_dynamic_registration"] = self.credentials.get("client_information", {}).get( + "is_dynamic_registration", True + ) + + return response + + def retrieve_client_information(self) -> OAuthClientInformation | None: + """OAuth client information if available""" + credentials = self.decrypt_credentials() + if not credentials: + return None + + # Check if we have nested client_information 
structure + if "client_information" not in credentials: + return None + client_info_data = credentials["client_information"] + if isinstance(client_info_data, dict): + if "encrypted_client_secret" in client_info_data: + client_info_data["client_secret"] = encrypter.decrypt_token( + self.tenant_id, client_info_data["encrypted_client_secret"] + ) + return OAuthClientInformation.model_validate(client_info_data) + return None + + def retrieve_tokens(self) -> OAuthTokens | None: + """OAuth tokens if available""" + if not self.credentials: + return None + credentials = self.decrypt_credentials() + return OAuthTokens( + access_token=credentials.get("access_token", ""), + token_type=credentials.get("token_type", DEFAULT_TOKEN_TYPE), + expires_in=int(credentials.get("expires_in", str(DEFAULT_EXPIRES_IN)) or DEFAULT_EXPIRES_IN), + refresh_token=credentials.get("refresh_token", ""), + ) + + def masked_server_url(self) -> str: + """Masked server URL for display""" + parsed = urlparse(self.decrypt_server_url()) + if parsed.path and parsed.path != "/": + masked = parsed._replace(path="/******") + return masked.geturl() + return parsed.geturl() + + def _mask_value(self, value: str) -> str: + """Mask a sensitive value for display""" + if len(value) > MIN_UNMASK_LENGTH: + return value[:2] + MASK_CHAR * (len(value) - 4) + value[-2:] + else: + return MASK_CHAR * len(value) + + def masked_headers(self) -> dict[str, str]: + """Masked headers for display""" + return {key: self._mask_value(value) for key, value in self.decrypt_headers().items()} + + def masked_credentials(self) -> dict[str, str]: + """Masked credentials for display""" + credentials = self.decrypt_credentials() + if not credentials: + return {} + + masked = {} + + if "client_information" not in credentials or not isinstance(credentials["client_information"], dict): + return {} + client_info = credentials["client_information"] + # Mask sensitive fields from nested structure + if client_info.get("client_id"): + masked["client_id"] = self._mask_value(client_info["client_id"]) + if client_info.get("encrypted_client_secret"): + masked["client_secret"] = self._mask_value( + encrypter.decrypt_token(self.tenant_id, client_info["encrypted_client_secret"]) + ) + if client_info.get("client_secret"): + masked["client_secret"] = self._mask_value(client_info["client_secret"]) + return masked + + def decrypt_server_url(self) -> str: + """Decrypt server URL""" + return encrypter.decrypt_token(self.tenant_id, self.server_url) + + def _decrypt_dict(self, data: dict[str, Any]) -> dict[str, Any]: + """Generic method to decrypt dictionary fields""" + if not data: + return {} + + # Only decrypt fields that are actually encrypted + # For nested structures, client_information is not encrypted as a whole + encrypted_fields = [] + for key, value in data.items(): + # Skip nested objects - they are not encrypted + if isinstance(value, dict): + continue + # Only process string values that might be encrypted + if isinstance(value, str) and value: + encrypted_fields.append(key) + + if not encrypted_fields: + return data + + # Create dynamic config only for encrypted fields + config = [BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=key) for key in encrypted_fields] + + encrypter_instance, _ = create_provider_encrypter( + tenant_id=self.tenant_id, + config=config, + cache=NoOpProviderCredentialCache(), + ) + + # Decrypt only the encrypted fields + decrypted_data = encrypter_instance.decrypt({k: data[k] for k in encrypted_fields}) + + # Merge decrypted data 
with original data (preserving non-encrypted fields) + result = data.copy() + result.update(decrypted_data) + + return result + + def decrypt_headers(self) -> dict[str, Any]: + """Decrypt headers""" + return self._decrypt_dict(self.headers) + + def decrypt_credentials(self) -> dict[str, Any]: + """Decrypt credentials""" + return self._decrypt_dict(self.credentials) + + def decrypt_authentication(self) -> dict[str, Any]: + """Decrypt authentication""" + # Option 1: if headers is provided, use it and don't need to get token + headers = self.decrypt_headers() + + # Option 2: Add OAuth token if authed and no headers provided + if not self.headers and self.authed: + token = self.retrieve_tokens() + if token: + headers["Authorization"] = f"{token.token_type.capitalize()} {token.access_token}" + return headers diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index 111de89178..b10838f8c9 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -5,7 +5,7 @@ from collections import defaultdict from collections.abc import Iterator, Sequence from json import JSONDecodeError -from pydantic import BaseModel, ConfigDict, Field +from pydantic import BaseModel, ConfigDict, Field, model_validator from sqlalchemy import func, select from sqlalchemy.orm import Session @@ -73,9 +73,8 @@ class ProviderConfiguration(BaseModel): # pydantic configs model_config = ConfigDict(protected_namespaces=()) - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): if self.provider.provider not in original_provider_configurate_methods: original_provider_configurate_methods[self.provider.provider] = [] for configurate_method in self.provider.configurate_methods: @@ -90,6 +89,7 @@ class ProviderConfiguration(BaseModel): and ConfigurateMethod.PREDEFINED_MODEL not in self.provider.configurate_methods ): self.provider.configurate_methods.append(ConfigurateMethod.PREDEFINED_MODEL) + return self def get_current_credentials(self, model_type: ModelType, model: str) -> dict | None: """ @@ -207,7 +207,7 @@ class ProviderConfiguration(BaseModel): """ stmt = select(Provider).where( Provider.tenant_id == self.tenant_id, - Provider.provider_type == ProviderType.CUSTOM.value, + Provider.provider_type == ProviderType.CUSTOM, Provider.provider_name.in_(self._get_provider_names()), ) @@ -458,7 +458,7 @@ class ProviderConfiguration(BaseModel): provider_record = Provider( tenant_id=self.tenant_id, provider_name=self.provider.provider, - provider_type=ProviderType.CUSTOM.value, + provider_type=ProviderType.CUSTOM, is_valid=True, credential_id=new_record.id, ) @@ -472,6 +472,9 @@ class ProviderConfiguration(BaseModel): provider_model_credentials_cache.delete() self.switch_preferred_provider_type(provider_type=ProviderType.CUSTOM, session=session) + else: + # some historical data may have a provider record but not be set as valid + provider_record.is_valid = True session.commit() except Exception: @@ -1145,6 +1148,15 @@ class ProviderConfiguration(BaseModel): raise ValueError("Can't add same credential") provider_model_record.credential_id = credential_record.id provider_model_record.updated_at = naive_utc_now() + + # clear cache + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_model_record.id, + cache_type=ProviderCredentialsCacheType.MODEL, + ) + provider_model_credentials_cache.delete() + session.add(provider_model_record) 
session.commit() @@ -1178,6 +1190,14 @@ class ProviderConfiguration(BaseModel): session.add(provider_model_record) session.commit() + # clear cache + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_model_record.id, + cache_type=ProviderCredentialsCacheType.MODEL, + ) + provider_model_credentials_cache.delete() + def delete_custom_model(self, model_type: ModelType, model: str): """ Delete custom model. @@ -1414,7 +1434,7 @@ class ProviderConfiguration(BaseModel): """ secret_input_form_variables = [] for credential_form_schema in credential_form_schemas: - if credential_form_schema.type.value == FormType.SECRET_INPUT.value: + if credential_form_schema.type == FormType.SECRET_INPUT: secret_input_form_variables.append(credential_form_schema.variable) return secret_input_form_variables @@ -1848,7 +1868,7 @@ class ProviderConfigurations(BaseModel): if "/" not in key: key = str(ModelProviderID(key)) - return self.configurations.get(key, default) # type: ignore + return self.configurations.get(key, default) class ProviderModelBundle(BaseModel): diff --git a/api/core/extension/api_based_extension_requestor.py b/api/core/extension/api_based_extension_requestor.py index fab9ae44e9..f9e6099049 100644 --- a/api/core/extension/api_based_extension_requestor.py +++ b/api/core/extension/api_based_extension_requestor.py @@ -1,13 +1,13 @@ from typing import cast -import requests +import httpx from configs import dify_config from models.api_based_extension import APIBasedExtensionPoint class APIBasedExtensionRequestor: - timeout: tuple[int, int] = (5, 60) + timeout: httpx.Timeout = httpx.Timeout(60.0, connect=5.0) """timeout for request connect and read""" def __init__(self, api_endpoint: str, api_key: str): @@ -27,25 +27,23 @@ class APIBasedExtensionRequestor: url = self.api_endpoint try: - # proxy support for security - proxies = None + mounts: dict[str, httpx.BaseTransport] | None = None if dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL: - proxies = { - "http": dify_config.SSRF_PROXY_HTTP_URL, - "https": dify_config.SSRF_PROXY_HTTPS_URL, + mounts = { + "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL), + "https://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTPS_URL), } - response = requests.request( - method="POST", - url=url, - json={"point": point.value, "params": params}, - headers=headers, - timeout=self.timeout, - proxies=proxies, - ) - except requests.Timeout: + with httpx.Client(mounts=mounts, timeout=self.timeout) as client: + response = client.request( + method="POST", + url=url, + json={"point": point.value, "params": params}, + headers=headers, + ) + except httpx.TimeoutException: raise ValueError("request timeout") - except requests.ConnectionError: + except httpx.RequestError: raise ValueError("request connection error") if response.status_code != 200: diff --git a/api/core/helper/code_executor/code_executor.py b/api/core/helper/code_executor/code_executor.py index 0c1d03dc13..f92278f9e2 100644 --- a/api/core/helper/code_executor/code_executor.py +++ b/api/core/helper/code_executor/code_executor.py @@ -131,7 +131,7 @@ class CodeExecutor: if (code := response_data.get("code")) != 0: raise CodeExecutionError(f"Got error code: {code}. 
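A standalone sketch of the requests-to-httpx migration in api_based_extension_requestor.py: per-scheme transport mounts replace the requests proxies dict, and httpx.Timeout(60.0, connect=5.0) expresses the old (5, 60) connect/read tuple. The proxy URL and endpoint below are hypothetical:

import httpx

timeout = httpx.Timeout(60.0, connect=5.0)  # 60s default for read/write/pool, 5s to connect

mounts: dict[str, httpx.BaseTransport] = {
    # Each URL scheme is routed through its own (proxied) transport.
    "http://": httpx.HTTPTransport(proxy="http://proxy.local:3128"),   # hypothetical proxy
    "https://": httpx.HTTPTransport(proxy="http://proxy.local:3128"),
}

try:
    with httpx.Client(mounts=mounts, timeout=timeout) as client:
        response = client.post("https://api.example.com/hook", json={"point": "ping"})
except httpx.TimeoutException:
    ...  # replaces requests.Timeout
except httpx.RequestError:
    ...  # replaces requests.ConnectionError, but covers any transport error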
Got error msg: {response_data.get('message')}") - response_code = CodeExecutionResponse(**response_data) + response_code = CodeExecutionResponse.model_validate(response_data) if response_code.data.error: raise CodeExecutionError(response_code.data.error) diff --git a/api/core/helper/marketplace.py b/api/core/helper/marketplace.py index 10f304c087..bddb864a95 100644 --- a/api/core/helper/marketplace.py +++ b/api/core/helper/marketplace.py @@ -26,7 +26,7 @@ def batch_fetch_plugin_manifests(plugin_ids: list[str]) -> Sequence[MarketplaceP response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version}) response.raise_for_status() - return [MarketplacePluginDeclaration(**plugin) for plugin in response.json()["data"]["plugins"]] + return [MarketplacePluginDeclaration.model_validate(plugin) for plugin in response.json()["data"]["plugins"]] def batch_fetch_plugin_manifests_ignore_deserialization_error( @@ -41,7 +41,7 @@ def batch_fetch_plugin_manifests_ignore_deserialization_error( result: list[MarketplacePluginDeclaration] = [] for plugin in response.json()["data"]["plugins"]: try: - result.append(MarketplacePluginDeclaration(**plugin)) + result.append(MarketplacePluginDeclaration.model_validate(plugin)) except Exception: pass diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py index 6a2f27b8ba..2bada85582 100644 --- a/api/core/helper/module_import_helper.py +++ b/api/core/helper/module_import_helper.py @@ -20,7 +20,7 @@ def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_laz else: # Refer to: https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly # FIXME: mypy does not support the type of spec.loader - spec = importlib.util.spec_from_file_location(module_name, py_file_path) # type: ignore + spec = importlib.util.spec_from_file_location(module_name, py_file_path) # type: ignore[assignment] if not spec or not spec.loader: raise Exception(f"Failed to load module {module_name} from {py_file_path!r}") if use_lazy_loader: diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index ee37024260..36b38b7b45 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -20,7 +20,7 @@ from core.rag.cleaner.clean_processor import CleanProcessor from core.rag.datasource.keyword.keyword_factory import Keyword from core.rag.docstore.dataset_docstore import DatasetDocumentStore from core.rag.extractor.entity.datasource_type import DatasourceType -from core.rag.extractor.entity.extract_setting import ExtractSetting +from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.index_processor.index_processor_factory import IndexProcessorFactory @@ -49,62 +49,80 @@ class IndexingRunner: self.storage = storage self.model_manager = ModelManager() + def _handle_indexing_error(self, document_id: str, error: Exception) -> None: + """Handle indexing errors by updating document status.""" + logger.exception("consume document failed") + document = db.session.get(DatasetDocument, document_id) + if document: + document.indexing_status = "error" + error_message = getattr(error, "description", str(error)) + document.error = str(error_message) + document.stopped_at = naive_utc_now() + db.session.commit() + def run(self, dataset_documents: list[DatasetDocument]): 
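Several hunks above (code_executor.py, marketplace.py) replace Model(**data) with Model.model_validate(data). A small self-contained illustration; the model here is a hypothetical stand-in for MarketplacePluginDeclaration:

from pydantic import BaseModel

class PluginManifest(BaseModel):  # hypothetical stand-in
    name: str
    version: str

payload = {"name": "demo-plugin", "version": "0.1.0"}

# model_validate is pydantic v2's canonical entry point for untrusted payloads:
# it accepts any mapping (or, with from_attributes=True, arbitrary objects) and
# exposes strict= and context= options that Model(**payload) does not.
manifest = PluginManifest.model_validate(payload)
assert manifest.version == "0.1.0"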
"""Run the indexing process.""" for dataset_document in dataset_documents: + document_id = dataset_document.id try: + # Re-query the document to ensure it's bound to the current session + requeried_document = db.session.get(DatasetDocument, document_id) + if not requeried_document: + logger.warning("Document not found, skipping document id: %s", document_id) + continue + # get dataset - dataset = db.session.query(Dataset).filter_by(id=dataset_document.dataset_id).first() + dataset = db.session.query(Dataset).filter_by(id=requeried_document.dataset_id).first() if not dataset: raise ValueError("no dataset found") # get the process rule stmt = select(DatasetProcessRule).where( - DatasetProcessRule.id == dataset_document.dataset_process_rule_id + DatasetProcessRule.id == requeried_document.dataset_process_rule_id ) processing_rule = db.session.scalar(stmt) if not processing_rule: raise ValueError("no process rule found") - index_type = dataset_document.doc_form + index_type = requeried_document.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() # extract - text_docs = self._extract(index_processor, dataset_document, processing_rule.to_dict()) + text_docs = self._extract(index_processor, requeried_document, processing_rule.to_dict()) # transform documents = self._transform( - index_processor, dataset, text_docs, dataset_document.doc_language, processing_rule.to_dict() + index_processor, dataset, text_docs, requeried_document.doc_language, processing_rule.to_dict() ) # save segment - self._load_segments(dataset, dataset_document, documents) + self._load_segments(dataset, requeried_document, documents) # load self._load( index_processor=index_processor, dataset=dataset, - dataset_document=dataset_document, + dataset_document=requeried_document, documents=documents, ) except DocumentIsPausedError: - raise DocumentIsPausedError(f"Document paused, document id: {dataset_document.id}") + raise DocumentIsPausedError(f"Document paused, document id: {document_id}") except ProviderTokenNotInitError as e: - dataset_document.indexing_status = "error" - dataset_document.error = str(e.description) - dataset_document.stopped_at = naive_utc_now() - db.session.commit() + self._handle_indexing_error(document_id, e) except ObjectDeletedError: - logger.warning("Document deleted, document id: %s", dataset_document.id) + logger.warning("Document deleted, document id: %s", document_id) except Exception as e: - logger.exception("consume document failed") - dataset_document.indexing_status = "error" - dataset_document.error = str(e) - dataset_document.stopped_at = naive_utc_now() - db.session.commit() + self._handle_indexing_error(document_id, e) def run_in_splitting_status(self, dataset_document: DatasetDocument): """Run the indexing process when the index_status is splitting.""" + document_id = dataset_document.id try: + # Re-query the document to ensure it's bound to the current session + requeried_document = db.session.get(DatasetDocument, document_id) + if not requeried_document: + logger.warning("Document not found: %s", document_id) + return + # get dataset - dataset = db.session.query(Dataset).filter_by(id=dataset_document.dataset_id).first() + dataset = db.session.query(Dataset).filter_by(id=requeried_document.dataset_id).first() if not dataset: raise ValueError("no dataset found") @@ -112,57 +130,60 @@ class IndexingRunner: # get exist document_segment list and delete document_segments = ( db.session.query(DocumentSegment) - .filter_by(dataset_id=dataset.id, 
document_id=dataset_document.id) + .filter_by(dataset_id=dataset.id, document_id=requeried_document.id) .all() ) for document_segment in document_segments: db.session.delete(document_segment) - if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX: + if requeried_document.doc_form == IndexType.PARENT_CHILD_INDEX: # delete child chunks db.session.query(ChildChunk).where(ChildChunk.segment_id == document_segment.id).delete() db.session.commit() # get the process rule - stmt = select(DatasetProcessRule).where(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) + stmt = select(DatasetProcessRule).where(DatasetProcessRule.id == requeried_document.dataset_process_rule_id) processing_rule = db.session.scalar(stmt) if not processing_rule: raise ValueError("no process rule found") - index_type = dataset_document.doc_form + index_type = requeried_document.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() # extract - text_docs = self._extract(index_processor, dataset_document, processing_rule.to_dict()) + text_docs = self._extract(index_processor, requeried_document, processing_rule.to_dict()) # transform documents = self._transform( - index_processor, dataset, text_docs, dataset_document.doc_language, processing_rule.to_dict() + index_processor, dataset, text_docs, requeried_document.doc_language, processing_rule.to_dict() ) # save segment - self._load_segments(dataset, dataset_document, documents) + self._load_segments(dataset, requeried_document, documents) # load self._load( - index_processor=index_processor, dataset=dataset, dataset_document=dataset_document, documents=documents + index_processor=index_processor, + dataset=dataset, + dataset_document=requeried_document, + documents=documents, ) except DocumentIsPausedError: - raise DocumentIsPausedError(f"Document paused, document id: {dataset_document.id}") + raise DocumentIsPausedError(f"Document paused, document id: {document_id}") except ProviderTokenNotInitError as e: - dataset_document.indexing_status = "error" - dataset_document.error = str(e.description) - dataset_document.stopped_at = naive_utc_now() - db.session.commit() + self._handle_indexing_error(document_id, e) except Exception as e: - logger.exception("consume document failed") - dataset_document.indexing_status = "error" - dataset_document.error = str(e) - dataset_document.stopped_at = naive_utc_now() - db.session.commit() + self._handle_indexing_error(document_id, e) def run_in_indexing_status(self, dataset_document: DatasetDocument): """Run the indexing process when the index_status is indexing.""" + document_id = dataset_document.id try: + # Re-query the document to ensure it's bound to the current session + requeried_document = db.session.get(DatasetDocument, document_id) + if not requeried_document: + logger.warning("Document not found: %s", document_id) + return + # get dataset - dataset = db.session.query(Dataset).filter_by(id=dataset_document.dataset_id).first() + dataset = db.session.query(Dataset).filter_by(id=requeried_document.dataset_id).first() if not dataset: raise ValueError("no dataset found") @@ -170,7 +191,7 @@ class IndexingRunner: # get exist document_segment list and delete document_segments = ( db.session.query(DocumentSegment) - .filter_by(dataset_id=dataset.id, document_id=dataset_document.id) + .filter_by(dataset_id=dataset.id, document_id=requeried_document.id) .all() ) @@ -188,7 +209,7 @@ class IndexingRunner: "dataset_id": document_segment.dataset_id, }, ) - if dataset_document.doc_form == 
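The requeried_document pattern used throughout these hunks guards against ORM instances that arrive from another session (for example via a task queue) and are therefore detached: attribute access on an expired detached instance raises DetachedInstanceError, and mutations are not flushed by this session's commit. A minimal sketch of the same guard, assuming the codebase's DatasetDocument model and a live SQLAlchemy session:

from sqlalchemy.orm import Session

def mark_document_error(session: Session, document_id: str, error: Exception) -> None:
    # session.get() fetches by primary key and binds the row to *this* session.
    document = session.get(DatasetDocument, document_id)  # DatasetDocument: codebase model
    if document is None:
        return  # row deleted in the meantime; nothing to update
    document.indexing_status = "error"
    document.error = str(getattr(error, "description", error))
    session.commit()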
IndexType.PARENT_CHILD_INDEX: + if requeried_document.doc_form == IndexType.PARENT_CHILD_INDEX: child_chunks = document_segment.get_child_chunks() if child_chunks: child_documents = [] @@ -206,24 +227,20 @@ class IndexingRunner: document.children = child_documents documents.append(document) # build index - index_type = dataset_document.doc_form + index_type = requeried_document.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() self._load( - index_processor=index_processor, dataset=dataset, dataset_document=dataset_document, documents=documents + index_processor=index_processor, + dataset=dataset, + dataset_document=requeried_document, + documents=documents, ) except DocumentIsPausedError: - raise DocumentIsPausedError(f"Document paused, document id: {dataset_document.id}") + raise DocumentIsPausedError(f"Document paused, document id: {document_id}") except ProviderTokenNotInitError as e: - dataset_document.indexing_status = "error" - dataset_document.error = str(e.description) - dataset_document.stopped_at = naive_utc_now() - db.session.commit() + self._handle_indexing_error(document_id, e) except Exception as e: - logger.exception("consume document failed") - dataset_document.indexing_status = "error" - dataset_document.error = str(e) - dataset_document.stopped_at = naive_utc_now() - db.session.commit() + self._handle_indexing_error(document_id, e) def indexing_estimate( self, @@ -343,7 +360,7 @@ class IndexingRunner: if file_detail: extract_setting = ExtractSetting( - datasource_type=DatasourceType.FILE.value, + datasource_type=DatasourceType.FILE, upload_file=file_detail, document_model=dataset_document.doc_form, ) @@ -356,15 +373,17 @@ class IndexingRunner: ): raise ValueError("no notion import info found") extract_setting = ExtractSetting( - datasource_type=DatasourceType.NOTION.value, - notion_info={ - "credential_id": data_source_info["credential_id"], - "notion_workspace_id": data_source_info["notion_workspace_id"], - "notion_obj_id": data_source_info["notion_page_id"], - "notion_page_type": data_source_info["type"], - "document": dataset_document, - "tenant_id": dataset_document.tenant_id, - }, + datasource_type=DatasourceType.NOTION, + notion_info=NotionInfo.model_validate( + { + "credential_id": data_source_info["credential_id"], + "notion_workspace_id": data_source_info["notion_workspace_id"], + "notion_obj_id": data_source_info["notion_page_id"], + "notion_page_type": data_source_info["type"], + "document": dataset_document, + "tenant_id": dataset_document.tenant_id, + } + ), document_model=dataset_document.doc_form, ) text_docs = index_processor.extract(extract_setting, process_rule_mode=process_rule["mode"]) @@ -377,15 +396,17 @@ class IndexingRunner: ): raise ValueError("no website import info found") extract_setting = ExtractSetting( - datasource_type=DatasourceType.WEBSITE.value, - website_info={ - "provider": data_source_info["provider"], - "job_id": data_source_info["job_id"], - "tenant_id": dataset_document.tenant_id, - "url": data_source_info["url"], - "mode": data_source_info["mode"], - "only_main_content": data_source_info["only_main_content"], - }, + datasource_type=DatasourceType.WEBSITE, + website_info=WebsiteInfo.model_validate( + { + "provider": data_source_info["provider"], + "job_id": data_source_info["job_id"], + "tenant_id": dataset_document.tenant_id, + "url": data_source_info["url"], + "mode": data_source_info["mode"], + "only_main_content": data_source_info["only_main_content"], + } + ), 
document_model=dataset_document.doc_form, ) text_docs = index_processor.extract(extract_setting, process_rule_mode=process_rule["mode"]) @@ -394,7 +415,6 @@ class IndexingRunner: document_id=dataset_document.id, after_indexing_status="splitting", extra_update_params={ - DatasetDocument.word_count: sum(len(text_doc.page_content) for text_doc in text_docs), DatasetDocument.parsing_completed_at: naive_utc_now(), }, ) @@ -734,6 +754,7 @@ class IndexingRunner: extra_update_params={ DatasetDocument.cleaning_completed_at: cur_time, DatasetDocument.splitting_completed_at: cur_time, + DatasetDocument.word_count: sum(len(doc.page_content) for doc in documents), }, ) diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index e64ac25ab1..bd893b17f1 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -100,7 +100,7 @@ class LLMGenerator: return name @classmethod - def generate_suggested_questions_after_answer(cls, tenant_id: str, histories: str): + def generate_suggested_questions_after_answer(cls, tenant_id: str, histories: str) -> Sequence[str]: output_parser = SuggestedQuestionsAfterAnswerOutputParser() format_instructions = output_parser.get_format_instructions() @@ -119,6 +119,8 @@ class LLMGenerator: prompt_messages = [UserPromptMessage(content=prompt)] + questions: Sequence[str] = [] + try: response: LLMResult = model_instance.invoke_llm( prompt_messages=list(prompt_messages), diff --git a/api/core/llm_generator/output_parser/structured_output.py b/api/core/llm_generator/output_parser/structured_output.py index 1e302b7668..686529c3ca 100644 --- a/api/core/llm_generator/output_parser/structured_output.py +++ b/api/core/llm_generator/output_parser/structured_output.py @@ -224,8 +224,8 @@ def _handle_native_json_schema( # Set appropriate response format if required by the model for rule in rules: - if rule.name == "response_format" and ResponseFormat.JSON_SCHEMA.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON_SCHEMA.value + if rule.name == "response_format" and ResponseFormat.JSON_SCHEMA in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON_SCHEMA return model_parameters @@ -239,10 +239,10 @@ def _set_response_format(model_parameters: dict, rules: list): """ for rule in rules: if rule.name == "response_format": - if ResponseFormat.JSON.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON.value - elif ResponseFormat.JSON_OBJECT.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON_OBJECT.value + if ResponseFormat.JSON in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON + elif ResponseFormat.JSON_OBJECT in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON_OBJECT def _handle_prompt_based_schema( diff --git a/api/core/llm_generator/output_parser/suggested_questions_after_answer.py b/api/core/llm_generator/output_parser/suggested_questions_after_answer.py index e78859cc1a..eec771181f 100644 --- a/api/core/llm_generator/output_parser/suggested_questions_after_answer.py +++ b/api/core/llm_generator/output_parser/suggested_questions_after_answer.py @@ -1,17 +1,26 @@ import json +import logging import re +from collections.abc import Sequence from core.llm_generator.prompts import SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT +logger = logging.getLogger(__name__) + class SuggestedQuestionsAfterAnswerOutputParser: def get_format_instructions(self) -> 
str: return SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT - def parse(self, text: str): + def parse(self, text: str) -> Sequence[str]: action_match = re.search(r"\[.*?\]", text.strip(), re.DOTALL) + questions: list[str] = [] if action_match is not None: - json_obj = json.loads(action_match.group(0).strip()) - else: - json_obj = [] - return json_obj + try: + json_obj = json.loads(action_match.group(0).strip()) + except json.JSONDecodeError as exc: + logger.warning("Failed to decode suggested questions payload: %s", exc) + else: + if isinstance(json_obj, list): + questions = [question for question in json_obj if isinstance(question, str)] + return questions diff --git a/api/core/mcp/auth/auth_flow.py b/api/core/mcp/auth/auth_flow.py index 7d938a8a7d..951c22f6dd 100644 --- a/api/core/mcp/auth/auth_flow.py +++ b/api/core/mcp/auth/auth_flow.py @@ -6,11 +6,15 @@ import secrets import urllib.parse from urllib.parse import urljoin, urlparse -import httpx -from pydantic import BaseModel, ValidationError +from httpx import ConnectError, HTTPStatusError, RequestError +from pydantic import ValidationError -from core.mcp.auth.auth_provider import OAuthClientProvider +from core.entities.mcp_provider import MCPProviderEntity, MCPSupportGrantType +from core.helper import ssrf_proxy +from core.mcp.entities import AuthAction, AuthActionType, AuthResult, OAuthCallbackState +from core.mcp.error import MCPRefreshTokenError from core.mcp.types import ( + LATEST_PROTOCOL_VERSION, OAuthClientInformation, OAuthClientInformationFull, OAuthClientMetadata, @@ -19,21 +23,10 @@ from core.mcp.types import ( ) from extensions.ext_redis import redis_client -LATEST_PROTOCOL_VERSION = "1.0" OAUTH_STATE_EXPIRY_SECONDS = 5 * 60 # 5 minutes expiry OAUTH_STATE_REDIS_KEY_PREFIX = "oauth_state:" -class OAuthCallbackState(BaseModel): - provider_id: str - tenant_id: str - server_url: str - metadata: OAuthMetadata | None = None - client_information: OAuthClientInformation - code_verifier: str - redirect_uri: str - - def generate_pkce_challenge() -> tuple[str, str]: """Generate PKCE challenge and verifier.""" code_verifier = base64.urlsafe_b64encode(os.urandom(40)).decode("utf-8") @@ -80,8 +73,13 @@ def _retrieve_redis_state(state_key: str) -> OAuthCallbackState: raise ValueError(f"Invalid state parameter: {str(e)}") -def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackState: - """Handle the callback from the OAuth provider.""" +def handle_callback(state_key: str, authorization_code: str) -> tuple[OAuthCallbackState, OAuthTokens]: + """ + Handle the callback from the OAuth provider. + + Returns: + A tuple of (callback_state, tokens) that can be used by the caller to save data. 
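The generate_pkce_challenge helper shown in context above pairs a high-entropy verifier with its S256 challenge. A sketch of the standard RFC 7636 derivation, where the challenge is the base64url encoding (padding stripped) of the SHA-256 of the verifier; the 40-byte size matches the context line above:

import base64
import hashlib
import os

def generate_pkce_pair() -> tuple[str, str]:
    # Verifier: URL-safe random string, 43-128 characters per RFC 7636.
    code_verifier = base64.urlsafe_b64encode(os.urandom(40)).decode("utf-8").rstrip("=")
    # Challenge: BASE64URL(SHA256(verifier)) with '=' padding stripped.
    digest = hashlib.sha256(code_verifier.encode("ascii")).digest()
    code_challenge = base64.urlsafe_b64encode(digest).decode("utf-8").rstrip("=")
    return code_verifier, code_challenge

verifier, challenge = generate_pkce_pair()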
+ """ # Retrieve state data from Redis (state is automatically deleted after retrieval) full_state_data = _retrieve_redis_state(state_key) @@ -93,30 +91,32 @@ def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackSta full_state_data.code_verifier, full_state_data.redirect_uri, ) - provider = OAuthClientProvider(full_state_data.provider_id, full_state_data.tenant_id, for_list=True) - provider.save_tokens(tokens) - return full_state_data + + return full_state_data, tokens def check_support_resource_discovery(server_url: str) -> tuple[bool, str]: """Check if the server supports OAuth 2.0 Resource Discovery.""" - b_scheme, b_netloc, b_path, _, b_query, b_fragment = urlparse(server_url, "", True) - url_for_resource_discovery = f"{b_scheme}://{b_netloc}/.well-known/oauth-protected-resource{b_path}" + b_scheme, b_netloc, _, _, b_query, b_fragment = urlparse(server_url, "", True) + url_for_resource_discovery = f"{b_scheme}://{b_netloc}/.well-known/oauth-protected-resource" if b_query: url_for_resource_discovery += f"?{b_query}" if b_fragment: url_for_resource_discovery += f"#{b_fragment}" try: headers = {"MCP-Protocol-Version": LATEST_PROTOCOL_VERSION, "User-Agent": "Dify"} - response = httpx.get(url_for_resource_discovery, headers=headers) + response = ssrf_proxy.get(url_for_resource_discovery, headers=headers) if 200 <= response.status_code < 300: body = response.json() - if "authorization_server_url" in body: + # Support both singular and plural forms + if body.get("authorization_servers"): + return True, body["authorization_servers"][0] + elif body.get("authorization_server_url"): return True, body["authorization_server_url"][0] else: return False, "" return False, "" - except httpx.RequestError: + except RequestError: # Not support resource discovery, fall back to well-known OAuth metadata return False, "" @@ -126,27 +126,37 @@ def discover_oauth_metadata(server_url: str, protocol_version: str | None = None # First check if the server supports OAuth 2.0 Resource Discovery support_resource_discovery, oauth_discovery_url = check_support_resource_discovery(server_url) if support_resource_discovery: - url = oauth_discovery_url + # The oauth_discovery_url is the authorization server base URL + # Try the OAuth 2.0 authorization server metadata endpoint first, then fall back to OpenID Connect discovery + urls_to_try = [ + urljoin(oauth_discovery_url + "/", ".well-known/oauth-authorization-server"), + urljoin(oauth_discovery_url + "/", ".well-known/openid-configuration"), + ] else: - url = urljoin(server_url, "/.well-known/oauth-authorization-server") + urls_to_try = [urljoin(server_url, "/.well-known/oauth-authorization-server")] - try: - headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION} - response = httpx.get(url, headers=headers) - if response.status_code == 404: - return None - if not response.is_success: - raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata") - return OAuthMetadata.model_validate(response.json()) - except httpx.RequestError as e: - if isinstance(e, httpx.ConnectError): - response = httpx.get(url) + headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION} + + for url in urls_to_try: + try: + response = ssrf_proxy.get(url, headers=headers) if response.status_code == 404: - return None + continue if not response.is_success: - raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata") + response.raise_for_status() return OAuthMetadata.model_validate(response.json()) - except
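The two urljoin shapes above differ on purpose: joining a relative path against oauth_discovery_url + "/" preserves any base path of the authorization server, while joining an absolute "/.well-known/..." path against server_url discards the path. A quick illustration with hypothetical URLs:

from urllib.parse import urljoin

auth_server = "https://auth.example.com/tenant1"  # hypothetical discovered base
server_url = "https://mcp.example.com/sse"        # hypothetical MCP endpoint

print(urljoin(auth_server + "/", ".well-known/oauth-authorization-server"))
# https://auth.example.com/tenant1/.well-known/oauth-authorization-server

print(urljoin(server_url, "/.well-known/oauth-authorization-server"))
# https://mcp.example.com/.well-known/oauth-authorization-server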
(RequestError, HTTPStatusError) as e: + if isinstance(e, ConnectError): + response = ssrf_proxy.get(url) + if response.status_code == 404: + continue # Try next URL + if not response.is_success: + raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata") + return OAuthMetadata.model_validate(response.json()) + # For other errors, try next URL + continue + + return None # No metadata found def start_authorization( @@ -213,7 +223,7 @@ def exchange_authorization( redirect_uri: str, ) -> OAuthTokens: """Exchanges an authorization code for an access token.""" - grant_type = "authorization_code" + grant_type = MCPSupportGrantType.AUTHORIZATION_CODE.value if metadata: token_url = metadata.token_endpoint @@ -233,7 +243,7 @@ def exchange_authorization( if client_information.client_secret: params["client_secret"] = client_information.client_secret - response = httpx.post(token_url, data=params) + response = ssrf_proxy.post(token_url, data=params) if not response.is_success: raise ValueError(f"Token exchange failed: HTTP {response.status_code}") return OAuthTokens.model_validate(response.json()) @@ -246,7 +256,7 @@ def refresh_authorization( refresh_token: str, ) -> OAuthTokens: """Exchange a refresh token for an updated access token.""" - grant_type = "refresh_token" + grant_type = MCPSupportGrantType.REFRESH_TOKEN.value if metadata: token_url = metadata.token_endpoint @@ -263,10 +273,55 @@ def refresh_authorization( if client_information.client_secret: params["client_secret"] = client_information.client_secret - - response = httpx.post(token_url, data=params) + try: + response = ssrf_proxy.post(token_url, data=params) + except ssrf_proxy.MaxRetriesExceededError as e: + raise MCPRefreshTokenError(e) from e if not response.is_success: - raise ValueError(f"Token refresh failed: HTTP {response.status_code}") + raise MCPRefreshTokenError(response.text) + return OAuthTokens.model_validate(response.json()) + + +def client_credentials_flow( + server_url: str, + metadata: OAuthMetadata | None, + client_information: OAuthClientInformation, + scope: str | None = None, +) -> OAuthTokens: + """Execute Client Credentials Flow to get access token.""" + grant_type = MCPSupportGrantType.CLIENT_CREDENTIALS.value + + if metadata: + token_url = metadata.token_endpoint + if metadata.grant_types_supported and grant_type not in metadata.grant_types_supported: + raise ValueError(f"Incompatible auth server: does not support grant type {grant_type}") + else: + token_url = urljoin(server_url, "/token") + + # Support both Basic Auth and body parameters for client authentication + headers = {"Content-Type": "application/x-www-form-urlencoded"} + data = {"grant_type": grant_type} + + if scope: + data["scope"] = scope + + # If client_secret is provided, use Basic Auth (preferred method) + if client_information.client_secret: + credentials = f"{client_information.client_id}:{client_information.client_secret}" + encoded_credentials = base64.b64encode(credentials.encode()).decode() + headers["Authorization"] = f"Basic {encoded_credentials}" + else: + # Public client without a secret: fall back to sending only client_id in the body + data["client_id"] = client_information.client_id + + response = ssrf_proxy.post(token_url, headers=headers, data=data) + if not response.is_success: + raise ValueError( + f"Client credentials token request failed: HTTP {response.status_code}, Response: {response.text}" + ) + return
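client_credentials_flow above prefers HTTP Basic authentication when a secret is available; per RFC 6749 section 2.3.1 the header is simply base64 over "client_id:client_secret". A sketch with hypothetical credentials:

import base64

client_id = "my-client"    # hypothetical
client_secret = "s3cr3t"   # hypothetical

auth_header = "Basic " + base64.b64encode(f"{client_id}:{client_secret}".encode()).decode()

headers = {
    "Content-Type": "application/x-www-form-urlencoded",
    "Authorization": auth_header,
}
data = {"grant_type": "client_credentials", "scope": "tools:read"}  # scope is optional
# POST headers/data to the token endpoint; the JSON response carries access_token.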
OAuthTokens.model_validate(response.json()) @@ -283,7 +338,7 @@ def register_client( else: registration_url = urljoin(server_url, "/register") - response = httpx.post( + response = ssrf_proxy.post( registration_url, json=client_metadata.model_dump(), headers={"Content-Type": "application/json"}, @@ -294,28 +349,111 @@ def register_client( def auth( - provider: OAuthClientProvider, - server_url: str, + provider: MCPProviderEntity, authorization_code: str | None = None, state_param: str | None = None, - for_list: bool = False, -) -> dict[str, str]: - """Orchestrates the full auth flow with a server using secure Redis state storage.""" - metadata = discover_oauth_metadata(server_url) +) -> AuthResult: + """ + Orchestrates the full auth flow with a server using secure Redis state storage. + + This function performs only network operations and returns actions that need + to be performed by the caller (such as saving data to database). + + Args: + provider: The MCP provider entity + authorization_code: Optional authorization code from OAuth callback + state_param: Optional state parameter from OAuth callback + + Returns: + AuthResult containing actions to be performed and response data + """ + actions: list[AuthAction] = [] + server_url = provider.decrypt_server_url() + server_metadata = discover_oauth_metadata(server_url) + client_metadata = provider.client_metadata + provider_id = provider.id + tenant_id = provider.tenant_id + client_information = provider.retrieve_client_information() + redirect_url = provider.redirect_url + + # Determine grant type based on server metadata + if not server_metadata: + raise ValueError("Failed to discover OAuth metadata from server") + + supported_grant_types = server_metadata.grant_types_supported or [] + + # Convert to lowercase for comparison + supported_grant_types_lower = [gt.lower() for gt in supported_grant_types] + + # Determine which grant type to use + effective_grant_type = None + if MCPSupportGrantType.AUTHORIZATION_CODE.value in supported_grant_types_lower: + effective_grant_type = MCPSupportGrantType.AUTHORIZATION_CODE.value + else: + effective_grant_type = MCPSupportGrantType.CLIENT_CREDENTIALS.value + + # Get stored credentials + credentials = provider.decrypt_credentials() - # Handle client registration if needed - client_information = provider.client_information() if not client_information: if authorization_code is not None: raise ValueError("Existing OAuth client information is required when exchanging an authorization code") + + # For client credentials flow, we don't need to register client dynamically + if effective_grant_type == MCPSupportGrantType.CLIENT_CREDENTIALS.value: + # Client should provide client_id and client_secret directly + raise ValueError("Client credentials flow requires client_id and client_secret to be provided") + try: - full_information = register_client(server_url, metadata, provider.client_metadata) - except httpx.RequestError as e: + full_information = register_client(server_url, server_metadata, client_metadata) + except RequestError as e: raise ValueError(f"Could not register OAuth client: {e}") - provider.save_client_information(full_information) + + # Return action to save client information + actions.append( + AuthAction( + action_type=AuthActionType.SAVE_CLIENT_INFO, + data={"client_information": full_information.model_dump()}, + provider_id=provider_id, + tenant_id=tenant_id, + ) + ) + client_information = full_information - # Exchange authorization code for tokens + # Handle client credentials flow + if 
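The grant selection above reduces to: use authorization_code when the server advertises it, otherwise fall back to client_credentials, matching case-insensitively against the metadata. A compact restatement with plain strings standing in for the MCPSupportGrantType values:

def choose_grant_type(grant_types_supported: list[str] | None) -> str:
    # An absent or empty list falls through to client_credentials,
    # mirroring the flow above.
    supported = [gt.lower() for gt in (grant_types_supported or [])]
    if "authorization_code" in supported:
        return "authorization_code"
    return "client_credentials"

assert choose_grant_type(["authorization_code", "refresh_token"]) == "authorization_code"
assert choose_grant_type(None) == "client_credentials"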
effective_grant_type == MCPSupportGrantType.CLIENT_CREDENTIALS.value: + # Direct token request without user interaction + try: + scope = credentials.get("scope") + tokens = client_credentials_flow( + server_url, + server_metadata, + client_information, + scope, + ) + + # Return action to save tokens and grant type + token_data = tokens.model_dump() + token_data["grant_type"] = MCPSupportGrantType.CLIENT_CREDENTIALS.value + + actions.append( + AuthAction( + action_type=AuthActionType.SAVE_TOKENS, + data=token_data, + provider_id=provider_id, + tenant_id=tenant_id, + ) + ) + + return AuthResult(actions=actions, response={"result": "success"}) + except (RequestError, ValueError, KeyError) as e: + # RequestError: HTTP request failed + # ValueError: Invalid response data + # KeyError: Missing required fields in response + raise ValueError(f"Client credentials flow failed: {e}") + + # Exchange authorization code for tokens (Authorization Code flow) if authorization_code is not None: if not state_param: raise ValueError("State parameter is required when exchanging authorization code") @@ -335,35 +473,69 @@ def auth( tokens = exchange_authorization( server_url, - metadata, + server_metadata, client_information, authorization_code, code_verifier, redirect_uri, ) - provider.save_tokens(tokens) - return {"result": "success"} - provider_tokens = provider.tokens() + # Return action to save tokens + actions.append( + AuthAction( + action_type=AuthActionType.SAVE_TOKENS, + data=tokens.model_dump(), + provider_id=provider_id, + tenant_id=tenant_id, + ) + ) + + return AuthResult(actions=actions, response={"result": "success"}) + + provider_tokens = provider.retrieve_tokens() # Handle token refresh or new authorization if provider_tokens and provider_tokens.refresh_token: try: - new_tokens = refresh_authorization(server_url, metadata, client_information, provider_tokens.refresh_token) - provider.save_tokens(new_tokens) - return {"result": "success"} - except Exception as e: + new_tokens = refresh_authorization( + server_url, server_metadata, client_information, provider_tokens.refresh_token + ) + + # Return action to save new tokens + actions.append( + AuthAction( + action_type=AuthActionType.SAVE_TOKENS, + data=new_tokens.model_dump(), + provider_id=provider_id, + tenant_id=tenant_id, + ) + ) + + return AuthResult(actions=actions, response={"result": "success"}) + except (RequestError, ValueError, KeyError) as e: + # RequestError: HTTP request failed + # ValueError: Invalid response data + # KeyError: Missing required fields in response raise ValueError(f"Could not refresh OAuth tokens: {e}") - # Start new authorization flow + # Start new authorization flow (only for authorization code flow) authorization_url, code_verifier = start_authorization( server_url, - metadata, + server_metadata, client_information, - provider.redirect_url, - provider.mcp_provider.id, - provider.mcp_provider.tenant_id, + redirect_url, + provider_id, + tenant_id, ) - provider.save_code_verifier(code_verifier) - return {"authorization_url": authorization_url} + # Return action to save code verifier + actions.append( + AuthAction( + action_type=AuthActionType.SAVE_CODE_VERIFIER, + data={"code_verifier": code_verifier}, + provider_id=provider_id, + tenant_id=tenant_id, + ) + ) + + return AuthResult(actions=actions, response={"authorization_url": authorization_url}) diff --git a/api/core/mcp/auth/auth_provider.py b/api/core/mcp/auth/auth_provider.py deleted file mode 100644 index 3a550eb1b6..0000000000 --- 
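auth() is now a pure network function: instead of writing to the database itself, it returns AuthAction items for the caller to persist. A sketch of caller-side dispatch; the service object and its method names are hypothetical stand-ins for whatever persistence layer applies the actions:

from core.mcp.entities import AuthAction, AuthActionType, AuthResult  # defined later in this diff

def apply_auth_actions(service, result: AuthResult) -> dict[str, str]:
    # Persist each action in order; auth() itself never touches the database.
    for action in result.actions:
        if action.action_type == AuthActionType.SAVE_CLIENT_INFO:
            service.save_client_information(action.provider_id, action.data["client_information"])
        elif action.action_type == AuthActionType.SAVE_TOKENS:
            service.save_tokens(action.provider_id, action.data)
        elif action.action_type == AuthActionType.SAVE_CODE_VERIFIER:
            service.save_code_verifier(action.provider_id, action.data["code_verifier"])
    return result.response  # {"result": "success"} or {"authorization_url": ...}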
a/api/core/mcp/auth/auth_provider.py +++ /dev/null @@ -1,77 +0,0 @@ -from configs import dify_config -from core.mcp.types import ( - OAuthClientInformation, - OAuthClientInformationFull, - OAuthClientMetadata, - OAuthTokens, -) -from models.tools import MCPToolProvider -from services.tools.mcp_tools_manage_service import MCPToolManageService - - -class OAuthClientProvider: - mcp_provider: MCPToolProvider - - def __init__(self, provider_id: str, tenant_id: str, for_list: bool = False): - if for_list: - self.mcp_provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id) - else: - self.mcp_provider = MCPToolManageService.get_mcp_provider_by_server_identifier(provider_id, tenant_id) - - @property - def redirect_url(self) -> str: - """The URL to redirect the user agent to after authorization.""" - return dify_config.CONSOLE_API_URL + "/console/api/mcp/oauth/callback" - - @property - def client_metadata(self) -> OAuthClientMetadata: - """Metadata about this OAuth client.""" - return OAuthClientMetadata( - redirect_uris=[self.redirect_url], - token_endpoint_auth_method="none", - grant_types=["authorization_code", "refresh_token"], - response_types=["code"], - client_name="Dify", - client_uri="https://github.com/langgenius/dify", - ) - - def client_information(self) -> OAuthClientInformation | None: - """Loads information about this OAuth client.""" - client_information = self.mcp_provider.decrypted_credentials.get("client_information", {}) - if not client_information: - return None - return OAuthClientInformation.model_validate(client_information) - - def save_client_information(self, client_information: OAuthClientInformationFull): - """Saves client information after dynamic registration.""" - MCPToolManageService.update_mcp_provider_credentials( - self.mcp_provider, - {"client_information": client_information.model_dump()}, - ) - - def tokens(self) -> OAuthTokens | None: - """Loads any existing OAuth tokens for the current session.""" - credentials = self.mcp_provider.decrypted_credentials - if not credentials: - return None - return OAuthTokens( - access_token=credentials.get("access_token", ""), - token_type=credentials.get("token_type", "Bearer"), - expires_in=int(credentials.get("expires_in", "3600") or 3600), - refresh_token=credentials.get("refresh_token", ""), - ) - - def save_tokens(self, tokens: OAuthTokens): - """Stores new OAuth tokens for the current session.""" - # update mcp provider credentials - token_dict = tokens.model_dump() - MCPToolManageService.update_mcp_provider_credentials(self.mcp_provider, token_dict, authed=True) - - def save_code_verifier(self, code_verifier: str): - """Saves a PKCE code verifier for the current session.""" - MCPToolManageService.update_mcp_provider_credentials(self.mcp_provider, {"code_verifier": code_verifier}) - - def code_verifier(self) -> str: - """Loads the PKCE code verifier for the current session.""" - # get code verifier from mcp provider credentials - return str(self.mcp_provider.decrypted_credentials.get("code_verifier", "")) diff --git a/api/core/mcp/auth_client.py b/api/core/mcp/auth_client.py new file mode 100644 index 0000000000..942c8d3c23 --- /dev/null +++ b/api/core/mcp/auth_client.py @@ -0,0 +1,191 @@ +""" +MCP Client with Authentication Retry Support + +This module provides an enhanced MCPClient that automatically handles +authentication failures and retries operations after refreshing tokens. 
+""" + +import logging +from collections.abc import Callable +from typing import Any + +from sqlalchemy.orm import Session + +from core.entities.mcp_provider import MCPProviderEntity +from core.mcp.error import MCPAuthError +from core.mcp.mcp_client import MCPClient +from core.mcp.types import CallToolResult, Tool +from extensions.ext_database import db + +logger = logging.getLogger(__name__) + + +class MCPClientWithAuthRetry(MCPClient): + """ + An enhanced MCPClient that provides automatic authentication retry. + + This class extends MCPClient and intercepts MCPAuthError exceptions + to refresh authentication before retrying failed operations. + + Note: This class uses lazy session creation - database sessions are only + created when authentication retry is actually needed, not on every request. + """ + + def __init__( + self, + server_url: str, + headers: dict[str, str] | None = None, + timeout: float | None = None, + sse_read_timeout: float | None = None, + provider_entity: MCPProviderEntity | None = None, + authorization_code: str | None = None, + by_server_id: bool = False, + ): + """ + Initialize the MCP client with auth retry capability. + + Args: + server_url: The MCP server URL + headers: Optional headers for requests + timeout: Request timeout + sse_read_timeout: SSE read timeout + provider_entity: Provider entity for authentication + authorization_code: Optional authorization code for initial auth + by_server_id: Whether to look up provider by server ID + """ + super().__init__(server_url, headers, timeout, sse_read_timeout) + + self.provider_entity = provider_entity + self.authorization_code = authorization_code + self.by_server_id = by_server_id + self._has_retried = False + + def _handle_auth_error(self, error: MCPAuthError) -> None: + """ + Handle authentication error by refreshing tokens. + + This method creates a short-lived database session only when authentication + retry is needed, minimizing database connection hold time. 
+ + Args: + error: The authentication error + + Raises: + MCPAuthError: If authentication fails or max retries reached + """ + if not self.provider_entity: + raise error + if self._has_retried: + raise error + + self._has_retried = True + + try: + # Create a temporary session only for auth retry + # This session is short-lived and only exists during the auth operation + + from services.tools.mcp_tools_manage_service import MCPToolManageService + + with Session(db.engine) as session, session.begin(): + mcp_service = MCPToolManageService(session=session) + + # Perform authentication using the service's auth method + mcp_service.auth_with_actions(self.provider_entity, self.authorization_code) + + # Retrieve new tokens + self.provider_entity = mcp_service.get_provider_entity( + self.provider_entity.id, self.provider_entity.tenant_id, by_server_id=self.by_server_id + ) + + # Session is closed here, before we update headers + token = self.provider_entity.retrieve_tokens() + if not token: + raise MCPAuthError("Authentication failed - no token received") + + # Update headers with new token + self.headers["Authorization"] = f"{token.token_type.capitalize()} {token.access_token}" + + # Clear authorization code after first use + self.authorization_code = None + + except MCPAuthError: + # Re-raise MCPAuthError as is + raise + except Exception as e: + # Catch all exceptions during auth retry + logger.exception("Authentication retry failed") + raise MCPAuthError(f"Authentication retry failed: {e}") from e + + def _execute_with_retry(self, func: Callable[..., Any], *args, **kwargs) -> Any: + """ + Execute a function with authentication retry logic. + + Args: + func: The function to execute + *args: Positional arguments for the function + **kwargs: Keyword arguments for the function + + Returns: + The result of the function call + + Raises: + MCPAuthError: If authentication fails after retries + Any other exceptions from the function + """ + try: + return func(*args, **kwargs) + except MCPAuthError as e: + self._handle_auth_error(e) + + # Re-initialize the connection with new headers + if self._initialized: + # Clean up existing connection + self._exit_stack.close() + self._session = None + self._initialized = False + + # Re-initialize with new headers + self._initialize() + self._initialized = True + + return func(*args, **kwargs) + finally: + # Reset retry flag after operation completes + self._has_retried = False + + def __enter__(self): + """Enter the context manager with retry support.""" + + def initialize_with_retry(): + super(MCPClientWithAuthRetry, self).__enter__() + return self + + return self._execute_with_retry(initialize_with_retry) + + def list_tools(self) -> list[Tool]: + """ + List available tools from the MCP server with auth retry. + + Returns: + List of available tools + + Raises: + MCPAuthError: If authentication fails after retries + """ + return self._execute_with_retry(super().list_tools) + + def invoke_tool(self, tool_name: str, tool_args: dict[str, Any]) -> CallToolResult: + """ + Invoke a tool on the MCP server with auth retry. 
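Stripped of the session management, _execute_with_retry above is a retry-once pattern: on the first auth failure, refresh credentials and replay the call; a second failure propagates. A distilled sketch, using PermissionError as a stand-in for MCPAuthError so it runs standalone:

from collections.abc import Callable
from typing import Any

def call_with_auth_retry(
    func: Callable[..., Any],
    refresh: Callable[[], None],
    *args: Any,
    **kwargs: Any,
) -> Any:
    try:
        return func(*args, **kwargs)
    except PermissionError:  # stand-in for MCPAuthError
        refresh()            # e.g. re-run the OAuth flow and update headers
        return func(*args, **kwargs)  # a second failure propagates unchanged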
+ + Args: + tool_name: Name of the tool to invoke + tool_args: Arguments for the tool + + Returns: + Result of the tool invocation + + Raises: + MCPAuthError: If authentication fails after retries + """ + return self._execute_with_retry(super().invoke_tool, tool_name, tool_args) diff --git a/docker/volumes/sandbox/dependencies/python-requirements.txt b/api/core/mcp/auth_client_comparison.md similarity index 100% rename from docker/volumes/sandbox/dependencies/python-requirements.txt rename to api/core/mcp/auth_client_comparison.md diff --git a/api/core/mcp/client/sse_client.py b/api/core/mcp/client/sse_client.py index 6db22a09e0..2d5e3dd263 100644 --- a/api/core/mcp/client/sse_client.py +++ b/api/core/mcp/client/sse_client.py @@ -46,7 +46,7 @@ class SSETransport: url: str, headers: dict[str, Any] | None = None, timeout: float = 5.0, - sse_read_timeout: float = 5 * 60, + sse_read_timeout: float = 1 * 60, ): """Initialize the SSE transport. @@ -255,7 +255,7 @@ def sse_client( url: str, headers: dict[str, Any] | None = None, timeout: float = 5.0, - sse_read_timeout: float = 5 * 60, + sse_read_timeout: float = 1 * 60, ) -> Generator[tuple[ReadQueue, WriteQueue], None, None]: """ Client transport for SSE. @@ -276,31 +276,34 @@ def sse_client( read_queue: ReadQueue | None = None write_queue: WriteQueue | None = None - with ThreadPoolExecutor() as executor: - try: - with create_ssrf_proxy_mcp_http_client(headers=transport.headers) as client: - with ssrf_proxy_sse_connect( - url, timeout=httpx.Timeout(timeout, read=sse_read_timeout), client=client - ) as event_source: - event_source.response.raise_for_status() + executor = ThreadPoolExecutor() + try: + with create_ssrf_proxy_mcp_http_client(headers=transport.headers) as client: + with ssrf_proxy_sse_connect( + url, timeout=httpx.Timeout(timeout, read=sse_read_timeout), client=client + ) as event_source: + event_source.response.raise_for_status() - read_queue, write_queue = transport.connect(executor, client, event_source) + read_queue, write_queue = transport.connect(executor, client, event_source) - yield read_queue, write_queue + yield read_queue, write_queue - except httpx.HTTPStatusError as exc: - if exc.response.status_code == 401: - raise MCPAuthError() - raise MCPConnectionError() - except Exception: - logger.exception("Error connecting to SSE endpoint") - raise - finally: - # Clean up queues - if read_queue: - read_queue.put(None) - if write_queue: - write_queue.put(None) + except httpx.HTTPStatusError as exc: + if exc.response.status_code == 401: + raise MCPAuthError() + raise MCPConnectionError() + except Exception: + logger.exception("Error connecting to SSE endpoint") + raise + finally: + # Clean up queues + if read_queue: + read_queue.put(None) + if write_queue: + write_queue.put(None) + + # Shutdown executor without waiting to prevent hanging + executor.shutdown(wait=False) def send_message(http_client: httpx.Client, endpoint_url: str, session_message: SessionMessage): diff --git a/api/core/mcp/client/streamable_client.py b/api/core/mcp/client/streamable_client.py index 7eafa79837..a9c1eda624 100644 --- a/api/core/mcp/client/streamable_client.py +++ b/api/core/mcp/client/streamable_client.py @@ -434,45 +434,48 @@ def streamablehttp_client( server_to_client_queue: ServerToClientQueue = queue.Queue() # For messages FROM server TO client client_to_server_queue: ClientToServerQueue = queue.Queue() # For messages FROM client TO server - with ThreadPoolExecutor(max_workers=2) as executor: - try: - with create_ssrf_proxy_mcp_http_client( - 
headers=transport.request_headers, - timeout=httpx.Timeout(transport.timeout, read=transport.sse_read_timeout), - ) as client: - # Define callbacks that need access to thread pool - def start_get_stream(): - """Start a worker thread to handle server-initiated messages.""" - executor.submit(transport.handle_get_stream, client, server_to_client_queue) + executor = ThreadPoolExecutor(max_workers=2) + try: + with create_ssrf_proxy_mcp_http_client( + headers=transport.request_headers, + timeout=httpx.Timeout(transport.timeout, read=transport.sse_read_timeout), + ) as client: + # Define callbacks that need access to thread pool + def start_get_stream(): + """Start a worker thread to handle server-initiated messages.""" + executor.submit(transport.handle_get_stream, client, server_to_client_queue) - # Start the post_writer worker thread - executor.submit( - transport.post_writer, - client, - client_to_server_queue, # Queue for messages FROM client TO server - server_to_client_queue, # Queue for messages FROM server TO client - start_get_stream, - ) + # Start the post_writer worker thread + executor.submit( + transport.post_writer, + client, + client_to_server_queue, # Queue for messages FROM client TO server + server_to_client_queue, # Queue for messages FROM server TO client + start_get_stream, + ) - try: - yield ( - server_to_client_queue, # Queue for receiving messages FROM server - client_to_server_queue, # Queue for sending messages TO server - transport.get_session_id, - ) - finally: - if transport.session_id and terminate_on_close: - transport.terminate_session(client) - - # Signal threads to stop - client_to_server_queue.put(None) - finally: - # Clear any remaining items and add None sentinel to unblock any waiting threads try: - while not client_to_server_queue.empty(): - client_to_server_queue.get_nowait() - except queue.Empty: - pass + yield ( + server_to_client_queue, # Queue for receiving messages FROM server + client_to_server_queue, # Queue for sending messages TO server + transport.get_session_id, + ) + finally: + if transport.session_id and terminate_on_close: + transport.terminate_session(client) - client_to_server_queue.put(None) - server_to_client_queue.put(None) + # Signal threads to stop + client_to_server_queue.put(None) + finally: + # Clear any remaining items and add None sentinel to unblock any waiting threads + try: + while not client_to_server_queue.empty(): + client_to_server_queue.get_nowait() + except queue.Empty: + pass + + client_to_server_queue.put(None) + server_to_client_queue.put(None) + + # Shutdown executor without waiting to prevent hanging + executor.shutdown(wait=False) diff --git a/api/core/mcp/entities.py b/api/core/mcp/entities.py index 7553c10a2e..08823daab1 100644 --- a/api/core/mcp/entities.py +++ b/api/core/mcp/entities.py @@ -1,10 +1,13 @@ from dataclasses import dataclass +from enum import StrEnum from typing import Any, Generic, TypeVar -from core.mcp.session.base_session import BaseSession -from core.mcp.types import LATEST_PROTOCOL_VERSION, RequestId, RequestParams +from pydantic import BaseModel -SUPPORTED_PROTOCOL_VERSIONS: list[str] = ["2024-11-05", LATEST_PROTOCOL_VERSION] +from core.mcp.session.base_session import BaseSession +from core.mcp.types import LATEST_PROTOCOL_VERSION, OAuthClientInformation, OAuthMetadata, RequestId, RequestParams + +SUPPORTED_PROTOCOL_VERSIONS: list[str] = ["2024-11-05", "2025-03-26", LATEST_PROTOCOL_VERSION] SessionT = TypeVar("SessionT", bound=BaseSession[Any, Any, Any, Any, Any]) @@ -17,3 +20,41 @@ class 
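Both transport hunks above stop using ThreadPoolExecutor as a context manager because its __exit__ calls shutdown(wait=True), which blocks until every worker returns; that hangs when a worker is still parked on a blocking read. The replacement calls shutdown(wait=False), which returns immediately without joining the workers (running tasks are not killed). A runnable sketch:

from concurrent.futures import ThreadPoolExecutor
import time

executor = ThreadPoolExecutor(max_workers=2)
try:
    executor.submit(time.sleep, 5)  # simulates a worker blocked on I/O
finally:
    # Returns immediately instead of joining; the sleeping worker keeps
    # running, but cleanup after this line is no longer held up by it.
    executor.shutdown(wait=False)
print("cleanup continues while the worker is still blocked")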
RequestContext(Generic[SessionT, LifespanContextT]): meta: RequestParams.Meta | None session: SessionT lifespan_context: LifespanContextT + + +class AuthActionType(StrEnum): + """Types of actions that can be performed during auth flow.""" + + SAVE_CLIENT_INFO = "save_client_info" + SAVE_TOKENS = "save_tokens" + SAVE_CODE_VERIFIER = "save_code_verifier" + START_AUTHORIZATION = "start_authorization" + SUCCESS = "success" + + +class AuthAction(BaseModel): + """Represents an action that needs to be performed as a result of auth flow.""" + + action_type: AuthActionType + data: dict[str, Any] + provider_id: str | None = None + tenant_id: str | None = None + + +class AuthResult(BaseModel): + """Result of auth function containing actions to be performed and response data.""" + + actions: list[AuthAction] + response: dict[str, str] + + +class OAuthCallbackState(BaseModel): + """State data stored in Redis during OAuth callback flow.""" + + provider_id: str + tenant_id: str + server_url: str + metadata: OAuthMetadata | None = None + client_information: OAuthClientInformation + code_verifier: str + redirect_uri: str diff --git a/api/core/mcp/error.py b/api/core/mcp/error.py index 92ea7bde09..d4fb8b7674 100644 --- a/api/core/mcp/error.py +++ b/api/core/mcp/error.py @@ -8,3 +8,7 @@ class MCPConnectionError(MCPError): class MCPAuthError(MCPConnectionError): pass + + +class MCPRefreshTokenError(MCPError): + pass diff --git a/api/core/mcp/mcp_client.py b/api/core/mcp/mcp_client.py index 86ec2c4db9..b0e0dab9be 100644 --- a/api/core/mcp/mcp_client.py +++ b/api/core/mcp/mcp_client.py @@ -7,9 +7,9 @@ from urllib.parse import urlparse from core.mcp.client.sse_client import sse_client from core.mcp.client.streamable_client import streamablehttp_client -from core.mcp.error import MCPAuthError, MCPConnectionError +from core.mcp.error import MCPConnectionError from core.mcp.session.client_session import ClientSession -from core.mcp.types import Tool +from core.mcp.types import CallToolResult, Tool logger = logging.getLogger(__name__) @@ -18,40 +18,18 @@ class MCPClient: def __init__( self, server_url: str, - provider_id: str, - tenant_id: str, - authed: bool = True, - authorization_code: str | None = None, - for_list: bool = False, headers: dict[str, str] | None = None, timeout: float | None = None, sse_read_timeout: float | None = None, ): - # Initialize info - self.provider_id = provider_id - self.tenant_id = tenant_id - self.client_type = "streamable" self.server_url = server_url self.headers = headers or {} self.timeout = timeout self.sse_read_timeout = sse_read_timeout - # Authentication info - self.authed = authed - self.authorization_code = authorization_code - if authed: - from core.mcp.auth.auth_provider import OAuthClientProvider - - self.provider = OAuthClientProvider(self.provider_id, self.tenant_id, for_list=for_list) - self.token = self.provider.tokens() - # Initialize session and client objects self._session: ClientSession | None = None - self._streams_context: AbstractContextManager[Any] | None = None - self._session_context: ClientSession | None = None self._exit_stack = ExitStack() - - # Whether the client has been initialized self._initialized = False def __enter__(self): @@ -85,61 +63,42 @@ class MCPClient: logger.debug("MCP connection failed with 'sse', falling back to 'mcp' method.") self.connect_server(streamablehttp_client, "mcp") - def connect_server( - self, client_factory: Callable[..., AbstractContextManager[Any]], method_name: str, first_try: bool = True - ): - from 
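AuthActionType above subclasses StrEnum (Python 3.11+), whose members are real strings; this is the same property that lets earlier hunks compare enums such as ProviderType.CUSTOM without .value, assuming those enums are likewise str-backed. A two-assert demonstration:

from enum import StrEnum

class AuthActionType(StrEnum):  # mirrors the definition above
    SAVE_TOKENS = "save_tokens"

assert AuthActionType.SAVE_TOKENS == "save_tokens"       # compares equal to its value
assert f"{AuthActionType.SAVE_TOKENS}" == "save_tokens"  # and formats as the value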
core.mcp.auth.auth_flow import auth + def connect_server(self, client_factory: Callable[..., AbstractContextManager[Any]], method_name: str) -> None: + """ + Connect to the MCP server over streamable HTTP or SSE. + Defaults to streamable HTTP. + Args: + client_factory: The client factory to use (streamablehttp_client or sse_client). + method_name: The method name to use (mcp or sse). + """ + streams_context = client_factory( + url=self.server_url, + headers=self.headers, + timeout=self.timeout, + sse_read_timeout=self.sse_read_timeout, + ) - try: - headers = ( - {"Authorization": f"{self.token.token_type.capitalize()} {self.token.access_token}"} - if self.authed and self.token - else self.headers - ) - self._streams_context = client_factory( - url=self.server_url, - headers=headers, - timeout=self.timeout, - sse_read_timeout=self.sse_read_timeout, - ) - if not self._streams_context: - raise MCPConnectionError("Failed to create connection context") + # Use exit_stack to manage context managers properly + if method_name == "mcp": + read_stream, write_stream, _ = self._exit_stack.enter_context(streams_context) + streams = (read_stream, write_stream) + else: # sse_client + streams = self._exit_stack.enter_context(streams_context) - # Use exit_stack to manage context managers properly - if method_name == "mcp": - read_stream, write_stream, _ = self._exit_stack.enter_context(self._streams_context) - streams = (read_stream, write_stream) - else: # sse_client - streams = self._exit_stack.enter_context(self._streams_context) - - self._session_context = ClientSession(*streams) - self._session = self._exit_stack.enter_context(self._session_context) - self._session.initialize() - return - - except MCPAuthError: - if not self.authed: - raise - try: - auth(self.provider, self.server_url, self.authorization_code) - except Exception as e: - raise ValueError(f"Failed to authenticate: {e}") - self.token = self.provider.tokens() - if first_try: - return self.connect_server(client_factory, method_name, first_try=False) + session_context = ClientSession(*streams) + self._session = self._exit_stack.enter_context(session_context) + self._session.initialize() def list_tools(self) -> list[Tool]: - """Connect to an MCP server running with SSE transport""" - # List available tools to verify connection - if not self._initialized or not self._session: + """List available tools from the MCP server""" + if not self._session: raise ValueError("Session not initialized.") response = self._session.list_tools() - tools = response.tools - return tools + return response.tools - def invoke_tool(self, tool_name: str, tool_args: dict): + def invoke_tool(self, tool_name: str, tool_args: dict[str, Any]) -> CallToolResult: """Call a tool""" - if not self._initialized or not self._session: + if not self._session: raise ValueError("Session not initialized.") return self._session.call_tool(tool_name, tool_args) @@ -153,6 +112,4 @@ class MCPClient: raise ValueError(f"Error during cleanup: {e}") finally: self._session = None - self._session_context = None - self._streams_context = None self._initialized = False diff --git a/api/core/mcp/session/base_session.py b/api/core/mcp/session/base_session.py index 653b3773c0..3dcd166ea2 100644 --- a/api/core/mcp/session/base_session.py +++ b/api/core/mcp/session/base_session.py @@ -201,11 +201,14 @@ class BaseSession( self._receiver_future.result(timeout=5.0) # Wait up to 5 seconds except TimeoutError: # If the receiver loop is still running after timeout, we'll force shutdown - pass + # Cancel the
future to interrupt the receiver loop + self._receiver_future.cancel() # Shutdown the executor if self._executor: - self._executor.shutdown(wait=True) + # Use non-blocking shutdown to prevent hanging + # The receiver thread should have already exited due to the None message in the queue + self._executor.shutdown(wait=False) def send_request( self, diff --git a/api/core/mcp/session/client_session.py b/api/core/mcp/session/client_session.py index 143dba402a..d684fe0dd7 100644 --- a/api/core/mcp/session/client_session.py +++ b/api/core/mcp/session/client_session.py @@ -288,7 +288,7 @@ class ClientSession( def complete( self, - ref: types.ResourceReference | types.PromptReference, + ref: types.ResourceTemplateReference | types.PromptReference, argument: dict[str, str], ) -> types.CompleteResult: """Send a completion/complete request.""" @@ -298,7 +298,7 @@ class ClientSession( method="completion/complete", params=types.CompleteRequestParams( ref=ref, - argument=types.CompletionArgument(**argument), + argument=types.CompletionArgument.model_validate(argument), ), ) ), diff --git a/api/core/mcp/types.py b/api/core/mcp/types.py index c7a046b585..fd2062d2e1 100644 --- a/api/core/mcp/types.py +++ b/api/core/mcp/types.py @@ -1,13 +1,6 @@ from collections.abc import Callable from dataclasses import dataclass -from typing import ( - Annotated, - Any, - Generic, - Literal, - TypeAlias, - TypeVar, -) +from typing import Annotated, Any, Generic, Literal, TypeAlias, TypeVar from pydantic import BaseModel, ConfigDict, Field, FileUrl, RootModel from pydantic.networks import AnyUrl, UrlConstraints @@ -33,6 +26,7 @@ for reference. LATEST_PROTOCOL_VERSION = "2025-03-26" # Server support 2024-11-05 to allow claude to use. SERVER_LATEST_PROTOCOL_VERSION = "2024-11-05" +DEFAULT_NEGOTIATED_VERSION = "2025-03-26" ProgressToken = str | int Cursor = str Role = Literal["user", "assistant"] @@ -55,14 +49,22 @@ class RequestParams(BaseModel): meta: Meta | None = Field(alias="_meta", default=None) +class PaginatedRequestParams(RequestParams): + cursor: Cursor | None = None + """ + An opaque token representing the current pagination position. + If provided, the server should return results starting after this cursor. + """ + + class NotificationParams(BaseModel): class Meta(BaseModel): model_config = ConfigDict(extra="allow") meta: Meta | None = Field(alias="_meta", default=None) """ - This parameter name is reserved by MCP to allow clients and servers to attach - additional metadata to their notifications. + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. """ @@ -79,12 +81,11 @@ class Request(BaseModel, Generic[RequestParamsT, MethodT]): model_config = ConfigDict(extra="allow") -class PaginatedRequest(Request[RequestParamsT, MethodT]): - cursor: Cursor | None = None - """ - An opaque token representing the current pagination position. - If provided, the server should return results starting after this cursor. 
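The complete() change above swaps in ResourceTemplateReference and validates the argument mapping instead of splatting it into the constructor. A hypothetical call, where session stands for a connected ClientSession and the prompt name is illustrative:

from core.mcp import types

def ask_for_completions(session) -> types.CompleteResult:
    # Ask the server for completion options for a prompt argument's partial value.
    return session.complete(
        ref=types.PromptReference(type="ref/prompt", name="summarize"),
        argument={"name": "language", "value": "py"},
    )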
- """ +class PaginatedRequest(Request[PaginatedRequestParams | None, MethodT], Generic[MethodT]): + """Base class for paginated requests, + matching the schema's PaginatedRequest interface.""" + + params: PaginatedRequestParams | None = None class Notification(BaseModel, Generic[NotificationParamsT, MethodT]): @@ -98,13 +99,12 @@ class Notification(BaseModel, Generic[NotificationParamsT, MethodT]): class Result(BaseModel): """Base class for JSON-RPC results.""" - model_config = ConfigDict(extra="allow") - meta: dict[str, Any] | None = Field(alias="_meta", default=None) """ - This result property is reserved by the protocol to allow clients and servers to - attach additional metadata to their responses. + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. """ + model_config = ConfigDict(extra="allow") class PaginatedResult(Result): @@ -186,10 +186,26 @@ class EmptyResult(Result): """A response that indicates success but carries no data.""" -class Implementation(BaseModel): - """Describes the name and version of an MCP implementation.""" +class BaseMetadata(BaseModel): + """Base class for entities with name and optional title fields.""" name: str + """The programmatic name of the entity.""" + + title: str | None = None + """ + Intended for UI and end-user contexts — optimized to be human-readable and easily understood, + even by those unfamiliar with domain-specific terminology. + + If not provided, the name should be used for display (except for Tool, + where `annotations.title` should be given precedence over using `name`, + if present). + """ + + +class Implementation(BaseMetadata): + """Describes the name and version of an MCP implementation.""" + version: str model_config = ConfigDict(extra="allow") @@ -203,7 +219,7 @@ class RootsCapability(BaseModel): class SamplingCapability(BaseModel): - """Capability for logging operations.""" + """Capability for sampling operations.""" model_config = ConfigDict(extra="allow") @@ -252,6 +268,12 @@ class LoggingCapability(BaseModel): model_config = ConfigDict(extra="allow") +class CompletionsCapability(BaseModel): + """Capability for completions operations.""" + + model_config = ConfigDict(extra="allow") + + class ServerCapabilities(BaseModel): """Capabilities that a server may support.""" @@ -265,6 +287,8 @@ class ServerCapabilities(BaseModel): """Present if the server offers any resources to read.""" tools: ToolsCapability | None = None """Present if the server offers any tools to call.""" + completions: CompletionsCapability | None = None + """Present if the server offers autocompletion suggestions for prompts and resources.""" model_config = ConfigDict(extra="allow") @@ -284,7 +308,7 @@ class InitializeRequest(Request[InitializeRequestParams, Literal["initialize"]]) to begin initialization. """ - method: Literal["initialize"] + method: Literal["initialize"] = "initialize" params: InitializeRequestParams @@ -305,7 +329,7 @@ class InitializedNotification(Notification[NotificationParams | None, Literal["n finished. """ - method: Literal["notifications/initialized"] + method: Literal["notifications/initialized"] = "notifications/initialized" params: NotificationParams | None = None @@ -315,7 +339,7 @@ class PingRequest(Request[RequestParams | None, Literal["ping"]]): still alive. 
""" - method: Literal["ping"] + method: Literal["ping"] = "ping" params: RequestParams | None = None @@ -334,6 +358,11 @@ class ProgressNotificationParams(NotificationParams): """ total: float | None = None """Total number of items to process (or total progress required), if known.""" + message: str | None = None + """ + Message related to progress. This should provide relevant human readable + progress information. + """ model_config = ConfigDict(extra="allow") @@ -343,15 +372,14 @@ class ProgressNotification(Notification[ProgressNotificationParams, Literal["not long-running request. """ - method: Literal["notifications/progress"] + method: Literal["notifications/progress"] = "notifications/progress" params: ProgressNotificationParams -class ListResourcesRequest(PaginatedRequest[RequestParams | None, Literal["resources/list"]]): +class ListResourcesRequest(PaginatedRequest[Literal["resources/list"]]): """Sent from the client to request a list of resources the server has.""" - method: Literal["resources/list"] - params: RequestParams | None = None + method: Literal["resources/list"] = "resources/list" class Annotations(BaseModel): @@ -360,13 +388,11 @@ class Annotations(BaseModel): model_config = ConfigDict(extra="allow") -class Resource(BaseModel): +class Resource(BaseMetadata): """A known resource that the server is capable of reading.""" uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] """The URI of this resource.""" - name: str - """A human-readable name for this resource.""" description: str | None = None """A description of what this resource represents.""" mimeType: str | None = None @@ -379,10 +405,15 @@ class Resource(BaseModel): This can be used by Hosts to display file sizes and estimate context window usage. """ annotations: Annotations | None = None + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. + """ model_config = ConfigDict(extra="allow") -class ResourceTemplate(BaseModel): +class ResourceTemplate(BaseMetadata): """A template description for resources available on the server.""" uriTemplate: str @@ -390,8 +421,6 @@ class ResourceTemplate(BaseModel): A URI template (according to RFC 6570) that can be used to construct resource URIs. """ - name: str - """A human-readable name for the type of resource this template refers to.""" description: str | None = None """A human-readable description of what this template is for.""" mimeType: str | None = None @@ -400,6 +429,11 @@ class ResourceTemplate(BaseModel): included if all resources matching this template have the same type. """ annotations: Annotations | None = None + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. 
+ """ model_config = ConfigDict(extra="allow") @@ -409,11 +443,10 @@ class ListResourcesResult(PaginatedResult): resources: list[Resource] -class ListResourceTemplatesRequest(PaginatedRequest[RequestParams | None, Literal["resources/templates/list"]]): +class ListResourceTemplatesRequest(PaginatedRequest[Literal["resources/templates/list"]]): """Sent from the client to request a list of resource templates the server has.""" - method: Literal["resources/templates/list"] - params: RequestParams | None = None + method: Literal["resources/templates/list"] = "resources/templates/list" class ListResourceTemplatesResult(PaginatedResult): @@ -436,7 +469,7 @@ class ReadResourceRequestParams(RequestParams): class ReadResourceRequest(Request[ReadResourceRequestParams, Literal["resources/read"]]): """Sent from the client to the server, to read a specific resource URI.""" - method: Literal["resources/read"] + method: Literal["resources/read"] = "resources/read" params: ReadResourceRequestParams @@ -447,6 +480,11 @@ class ResourceContents(BaseModel): """The URI of this resource.""" mimeType: str | None = None """The MIME type of this resource, if known.""" + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. + """ model_config = ConfigDict(extra="allow") @@ -481,7 +519,7 @@ class ResourceListChangedNotification( of resources it can read from has changed. """ - method: Literal["notifications/resources/list_changed"] + method: Literal["notifications/resources/list_changed"] = "notifications/resources/list_changed" params: NotificationParams | None = None @@ -502,7 +540,7 @@ class SubscribeRequest(Request[SubscribeRequestParams, Literal["resources/subscr whenever a particular resource changes. """ - method: Literal["resources/subscribe"] + method: Literal["resources/subscribe"] = "resources/subscribe" params: SubscribeRequestParams @@ -520,7 +558,7 @@ class UnsubscribeRequest(Request[UnsubscribeRequestParams, Literal["resources/un the server. """ - method: Literal["resources/unsubscribe"] + method: Literal["resources/unsubscribe"] = "resources/unsubscribe" params: UnsubscribeRequestParams @@ -543,15 +581,14 @@ class ResourceUpdatedNotification( changed and may need to be read again. 
""" - method: Literal["notifications/resources/updated"] + method: Literal["notifications/resources/updated"] = "notifications/resources/updated" params: ResourceUpdatedNotificationParams -class ListPromptsRequest(PaginatedRequest[RequestParams | None, Literal["prompts/list"]]): +class ListPromptsRequest(PaginatedRequest[Literal["prompts/list"]]): """Sent from the client to request a list of prompts and prompt templates.""" - method: Literal["prompts/list"] - params: RequestParams | None = None + method: Literal["prompts/list"] = "prompts/list" class PromptArgument(BaseModel): @@ -566,15 +603,18 @@ class PromptArgument(BaseModel): model_config = ConfigDict(extra="allow") -class Prompt(BaseModel): +class Prompt(BaseMetadata): """A prompt or prompt template that the server offers.""" - name: str - """The name of the prompt or prompt template.""" description: str | None = None """An optional description of what this prompt provides.""" arguments: list[PromptArgument] | None = None """A list of arguments to use for templating the prompt.""" + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. + """ model_config = ConfigDict(extra="allow") @@ -597,7 +637,7 @@ class GetPromptRequestParams(RequestParams): class GetPromptRequest(Request[GetPromptRequestParams, Literal["prompts/get"]]): """Used by the client to get a prompt provided by the server.""" - method: Literal["prompts/get"] + method: Literal["prompts/get"] = "prompts/get" params: GetPromptRequestParams @@ -608,6 +648,11 @@ class TextContent(BaseModel): text: str """The text content of the message.""" annotations: Annotations | None = None + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. + """ model_config = ConfigDict(extra="allow") @@ -623,6 +668,31 @@ class ImageContent(BaseModel): image types. """ annotations: Annotations | None = None + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. + """ + model_config = ConfigDict(extra="allow") + + +class AudioContent(BaseModel): + """Audio content for a message.""" + + type: Literal["audio"] + data: str + """The base64-encoded audio data.""" + mimeType: str + """ + The MIME type of the audio. Different providers may support different + audio types. + """ + annotations: Annotations | None = None + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. 
+ """ model_config = ConfigDict(extra="allow") @@ -630,7 +700,7 @@ class SamplingMessage(BaseModel): """Describes a message issued to or received from an LLM API.""" role: Role - content: TextContent | ImageContent + content: TextContent | ImageContent | AudioContent model_config = ConfigDict(extra="allow") @@ -645,14 +715,36 @@ class EmbeddedResource(BaseModel): type: Literal["resource"] resource: TextResourceContents | BlobResourceContents annotations: Annotations | None = None + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. + """ model_config = ConfigDict(extra="allow") +class ResourceLink(Resource): + """ + A resource that the server is capable of reading, included in a prompt or tool call result. + + Note: resource links returned by tools are not guaranteed to appear in the results of `resources/list` requests. + """ + + type: Literal["resource_link"] + + +ContentBlock = TextContent | ImageContent | AudioContent | ResourceLink | EmbeddedResource +"""A content block that can be used in prompts and tool results.""" + +Content: TypeAlias = ContentBlock +# """DEPRECATED: Content is deprecated, you should use ContentBlock directly.""" + + class PromptMessage(BaseModel): """Describes a message returned as part of a prompt.""" role: Role - content: TextContent | ImageContent | EmbeddedResource + content: ContentBlock model_config = ConfigDict(extra="allow") @@ -672,15 +764,14 @@ class PromptListChangedNotification( of prompts it offers has changed. """ - method: Literal["notifications/prompts/list_changed"] + method: Literal["notifications/prompts/list_changed"] = "notifications/prompts/list_changed" params: NotificationParams | None = None -class ListToolsRequest(PaginatedRequest[RequestParams | None, Literal["tools/list"]]): +class ListToolsRequest(PaginatedRequest[Literal["tools/list"]]): """Sent from the client to request a list of tools the server has.""" - method: Literal["tools/list"] - params: RequestParams | None = None + method: Literal["tools/list"] = "tools/list" class ToolAnnotations(BaseModel): @@ -731,17 +822,25 @@ class ToolAnnotations(BaseModel): model_config = ConfigDict(extra="allow") -class Tool(BaseModel): +class Tool(BaseMetadata): """Definition for a tool the client can call.""" - name: str - """The name of the tool.""" description: str | None = None """A human-readable description of the tool.""" inputSchema: dict[str, Any] """A JSON Schema object defining the expected parameters for the tool.""" + outputSchema: dict[str, Any] | None = None + """ + An optional JSON Schema object defining the structure of the tool's output + returned in the structuredContent field of a CallToolResult. + """ annotations: ToolAnnotations | None = None """Optional additional tool information.""" + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. 
+ """ model_config = ConfigDict(extra="allow") @@ -762,14 +861,16 @@ class CallToolRequestParams(RequestParams): class CallToolRequest(Request[CallToolRequestParams, Literal["tools/call"]]): """Used by the client to invoke a tool provided by the server.""" - method: Literal["tools/call"] + method: Literal["tools/call"] = "tools/call" params: CallToolRequestParams class CallToolResult(Result): """The server's response to a tool call.""" - content: list[TextContent | ImageContent | EmbeddedResource] + content: list[ContentBlock] + structuredContent: dict[str, Any] | None = None + """An optional JSON object that represents the structured result of the tool call.""" isError: bool = False @@ -779,7 +880,7 @@ class ToolListChangedNotification(Notification[NotificationParams | None, Litera of tools it offers has changed. """ - method: Literal["notifications/tools/list_changed"] + method: Literal["notifications/tools/list_changed"] = "notifications/tools/list_changed" params: NotificationParams | None = None @@ -797,7 +898,7 @@ class SetLevelRequestParams(RequestParams): class SetLevelRequest(Request[SetLevelRequestParams, Literal["logging/setLevel"]]): """A request from the client to the server, to enable or adjust logging.""" - method: Literal["logging/setLevel"] + method: Literal["logging/setLevel"] = "logging/setLevel" params: SetLevelRequestParams @@ -808,7 +909,7 @@ class LoggingMessageNotificationParams(NotificationParams): """The severity of this log message.""" logger: str | None = None """An optional name of the logger issuing this message.""" - data: Any = None + data: Any """ The data to be logged, such as a string message or an object. Any JSON serializable type is allowed here. @@ -819,7 +920,7 @@ class LoggingMessageNotificationParams(NotificationParams): class LoggingMessageNotification(Notification[LoggingMessageNotificationParams, Literal["notifications/message"]]): """Notification of a log message passed from server to client.""" - method: Literal["notifications/message"] + method: Literal["notifications/message"] = "notifications/message" params: LoggingMessageNotificationParams @@ -914,7 +1015,7 @@ class CreateMessageRequestParams(RequestParams): class CreateMessageRequest(Request[CreateMessageRequestParams, Literal["sampling/createMessage"]]): """A request from the server to sample an LLM via the client.""" - method: Literal["sampling/createMessage"] + method: Literal["sampling/createMessage"] = "sampling/createMessage" params: CreateMessageRequestParams @@ -925,14 +1026,14 @@ class CreateMessageResult(Result): """The client's response to a sampling/create_message request from the server.""" role: Role - content: TextContent | ImageContent + content: TextContent | ImageContent | AudioContent model: str """The name of the model that generated the message.""" stopReason: StopReason | None = None """The reason why sampling stopped, if known.""" -class ResourceReference(BaseModel): +class ResourceTemplateReference(BaseModel): """A reference to a resource or resource template definition.""" type: Literal["ref/resource"] @@ -960,18 +1061,28 @@ class CompletionArgument(BaseModel): model_config = ConfigDict(extra="allow") +class CompletionContext(BaseModel): + """Additional, optional context for completions.""" + + arguments: dict[str, str] | None = None + """Previously-resolved variables in a URI template or prompt.""" + model_config = ConfigDict(extra="allow") + + class CompleteRequestParams(RequestParams): """Parameters for completion requests.""" - ref: ResourceReference | 
PromptReference + ref: ResourceTemplateReference | PromptReference argument: CompletionArgument + context: CompletionContext | None = None + """Additional, optional context for completions""" model_config = ConfigDict(extra="allow") class CompleteRequest(Request[CompleteRequestParams, Literal["completion/complete"]]): """A request from the client to the server, to ask for completion options.""" - method: Literal["completion/complete"] + method: Literal["completion/complete"] = "completion/complete" params: CompleteRequestParams @@ -1010,7 +1121,7 @@ class ListRootsRequest(Request[RequestParams | None, Literal["roots/list"]]): structure or access specific locations that the client has permission to read from. """ - method: Literal["roots/list"] + method: Literal["roots/list"] = "roots/list" params: RequestParams | None = None @@ -1029,6 +1140,11 @@ class Root(BaseModel): identifier for the root, which may be useful for display purposes or for referencing the root in other parts of the application. """ + meta: dict[str, Any] | None = Field(alias="_meta", default=None) + """ + See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) + for notes on _meta usage. + """ model_config = ConfigDict(extra="allow") @@ -1054,7 +1170,7 @@ class RootsListChangedNotification( using the ListRootsRequest. """ - method: Literal["notifications/roots/list_changed"] + method: Literal["notifications/roots/list_changed"] = "notifications/roots/list_changed" params: NotificationParams | None = None @@ -1074,7 +1190,7 @@ class CancelledNotification(Notification[CancelledNotificationParams, Literal["n previously-issued request. """ - method: Literal["notifications/cancelled"] + method: Literal["notifications/cancelled"] = "notifications/cancelled" params: CancelledNotificationParams diff --git a/api/core/memory/token_buffer_memory.py b/api/core/memory/token_buffer_memory.py index 35af742f2a..3ebbb60f85 100644 --- a/api/core/memory/token_buffer_memory.py +++ b/api/core/memory/token_buffer_memory.py @@ -1,6 +1,7 @@ from collections.abc import Sequence from sqlalchemy import select +from sqlalchemy.orm import sessionmaker from core.app.app_config.features.file_upload.manager import FileUploadConfigManager from core.file import file_manager @@ -18,7 +19,9 @@ from core.prompt.utils.extract_thread_messages import extract_thread_messages from extensions.ext_database import db from factories import file_factory from models.model import AppMode, Conversation, Message, MessageFile -from models.workflow import Workflow, WorkflowRun +from models.workflow import Workflow +from repositories.api_workflow_run_repository import APIWorkflowRunRepository +from repositories.factory import DifyAPIRepositoryFactory class TokenBufferMemory: @@ -29,6 +32,14 @@ class TokenBufferMemory: ): self.conversation = conversation self.model_instance = model_instance + self._workflow_run_repo: APIWorkflowRunRepository | None = None + + @property + def workflow_run_repo(self) -> APIWorkflowRunRepository: + if self._workflow_run_repo is None: + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + return self._workflow_run_repo def _build_prompt_message_with_files( self, @@ -50,7 +61,16 @@ class TokenBufferMemory: if self.conversation.mode in {AppMode.AGENT_CHAT, AppMode.COMPLETION, AppMode.CHAT}: 
file_extra_config = FileUploadConfigManager.convert(self.conversation.model_config) elif self.conversation.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: - workflow_run = db.session.scalar(select(WorkflowRun).where(WorkflowRun.id == message.workflow_run_id)) + app = self.conversation.app + if not app: + raise ValueError("App not found for conversation") + + if not message.workflow_run_id: + raise ValueError("Workflow run ID not found") + + workflow_run = self.workflow_run_repo.get_workflow_run_by_id( + tenant_id=app.tenant_id, app_id=app.id, run_id=message.workflow_run_id + ) if not workflow_run: raise ValueError(f"Workflow run not found: {message.workflow_run_id}") workflow = db.session.scalar(select(Workflow).where(Workflow.id == workflow_run.workflow_id)) diff --git a/api/core/model_runtime/entities/common_entities.py b/api/core/model_runtime/entities/common_entities.py index c7353de5af..b673efae22 100644 --- a/api/core/model_runtime/entities/common_entities.py +++ b/api/core/model_runtime/entities/common_entities.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel +from pydantic import BaseModel, model_validator class I18nObject(BaseModel): @@ -9,7 +9,8 @@ class I18nObject(BaseModel): zh_Hans: str | None = None en_US: str - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _(self): if not self.zh_Hans: self.zh_Hans = self.en_US + return self diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py index 9235c881e0..89dae2dbff 100644 --- a/api/core/model_runtime/entities/message_entities.py +++ b/api/core/model_runtime/entities/message_entities.py @@ -74,7 +74,7 @@ class TextPromptMessageContent(PromptMessageContent): Model class for text prompt message content. 
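Swapping I18nObject's __init__ override for an after-validator (as above) keeps the zh_Hans defaulting on every construction path, model_validate included. The same pattern in isolation, with a stand-in model:

from pydantic import BaseModel, model_validator

class I18n(BaseModel):  # stand-in mirroring I18nObject's defaulting rule
    en_US: str
    zh_Hans: str | None = None

    @model_validator(mode="after")
    def _default_zh(self):
        if not self.zh_Hans:
            self.zh_Hans = self.en_US
        return self

assert I18n(en_US="Hello").zh_Hans == "Hello"
assert I18n.model_validate({"en_US": "Hi"}).zh_Hans == "Hi"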
""" - type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT + type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT # type: ignore data: str @@ -95,11 +95,11 @@ class MultiModalPromptMessageContent(PromptMessageContent): class VideoPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO + type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO # type: ignore class AudioPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO + type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO # type: ignore class ImagePromptMessageContent(MultiModalPromptMessageContent): @@ -111,12 +111,12 @@ class ImagePromptMessageContent(MultiModalPromptMessageContent): LOW = auto() HIGH = auto() - type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE + type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE # type: ignore detail: DETAIL = DETAIL.LOW class DocumentPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT + type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT # type: ignore PromptMessageContentUnionTypes = Annotated[ diff --git a/api/core/model_runtime/entities/provider_entities.py b/api/core/model_runtime/entities/provider_entities.py index 2ccc9e0eae..0508116962 100644 --- a/api/core/model_runtime/entities/provider_entities.py +++ b/api/core/model_runtime/entities/provider_entities.py @@ -1,13 +1,13 @@ from collections.abc import Sequence -from enum import Enum, StrEnum, auto +from enum import StrEnum, auto -from pydantic import BaseModel, ConfigDict, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.model_entities import AIModelEntity, ModelType -class ConfigurateMethod(Enum): +class ConfigurateMethod(StrEnum): """ Enum class for configurate method of provider model. 
""" @@ -46,10 +46,11 @@ class FormOption(BaseModel): value: str show_on: list[FormShowOnObject] = [] - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _(self): if not self.label: self.label = I18nObject(en_US=self.value) + return self class CredentialFormSchema(BaseModel): diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py index 23d36c03af..3967acf07b 100644 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py +++ b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py @@ -15,7 +15,7 @@ class GPT2Tokenizer: use gpt2 tokenizer to get num tokens """ _tokenizer = GPT2Tokenizer.get_encoder() - tokens = _tokenizer.encode(text) + tokens = _tokenizer.encode(text) # type: ignore return len(tokens) @staticmethod diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index e070c17abd..e1afc41bee 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -269,17 +269,17 @@ class ModelProviderFactory: } if model_type == ModelType.LLM: - return LargeLanguageModel(**init_params) # type: ignore + return LargeLanguageModel.model_validate(init_params) elif model_type == ModelType.TEXT_EMBEDDING: - return TextEmbeddingModel(**init_params) # type: ignore + return TextEmbeddingModel.model_validate(init_params) elif model_type == ModelType.RERANK: - return RerankModel(**init_params) # type: ignore + return RerankModel.model_validate(init_params) elif model_type == ModelType.SPEECH2TEXT: - return Speech2TextModel(**init_params) # type: ignore + return Speech2TextModel.model_validate(init_params) elif model_type == ModelType.MODERATION: - return ModerationModel(**init_params) # type: ignore + return ModerationModel.model_validate(init_params) elif model_type == ModelType.TTS: - return TTSModel(**init_params) # type: ignore + return TTSModel.model_validate(init_params) def get_provider_icon(self, provider: str, icon_type: str, lang: str) -> tuple[bytes, str]: """ diff --git a/api/core/model_runtime/utils/encoders.py b/api/core/model_runtime/utils/encoders.py index c758eaf49f..c85152463e 100644 --- a/api/core/model_runtime/utils/encoders.py +++ b/api/core/model_runtime/utils/encoders.py @@ -196,15 +196,15 @@ def jsonable_encoder( return encoder(obj) try: - data = dict(obj) + data = dict(obj) # type: ignore except Exception as e: errors: list[Exception] = [] errors.append(e) try: - data = vars(obj) + data = vars(obj) # type: ignore except Exception as e: errors.append(e) - raise ValueError(errors) from e + raise ValueError(str(errors)) from e return jsonable_encoder( data, by_alias=by_alias, diff --git a/api/core/moderation/api/api.py b/api/core/moderation/api/api.py index 573f4ec2a7..2d72b17a04 100644 --- a/api/core/moderation/api/api.py +++ b/api/core/moderation/api/api.py @@ -51,7 +51,7 @@ class ApiModeration(Moderation): params = ModerationInputParams(app_id=self.app_id, inputs=inputs, query=query) result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_INPUT, params.model_dump()) - return ModerationInputsResult(**result) + return ModerationInputsResult.model_validate(result) return ModerationInputsResult( flagged=flagged, action=ModerationAction.DIRECT_OUTPUT, preset_response=preset_response @@ -67,7 
+67,7 @@ class ApiModeration(Moderation): params = ModerationOutputParams(app_id=self.app_id, text=text) result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_OUTPUT, params.model_dump()) - return ModerationOutputsResult(**result) + return ModerationOutputsResult.model_validate(result) return ModerationOutputsResult( flagged=flagged, action=ModerationAction.DIRECT_OUTPUT, preset_response=preset_response diff --git a/api/core/ops/aliyun_trace/data_exporter/traceclient.py b/api/core/ops/aliyun_trace/data_exporter/traceclient.py index f54405b5de..5aa9fb6689 100644 --- a/api/core/ops/aliyun_trace/data_exporter/traceclient.py +++ b/api/core/ops/aliyun_trace/data_exporter/traceclient.py @@ -7,7 +7,7 @@ import uuid from collections import deque from collections.abc import Sequence from datetime import datetime -from typing import Final +from typing import Final, cast from urllib.parse import urljoin import httpx @@ -199,7 +199,7 @@ def convert_to_trace_id(uuid_v4: str | None) -> int: raise ValueError("UUID cannot be None") try: uuid_obj = uuid.UUID(uuid_v4) - return uuid_obj.int + return cast(int, uuid_obj.int) except ValueError as e: raise ValueError(f"Invalid UUID input: {uuid_v4}") from e diff --git a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py index 0ee71fc23f..20ff2d0875 100644 --- a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py +++ b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py @@ -3,7 +3,8 @@ from dataclasses import dataclass from typing import Any from opentelemetry import trace as trace_api -from opentelemetry.sdk.trace import Event, Status, StatusCode +from opentelemetry.sdk.trace import Event +from opentelemetry.trace import Status, StatusCode from pydantic import BaseModel, Field diff --git a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py index 1497bc1863..03d2d75372 100644 --- a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py +++ b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py @@ -213,9 +213,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance): node_metadata.update(json.loads(node_execution.execution_metadata)) # Determine the correct span kind based on node type - span_kind = OpenInferenceSpanKindValues.CHAIN.value + span_kind = OpenInferenceSpanKindValues.CHAIN if node_execution.node_type == "llm": - span_kind = OpenInferenceSpanKindValues.LLM.value + span_kind = OpenInferenceSpanKindValues.LLM provider = process_data.get("model_provider") model = process_data.get("model_name") if provider: @@ -230,18 +230,18 @@ class ArizePhoenixDataTrace(BaseTraceInstance): node_metadata["prompt_tokens"] = usage_data.get("prompt_tokens", 0) node_metadata["completion_tokens"] = usage_data.get("completion_tokens", 0) elif node_execution.node_type == "dataset_retrieval": - span_kind = OpenInferenceSpanKindValues.RETRIEVER.value + span_kind = OpenInferenceSpanKindValues.RETRIEVER elif node_execution.node_type == "tool": - span_kind = OpenInferenceSpanKindValues.TOOL.value + span_kind = OpenInferenceSpanKindValues.TOOL else: - span_kind = OpenInferenceSpanKindValues.CHAIN.value + span_kind = OpenInferenceSpanKindValues.CHAIN node_span = self.tracer.start_span( name=node_execution.node_type, attributes={ SpanAttributes.INPUT_VALUE: node_execution.inputs or "{}", SpanAttributes.OUTPUT_VALUE: node_execution.outputs or "{}", - SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind, + 
SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind.value, SpanAttributes.METADATA: json.dumps(node_metadata, ensure_ascii=False), SpanAttributes.SESSION_ID: trace_info.conversation_id or "", }, diff --git a/api/core/ops/entities/config_entity.py b/api/core/ops/entities/config_entity.py index 4ba6eb0780..f9b8d41e0a 100644 --- a/api/core/ops/entities/config_entity.py +++ b/api/core/ops/entities/config_entity.py @@ -13,6 +13,7 @@ class TracingProviderEnum(StrEnum): OPIK = "opik" WEAVE = "weave" ALIYUN = "aliyun" + TENCENT = "tencent" class BaseTracingConfig(BaseModel): @@ -195,5 +196,32 @@ class AliyunConfig(BaseTracingConfig): return validate_url_with_path(v, "https://tracing-analysis-dc-hz.aliyuncs.com") +class TencentConfig(BaseTracingConfig): + """ + Tencent APM tracing config + """ + + token: str + endpoint: str + service_name: str + + @field_validator("token") + @classmethod + def token_validator(cls, v, info: ValidationInfo): + if not v or v.strip() == "": + raise ValueError("Token cannot be empty") + return v + + @field_validator("endpoint") + @classmethod + def endpoint_validator(cls, v, info: ValidationInfo): + return cls.validate_endpoint_url(v, "https://apm.tencentcloudapi.com") + + @field_validator("service_name") + @classmethod + def service_name_validator(cls, v, info: ValidationInfo): + return cls.validate_project_field(v, "dify_app") + + OPS_FILE_PATH = "ops_trace/" OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE" diff --git a/api/core/ops/entities/trace_entity.py b/api/core/ops/entities/trace_entity.py index b8a25c5d7d..5b81c09a2d 100644 --- a/api/core/ops/entities/trace_entity.py +++ b/api/core/ops/entities/trace_entity.py @@ -90,6 +90,7 @@ class SuggestedQuestionTraceInfo(BaseTraceInfo): class DatasetRetrievalTraceInfo(BaseTraceInfo): documents: Any = None + error: str | None = None class ToolTraceInfo(BaseTraceInfo): diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index 931bed78d4..4de4f403ce 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -2,7 +2,7 @@ import logging import os from datetime import datetime, timedelta -from langfuse import Langfuse # type: ignore +from langfuse import Langfuse from sqlalchemy.orm import sessionmaker from core.ops.base_trace_instance import BaseTraceInstance @@ -73,7 +73,7 @@ class LangFuseDataTrace(BaseTraceInstance): if trace_info.message_id: trace_id = trace_info.trace_id or trace_info.message_id - name = TraceTaskName.MESSAGE_TRACE.value + name = TraceTaskName.MESSAGE_TRACE trace_data = LangfuseTrace( id=trace_id, user_id=user_id, @@ -88,7 +88,7 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_trace(langfuse_trace_data=trace_data) workflow_span_data = LangfuseSpan( id=trace_info.workflow_run_id, - name=TraceTaskName.WORKFLOW_TRACE.value, + name=TraceTaskName.WORKFLOW_TRACE, input=dict(trace_info.workflow_run_inputs), output=dict(trace_info.workflow_run_outputs), trace_id=trace_id, @@ -103,7 +103,7 @@ class LangFuseDataTrace(BaseTraceInstance): trace_data = LangfuseTrace( id=trace_id, user_id=user_id, - name=TraceTaskName.WORKFLOW_TRACE.value, + name=TraceTaskName.WORKFLOW_TRACE, input=dict(trace_info.workflow_run_inputs), output=dict(trace_info.workflow_run_outputs), metadata=metadata, @@ -253,7 +253,7 @@ class LangFuseDataTrace(BaseTraceInstance): trace_data = LangfuseTrace( id=trace_id, user_id=user_id, - name=TraceTaskName.MESSAGE_TRACE.value, + name=TraceTaskName.MESSAGE_TRACE, input={ "message": 
trace_info.inputs, "files": file_list, @@ -303,7 +303,7 @@ class LangFuseDataTrace(BaseTraceInstance): if trace_info.message_data is None: return span_data = LangfuseSpan( - name=TraceTaskName.MODERATION_TRACE.value, + name=TraceTaskName.MODERATION_TRACE, input=trace_info.inputs, output={ "action": trace_info.action, @@ -331,7 +331,7 @@ class LangFuseDataTrace(BaseTraceInstance): ) generation_data = LangfuseGeneration( - name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value, + name=TraceTaskName.SUGGESTED_QUESTION_TRACE, input=trace_info.inputs, output=str(trace_info.suggested_question), trace_id=trace_info.trace_id or trace_info.message_id, @@ -349,7 +349,7 @@ class LangFuseDataTrace(BaseTraceInstance): if trace_info.message_data is None: return dataset_retrieval_span_data = LangfuseSpan( - name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value, + name=TraceTaskName.DATASET_RETRIEVAL_TRACE, input=trace_info.inputs, output={"documents": trace_info.documents}, trace_id=trace_info.trace_id or trace_info.message_id, @@ -377,7 +377,7 @@ class LangFuseDataTrace(BaseTraceInstance): def generate_name_trace(self, trace_info: GenerateNameTraceInfo): name_generation_trace_data = LangfuseTrace( - name=TraceTaskName.GENERATE_NAME_TRACE.value, + name=TraceTaskName.GENERATE_NAME_TRACE, input=trace_info.inputs, output=trace_info.outputs, user_id=trace_info.tenant_id, @@ -388,7 +388,7 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_trace(langfuse_trace_data=name_generation_trace_data) name_generation_span_data = LangfuseSpan( - name=TraceTaskName.GENERATE_NAME_TRACE.value, + name=TraceTaskName.GENERATE_NAME_TRACE, input=trace_info.inputs, output=trace_info.outputs, trace_id=trace_info.conversation_id, diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py index 24a43e1cd8..8b8117b24c 100644 --- a/api/core/ops/langsmith_trace/langsmith_trace.py +++ b/api/core/ops/langsmith_trace/langsmith_trace.py @@ -81,7 +81,7 @@ class LangSmithDataTrace(BaseTraceInstance): if trace_info.message_id: message_run = LangSmithRunModel( id=trace_info.message_id, - name=TraceTaskName.MESSAGE_TRACE.value, + name=TraceTaskName.MESSAGE_TRACE, inputs=dict(trace_info.workflow_run_inputs), outputs=dict(trace_info.workflow_run_outputs), run_type=LangSmithRunType.chain, @@ -110,7 +110,7 @@ class LangSmithDataTrace(BaseTraceInstance): file_list=trace_info.file_list, total_tokens=trace_info.total_tokens, id=trace_info.workflow_run_id, - name=TraceTaskName.WORKFLOW_TRACE.value, + name=TraceTaskName.WORKFLOW_TRACE, inputs=dict(trace_info.workflow_run_inputs), run_type=LangSmithRunType.tool, start_time=trace_info.workflow_data.created_at, @@ -271,7 +271,7 @@ class LangSmithDataTrace(BaseTraceInstance): output_tokens=trace_info.answer_tokens, total_tokens=trace_info.total_tokens, id=message_id, - name=TraceTaskName.MESSAGE_TRACE.value, + name=TraceTaskName.MESSAGE_TRACE, inputs=trace_info.inputs, run_type=LangSmithRunType.chain, start_time=trace_info.start_time, @@ -327,7 +327,7 @@ class LangSmithDataTrace(BaseTraceInstance): if trace_info.message_data is None: return langsmith_run = LangSmithRunModel( - name=TraceTaskName.MODERATION_TRACE.value, + name=TraceTaskName.MODERATION_TRACE, inputs=trace_info.inputs, outputs={ "action": trace_info.action, @@ -362,7 +362,7 @@ class LangSmithDataTrace(BaseTraceInstance): if message_data is None: return suggested_question_run = LangSmithRunModel( - name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value, + name=TraceTaskName.SUGGESTED_QUESTION_TRACE, 
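Dropping .value across these trace names is safe if TraceTaskName is a StrEnum, which the surrounding changes imply: StrEnum members are str instances and format as their value. A quick demonstration with a stand-in enum:

from enum import StrEnum

class TraceName(StrEnum):  # stand-in for TraceTaskName
    MESSAGE_TRACE = "message_trace"

assert isinstance(TraceName.MESSAGE_TRACE, str)
assert TraceName.MESSAGE_TRACE == "message_trace" == TraceName.MESSAGE_TRACE.value
assert f"{TraceName.MESSAGE_TRACE}" == "message_trace"  # formats as the value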
inputs=trace_info.inputs, outputs=trace_info.suggested_question, run_type=LangSmithRunType.tool, @@ -391,7 +391,7 @@ class LangSmithDataTrace(BaseTraceInstance): if trace_info.message_data is None: return dataset_retrieval_run = LangSmithRunModel( - name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value, + name=TraceTaskName.DATASET_RETRIEVAL_TRACE, inputs=trace_info.inputs, outputs={"documents": trace_info.documents}, run_type=LangSmithRunType.retriever, @@ -447,7 +447,7 @@ class LangSmithDataTrace(BaseTraceInstance): def generate_name_trace(self, trace_info: GenerateNameTraceInfo): name_run = LangSmithRunModel( - name=TraceTaskName.GENERATE_NAME_TRACE.value, + name=TraceTaskName.GENERATE_NAME_TRACE, inputs=trace_info.inputs, outputs=trace_info.outputs, run_type=LangSmithRunType.tool, diff --git a/api/core/ops/opik_trace/opik_trace.py b/api/core/ops/opik_trace/opik_trace.py index 8fa92f9fcd..8050c59db9 100644 --- a/api/core/ops/opik_trace/opik_trace.py +++ b/api/core/ops/opik_trace/opik_trace.py @@ -108,7 +108,7 @@ class OpikDataTrace(BaseTraceInstance): trace_data = { "id": opik_trace_id, - "name": TraceTaskName.MESSAGE_TRACE.value, + "name": TraceTaskName.MESSAGE_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": workflow_metadata, @@ -125,7 +125,7 @@ class OpikDataTrace(BaseTraceInstance): "id": root_span_id, "parent_span_id": None, "trace_id": opik_trace_id, - "name": TraceTaskName.WORKFLOW_TRACE.value, + "name": TraceTaskName.WORKFLOW_TRACE, "input": wrap_dict("input", trace_info.workflow_run_inputs), "output": wrap_dict("output", trace_info.workflow_run_outputs), "start_time": trace_info.start_time, @@ -138,7 +138,7 @@ class OpikDataTrace(BaseTraceInstance): else: trace_data = { "id": opik_trace_id, - "name": TraceTaskName.MESSAGE_TRACE.value, + "name": TraceTaskName.MESSAGE_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": workflow_metadata, @@ -290,7 +290,7 @@ class OpikDataTrace(BaseTraceInstance): trace_data = { "id": prepare_opik_uuid(trace_info.start_time, dify_trace_id), - "name": TraceTaskName.MESSAGE_TRACE.value, + "name": TraceTaskName.MESSAGE_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": wrap_metadata(metadata), @@ -329,7 +329,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.MODERATION_TRACE.value, + "name": TraceTaskName.MODERATION_TRACE, "type": "tool", "start_time": start_time, "end_time": trace_info.end_time or trace_info.message_data.updated_at, @@ -355,7 +355,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.SUGGESTED_QUESTION_TRACE.value, + "name": TraceTaskName.SUGGESTED_QUESTION_TRACE, "type": "tool", "start_time": start_time, "end_time": trace_info.end_time or message_data.updated_at, @@ -375,7 +375,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.DATASET_RETRIEVAL_TRACE.value, + "name": TraceTaskName.DATASET_RETRIEVAL_TRACE, "type": "tool", "start_time": start_time, "end_time": trace_info.end_time or trace_info.message_data.updated_at, @@ -405,7 +405,7 @@ class OpikDataTrace(BaseTraceInstance): def generate_name_trace(self, trace_info: GenerateNameTraceInfo): trace_data = { "id": 
prepare_opik_uuid(trace_info.start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.GENERATE_NAME_TRACE.value, + "name": TraceTaskName.GENERATE_NAME_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": wrap_metadata(trace_info.metadata), @@ -420,7 +420,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": trace.id, - "name": TraceTaskName.GENERATE_NAME_TRACE.value, + "name": TraceTaskName.GENERATE_NAME_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": wrap_metadata(trace_info.metadata), diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 0679b27271..de0d4560e3 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -12,7 +12,7 @@ from uuid import UUID, uuid4 from cachetools import LRUCache from flask import current_app from sqlalchemy import select -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, sessionmaker from core.helper.encrypter import decrypt_token, encrypt_token, obfuscated_token from core.ops.entities.config_entity import ( @@ -34,7 +34,8 @@ from core.ops.utils import get_message_data from extensions.ext_database import db from extensions.ext_storage import storage from models.model import App, AppModelConfig, Conversation, Message, MessageFile, TraceAppConfig -from models.workflow import WorkflowAppLog, WorkflowRun +from models.workflow import WorkflowAppLog +from repositories.factory import DifyAPIRepositoryFactory from tasks.ops_trace_task import process_trace_tasks if TYPE_CHECKING: @@ -120,6 +121,17 @@ class OpsTraceProviderConfigMap(collections.UserDict[str, dict[str, Any]]): "trace_instance": AliyunDataTrace, } + case TracingProviderEnum.TENCENT: + from core.ops.entities.config_entity import TencentConfig + from core.ops.tencent_trace.tencent_trace import TencentDataTrace + + return { + "config_class": TencentConfig, + "secret_keys": ["token"], + "other_keys": ["endpoint", "service_name"], + "trace_instance": TencentDataTrace, + } + case _: raise KeyError(f"Unsupported tracing provider: {provider}") @@ -155,7 +167,10 @@ class OpsTraceManager: if key in tracing_config: if "*" in tracing_config[key]: # If the key contains '*', retain the original value from the current config - new_config[key] = current_trace_config.get(key, tracing_config[key]) + if current_trace_config: + new_config[key] = current_trace_config.get(key, tracing_config[key]) + else: + new_config[key] = tracing_config[key] else: # Otherwise, encrypt the key new_config[key] = encrypt_token(tenant_id, tracing_config[key]) @@ -405,6 +420,18 @@ class OpsTraceManager: class TraceTask: + _workflow_run_repo = None + _repo_lock = threading.Lock() + + @classmethod + def _get_workflow_run_repo(cls): + if cls._workflow_run_repo is None: + with cls._repo_lock: + if cls._workflow_run_repo is None: + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + cls._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + return cls._workflow_run_repo + def __init__( self, trace_type: Any, @@ -472,27 +499,27 @@ class TraceTask: if not workflow_run_id: return {} + workflow_run_repo = self._get_workflow_run_repo() + workflow_run = workflow_run_repo.get_workflow_run_by_id_without_tenant(run_id=workflow_run_id) + if not workflow_run: + raise ValueError("Workflow run not found") + + workflow_id = workflow_run.workflow_id + tenant_id = workflow_run.tenant_id + 
workflow_run_id = workflow_run.id + workflow_run_elapsed_time = workflow_run.elapsed_time + workflow_run_status = workflow_run.status + workflow_run_inputs = workflow_run.inputs_dict + workflow_run_outputs = workflow_run.outputs_dict + workflow_run_version = workflow_run.version + error = workflow_run.error or "" + + total_tokens = workflow_run.total_tokens + + file_list = workflow_run_inputs.get("sys.file") or [] + query = workflow_run_inputs.get("query") or workflow_run_inputs.get("sys.query") or "" + with Session(db.engine) as session: - workflow_run_stmt = select(WorkflowRun).where(WorkflowRun.id == workflow_run_id) - workflow_run = session.scalars(workflow_run_stmt).first() - if not workflow_run: - raise ValueError("Workflow run not found") - - workflow_id = workflow_run.workflow_id - tenant_id = workflow_run.tenant_id - workflow_run_id = workflow_run.id - workflow_run_elapsed_time = workflow_run.elapsed_time - workflow_run_status = workflow_run.status - workflow_run_inputs = workflow_run.inputs_dict - workflow_run_outputs = workflow_run.outputs_dict - workflow_run_version = workflow_run.version - error = workflow_run.error or "" - - total_tokens = workflow_run.total_tokens - - file_list = workflow_run_inputs.get("sys.file") or [] - query = workflow_run_inputs.get("query") or workflow_run_inputs.get("sys.query") or "" - # get workflow_app_log_id workflow_app_log_data_stmt = select(WorkflowAppLog.id).where( WorkflowAppLog.tenant_id == tenant_id, @@ -509,43 +536,43 @@ class TraceTask: ) message_id = session.scalar(message_data_stmt) - metadata = { - "workflow_id": workflow_id, - "conversation_id": conversation_id, - "workflow_run_id": workflow_run_id, - "tenant_id": tenant_id, - "elapsed_time": workflow_run_elapsed_time, - "status": workflow_run_status, - "version": workflow_run_version, - "total_tokens": total_tokens, - "file_list": file_list, - "triggered_from": workflow_run.triggered_from, - "user_id": user_id, - "app_id": workflow_run.app_id, - } + metadata = { + "workflow_id": workflow_id, + "conversation_id": conversation_id, + "workflow_run_id": workflow_run_id, + "tenant_id": tenant_id, + "elapsed_time": workflow_run_elapsed_time, + "status": workflow_run_status, + "version": workflow_run_version, + "total_tokens": total_tokens, + "file_list": file_list, + "triggered_from": workflow_run.triggered_from, + "user_id": user_id, + "app_id": workflow_run.app_id, + } - workflow_trace_info = WorkflowTraceInfo( - trace_id=self.trace_id, - workflow_data=workflow_run.to_dict(), - conversation_id=conversation_id, - workflow_id=workflow_id, - tenant_id=tenant_id, - workflow_run_id=workflow_run_id, - workflow_run_elapsed_time=workflow_run_elapsed_time, - workflow_run_status=workflow_run_status, - workflow_run_inputs=workflow_run_inputs, - workflow_run_outputs=workflow_run_outputs, - workflow_run_version=workflow_run_version, - error=error, - total_tokens=total_tokens, - file_list=file_list, - query=query, - metadata=metadata, - workflow_app_log_id=workflow_app_log_id, - message_id=message_id, - start_time=workflow_run.created_at, - end_time=workflow_run.finished_at, - ) + workflow_trace_info = WorkflowTraceInfo( + trace_id=self.trace_id, + workflow_data=workflow_run.to_dict(), + conversation_id=conversation_id, + workflow_id=workflow_id, + tenant_id=tenant_id, + workflow_run_id=workflow_run_id, + workflow_run_elapsed_time=workflow_run_elapsed_time, + workflow_run_status=workflow_run_status, + workflow_run_inputs=workflow_run_inputs, + workflow_run_outputs=workflow_run_outputs, + 
workflow_run_version=workflow_run_version, + error=error, + total_tokens=total_tokens, + file_list=file_list, + query=query, + metadata=metadata, + workflow_app_log_id=workflow_app_log_id, + message_id=message_id, + start_time=workflow_run.created_at, + end_time=workflow_run.finished_at, + ) return workflow_trace_info def message_trace(self, message_id: str | None): @@ -720,6 +747,7 @@ class TraceTask: end_time=timer.get("end"), metadata=metadata, message_data=message_data.to_dict(), + error=kwargs.get("error"), ) return dataset_retrieval_trace_info @@ -886,6 +914,7 @@ class TraceQueueManager: continue file_id = uuid4().hex trace_info = task.execute() + task_data = TaskData( app_id=task.app_id, trace_info_type=type(trace_info).__name__, @@ -897,4 +926,4 @@ class TraceQueueManager: "file_id": file_id, "app_id": task.app_id, } - process_trace_tasks.delay(file_info) + process_trace_tasks.delay(file_info) # type: ignore diff --git a/web/app/components/app/configuration/base/icons/citation.tsx b/api/core/ops/tencent_trace/__init__.py similarity index 100% rename from web/app/components/app/configuration/base/icons/citation.tsx rename to api/core/ops/tencent_trace/__init__.py diff --git a/api/core/ops/tencent_trace/client.py b/api/core/ops/tencent_trace/client.py new file mode 100644 index 0000000000..270732aa02 --- /dev/null +++ b/api/core/ops/tencent_trace/client.py @@ -0,0 +1,337 @@ +""" +Tencent APM Trace Client - handles network operations, metrics, and API communication +""" + +from __future__ import annotations + +import importlib +import logging +import os +import socket +from typing import TYPE_CHECKING +from urllib.parse import urlparse + +if TYPE_CHECKING: + from opentelemetry.metrics import Meter + from opentelemetry.metrics._internal.instrument import Histogram + from opentelemetry.sdk.metrics.export import MetricReader + +from opentelemetry import trace as trace_api +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.semconv.resource import ResourceAttributes +from opentelemetry.trace import SpanKind +from opentelemetry.util.types import AttributeValue + +from configs import dify_config + +from .entities.tencent_semconv import LLM_OPERATION_DURATION +from .entities.tencent_trace_entity import SpanData + +logger = logging.getLogger(__name__) + + +class TencentTraceClient: + """Tencent APM trace client using OpenTelemetry OTLP exporter""" + + def __init__( + self, + service_name: str, + endpoint: str, + token: str, + max_queue_size: int = 1000, + schedule_delay_sec: int = 5, + max_export_batch_size: int = 50, + metrics_export_interval_sec: int = 10, + ): + self.endpoint = endpoint + self.token = token + self.service_name = service_name + self.metrics_export_interval_sec = metrics_export_interval_sec + + self.resource = Resource( + attributes={ + ResourceAttributes.SERVICE_NAME: service_name, + ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}", + ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}", + ResourceAttributes.HOST_NAME: socket.gethostname(), + } + ) + # Prepare gRPC endpoint/metadata + grpc_endpoint, insecure, _, _ = self._resolve_grpc_target(endpoint) + + headers = (("authorization", f"Bearer {token}"),) + + self.exporter = OTLPSpanExporter( + endpoint=grpc_endpoint, + 
headers=headers, + insecure=insecure, + timeout=30, + ) + + self.tracer_provider = TracerProvider(resource=self.resource) + self.span_processor = BatchSpanProcessor( + span_exporter=self.exporter, + max_queue_size=max_queue_size, + schedule_delay_millis=schedule_delay_sec * 1000, + max_export_batch_size=max_export_batch_size, + ) + self.tracer_provider.add_span_processor(self.span_processor) + + self.tracer = self.tracer_provider.get_tracer("dify.tencent_apm") + + # Store span contexts for parent-child relationships + self.span_contexts: dict[int, trace_api.SpanContext] = {} + + self.meter: Meter | None = None + self.hist_llm_duration: Histogram | None = None + self.metric_reader: MetricReader | None = None + + # Metrics exporter and instruments + try: + from opentelemetry import metrics + from opentelemetry.sdk.metrics import Histogram, MeterProvider + from opentelemetry.sdk.metrics.export import AggregationTemporality, PeriodicExportingMetricReader + + protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "").strip().lower() + use_http_protobuf = protocol in {"http/protobuf", "http-protobuf"} + use_http_json = protocol in {"http/json", "http-json"} + + # Set preferred temporality for histograms to DELTA + preferred_temporality: dict[type, AggregationTemporality] = {Histogram: AggregationTemporality.DELTA} + + def _create_metric_exporter(exporter_cls, **kwargs): + """Create metric exporter with preferred_temporality support""" + try: + return exporter_cls(**kwargs, preferred_temporality=preferred_temporality) + except Exception: + return exporter_cls(**kwargs) + + metric_reader = None + if use_http_json: + exporter_cls = None + for mod_path in ( + "opentelemetry.exporter.otlp.http.json.metric_exporter", + "opentelemetry.exporter.otlp.json.metric_exporter", + ): + try: + mod = importlib.import_module(mod_path) + exporter_cls = getattr(mod, "OTLPMetricExporter", None) + if exporter_cls: + break + except Exception: + continue + if exporter_cls is not None: + metric_exporter = _create_metric_exporter( + exporter_cls, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + else: + from opentelemetry.exporter.otlp.proto.http.metric_exporter import ( + OTLPMetricExporter as HttpMetricExporter, + ) + + metric_exporter = _create_metric_exporter( + HttpMetricExporter, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + + elif use_http_protobuf: + from opentelemetry.exporter.otlp.proto.http.metric_exporter import ( + OTLPMetricExporter as HttpMetricExporter, + ) + + metric_exporter = _create_metric_exporter( + HttpMetricExporter, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + else: + from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( + OTLPMetricExporter as GrpcMetricExporter, + ) + + m_grpc_endpoint, m_insecure, _, _ = self._resolve_grpc_target(endpoint) + + metric_exporter = _create_metric_exporter( + GrpcMetricExporter, + endpoint=m_grpc_endpoint, + headers={"authorization": f"Bearer {token}"}, + insecure=m_insecure, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + + if metric_reader is not None: + provider = MeterProvider(resource=self.resource, 
metric_readers=[metric_reader]) + metrics.set_meter_provider(provider) + self.meter = metrics.get_meter("dify-sdk", dify_config.project.version) + self.hist_llm_duration = self.meter.create_histogram( + name=LLM_OPERATION_DURATION, + unit="s", + description="LLM operation duration (seconds)", + ) + self.metric_reader = metric_reader + else: + self.meter = None + self.hist_llm_duration = None + self.metric_reader = None + except Exception: + logger.exception("[Tencent APM] Metrics initialization failed; metrics disabled") + self.meter = None + self.hist_llm_duration = None + self.metric_reader = None + + def add_span(self, span_data: SpanData) -> None: + """Create and export span using OpenTelemetry Tracer API""" + try: + self._create_and_export_span(span_data) + logger.debug("[Tencent APM] Created span: %s", span_data.name) + + except Exception: + logger.exception("[Tencent APM] Failed to create span: %s", span_data.name) + + # Metrics recording API + def record_llm_duration(self, latency_seconds: float, attributes: dict[str, str] | None = None) -> None: + """Record LLM operation duration histogram in seconds.""" + try: + if not hasattr(self, "hist_llm_duration") or self.hist_llm_duration is None: + return + attrs: dict[str, str] = {} + if attributes: + for k, v in attributes.items(): + attrs[k] = str(v) if not isinstance(v, (str, int, float, bool)) else v # type: ignore[assignment] + self.hist_llm_duration.record(latency_seconds, attrs) # type: ignore[attr-defined] + except Exception: + logger.debug("[Tencent APM] Failed to record LLM duration", exc_info=True) + + def _create_and_export_span(self, span_data: SpanData) -> None: + """Create span using OpenTelemetry Tracer API""" + try: + parent_context = None + if span_data.parent_span_id and span_data.parent_span_id in self.span_contexts: + parent_context = trace_api.set_span_in_context( + trace_api.NonRecordingSpan(self.span_contexts[span_data.parent_span_id]) + ) + + span = self.tracer.start_span( + name=span_data.name, + context=parent_context, + kind=SpanKind.INTERNAL, + start_time=span_data.start_time, + ) + self.span_contexts[span_data.span_id] = span.get_span_context() + + if span_data.attributes: + attributes: dict[str, AttributeValue] = {} + for key, value in span_data.attributes.items(): + if isinstance(value, (int, float, bool)): + attributes[key] = value + else: + attributes[key] = str(value) + span.set_attributes(attributes) + + if span_data.events: + for event in span_data.events: + span.add_event(event.name, event.attributes, event.timestamp) + + if span_data.status: + span.set_status(span_data.status) + + # Manually end span; do not use context manager to avoid double-end warnings + span.end(end_time=span_data.end_time) + + except Exception: + logger.exception("[Tencent APM] Error creating span: %s", span_data.name) + + def api_check(self) -> bool: + """Check API connectivity using socket connection test for gRPC endpoints""" + try: + # Resolve gRPC target consistently with exporters + _, _, host, port = self._resolve_grpc_target(self.endpoint) + + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.settimeout(5) + result = sock.connect_ex((host, port)) + sock.close() + + if result == 0: + logger.info("[Tencent APM] Endpoint %s:%s is accessible", host, port) + return True + else: + logger.warning("[Tencent APM] Endpoint %s:%s is not accessible", host, port) + if host in ["127.0.0.1", "localhost"]: + logger.info("[Tencent APM] Development environment detected, allowing config save") + return True + return False + 
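A note on the parent/child wiring in `_create_and_export_span` above: OpenTelemetry has no public API for reparenting a finished span, so the client caches each span's `SpanContext` and replays it through a `NonRecordingSpan` when starting children. A minimal, self-contained sketch of that pattern (tracer and span names are illustrative):

```python
from opentelemetry import trace as trace_api
from opentelemetry.sdk.trace import TracerProvider

tracer = TracerProvider().get_tracer("sketch")

# Create the parent, then keep only its SpanContext (as the client does
# in self.span_contexts, keyed by span_id).
parent = tracer.start_span("workflow")
parent_sc = parent.get_span_context()
parent.end()

# Re-wrap the cached context so the child joins the same trace, with
# parent_span_id pointing at the already-finished span.
ctx = trace_api.set_span_in_context(trace_api.NonRecordingSpan(parent_sc))
child = tracer.start_span("node", context=ctx)
assert child.get_span_context().trace_id == parent_sc.trace_id
child.end()
```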
+ except Exception: + logger.exception("[Tencent APM] API check failed") + if "127.0.0.1" in self.endpoint or "localhost" in self.endpoint: + return True + return False + + def get_project_url(self) -> str: + """Get project console URL""" + return "https://console.cloud.tencent.com/apm" + + def shutdown(self) -> None: + """Shutdown the client and export remaining spans""" + try: + if self.span_processor: + logger.info("[Tencent APM] Flushing remaining spans before shutdown") + _ = self.span_processor.force_flush() + self.span_processor.shutdown() + + if self.tracer_provider: + self.tracer_provider.shutdown() + if self.metric_reader is not None: + try: + self.metric_reader.shutdown() # type: ignore[attr-defined] + except Exception: + pass + + except Exception: + logger.exception("[Tencent APM] Error during client shutdown") + + @staticmethod + def _resolve_grpc_target(endpoint: str, default_port: int = 4317) -> tuple[str, bool, str, int]: + """Normalize endpoint to gRPC target and security flag. + + Returns: + (grpc_endpoint, insecure, host, port) + """ + try: + if endpoint.startswith(("http://", "https://")): + parsed = urlparse(endpoint) + host = parsed.hostname or "localhost" + port = parsed.port or default_port + insecure = parsed.scheme == "http" + return f"{host}:{port}", insecure, host, port + + host = endpoint + port = default_port + if ":" in endpoint: + parts = endpoint.rsplit(":", 1) + host = parts[0] or "localhost" + try: + port = int(parts[1]) + except Exception: + port = default_port + + insecure = ("localhost" in host) or ("127.0.0.1" in host) + return f"{host}:{port}", insecure, host, port + except Exception: + host, port = "localhost", default_port + return f"{host}:{port}", True, host, port diff --git a/api/core/ops/tencent_trace/entities/__init__.py b/api/core/ops/tencent_trace/entities/__init__.py new file mode 100644 index 0000000000..b1602628ed --- /dev/null +++ b/api/core/ops/tencent_trace/entities/__init__.py @@ -0,0 +1 @@ +# Tencent trace entities module diff --git a/api/core/ops/tencent_trace/entities/tencent_semconv.py b/api/core/ops/tencent_trace/entities/tencent_semconv.py new file mode 100644 index 0000000000..5ea6eeacef --- /dev/null +++ b/api/core/ops/tencent_trace/entities/tencent_semconv.py @@ -0,0 +1,73 @@ +from enum import Enum + +# public +GEN_AI_SESSION_ID = "gen_ai.session.id" + +GEN_AI_USER_ID = "gen_ai.user.id" + +GEN_AI_USER_NAME = "gen_ai.user.name" + +GEN_AI_SPAN_KIND = "gen_ai.span.kind" + +GEN_AI_FRAMEWORK = "gen_ai.framework" + +GEN_AI_IS_ENTRY = "gen_ai.is_entry" # mark to count the LLM-related traces + +# Chain +INPUT_VALUE = "gen_ai.entity.input" + +OUTPUT_VALUE = "gen_ai.entity.output" + + +# Retriever +RETRIEVAL_QUERY = "retrieval.query" + +RETRIEVAL_DOCUMENT = "retrieval.document" + + +# GENERATION +GEN_AI_MODEL_NAME = "gen_ai.response.model" + +GEN_AI_PROVIDER = "gen_ai.provider.name" + + +GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens" + +GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens" + +GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens" + +GEN_AI_PROMPT_TEMPLATE_TEMPLATE = "gen_ai.prompt_template.template" + +GEN_AI_PROMPT_TEMPLATE_VARIABLE = "gen_ai.prompt_template.variable" + +GEN_AI_PROMPT = "gen_ai.prompt" + +GEN_AI_COMPLETION = "gen_ai.completion" + +GEN_AI_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason" + +# Tool +TOOL_NAME = "tool.name" + +TOOL_DESCRIPTION = "tool.description" + +TOOL_PARAMETERS = "tool.parameters" + +# Instrumentation Library +INSTRUMENTATION_NAME = "dify-sdk" 
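For reference, a few input/output pairs for the `_resolve_grpc_target` helper defined above, calling the private staticmethod directly for illustration (endpoints are made up; expected tuples follow the code as written, with 4317 as the default OTLP gRPC port):

```python
from core.ops.tencent_trace.client import TencentTraceClient

# Bare host: default port applied, loopback treated as insecure.
assert TencentTraceClient._resolve_grpc_target("127.0.0.1") == (
    "127.0.0.1:4317", True, "127.0.0.1", 4317
)
# https URL: scheme stripped for the gRPC target, TLS kept (insecure=False).
assert TencentTraceClient._resolve_grpc_target("https://apm.example.com:443") == (
    "apm.example.com:443", False, "apm.example.com", 443
)
# http URL to localhost: explicit port honoured, insecure=True.
assert TencentTraceClient._resolve_grpc_target("http://localhost:4318") == (
    "localhost:4318", True, "localhost", 4318
)
```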
+INSTRUMENTATION_VERSION = "0.1.0" +INSTRUMENTATION_LANGUAGE = "python" + + +# Metrics +LLM_OPERATION_DURATION = "gen_ai.client.operation.duration" + + +class GenAISpanKind(Enum): + WORKFLOW = "WORKFLOW" # OpenLLMetry + RETRIEVER = "RETRIEVER" # RAG + GENERATION = "GENERATION" # Langfuse + TOOL = "TOOL" # OpenLLMetry + AGENT = "AGENT" # OpenLLMetry + TASK = "TASK" # OpenLLMetry diff --git a/api/core/ops/tencent_trace/entities/tencent_trace_entity.py b/api/core/ops/tencent_trace/entities/tencent_trace_entity.py new file mode 100644 index 0000000000..428850f109 --- /dev/null +++ b/api/core/ops/tencent_trace/entities/tencent_trace_entity.py @@ -0,0 +1,21 @@ +from collections.abc import Sequence + +from opentelemetry import trace as trace_api +from opentelemetry.sdk.trace import Event +from opentelemetry.trace import Status, StatusCode +from pydantic import BaseModel, Field + + +class SpanData(BaseModel): + model_config = {"arbitrary_types_allowed": True} + + trace_id: int = Field(..., description="The unique identifier for the trace.") + parent_span_id: int | None = Field(None, description="The ID of the parent span, if any.") + span_id: int = Field(..., description="The unique identifier for this span.") + name: str = Field(..., description="The name of the span.") + attributes: dict[str, str] = Field(default_factory=dict, description="Attributes associated with the span.") + events: Sequence[Event] = Field(default_factory=list, description="Events recorded in the span.") + links: Sequence[trace_api.Link] = Field(default_factory=list, description="Links to other spans.") + status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.") + start_time: int = Field(..., description="The start time of the span in nanoseconds.") + end_time: int = Field(..., description="The end time of the span in nanoseconds.") diff --git a/api/core/ops/tencent_trace/span_builder.py b/api/core/ops/tencent_trace/span_builder.py new file mode 100644 index 0000000000..5ba592290d --- /dev/null +++ b/api/core/ops/tencent_trace/span_builder.py @@ -0,0 +1,372 @@ +""" +Tencent APM Span Builder - handles all span construction logic +""" + +import json +import logging +from datetime import datetime + +from opentelemetry.trace import Status, StatusCode + +from core.ops.entities.trace_entity import ( + DatasetRetrievalTraceInfo, + MessageTraceInfo, + ToolTraceInfo, + WorkflowTraceInfo, +) +from core.ops.tencent_trace.entities.tencent_semconv import ( + GEN_AI_COMPLETION, + GEN_AI_FRAMEWORK, + GEN_AI_IS_ENTRY, + GEN_AI_MODEL_NAME, + GEN_AI_PROMPT, + GEN_AI_PROVIDER, + GEN_AI_RESPONSE_FINISH_REASON, + GEN_AI_SESSION_ID, + GEN_AI_SPAN_KIND, + GEN_AI_USAGE_INPUT_TOKENS, + GEN_AI_USAGE_OUTPUT_TOKENS, + GEN_AI_USAGE_TOTAL_TOKENS, + GEN_AI_USER_ID, + INPUT_VALUE, + OUTPUT_VALUE, + RETRIEVAL_DOCUMENT, + RETRIEVAL_QUERY, + TOOL_DESCRIPTION, + TOOL_NAME, + TOOL_PARAMETERS, + GenAISpanKind, +) +from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData +from core.ops.tencent_trace.utils import TencentTraceUtils +from core.rag.models.document import Document +from core.workflow.entities.workflow_node_execution import ( + WorkflowNodeExecution, + WorkflowNodeExecutionMetadataKey, + WorkflowNodeExecutionStatus, +) + +logger = logging.getLogger(__name__) + + +class TencentSpanBuilder: + """Builder class for constructing different types of spans""" + + @staticmethod + def _get_time_nanoseconds(time_value: datetime | None) -> int: + """Convert datetime to nanoseconds for span creation.""" + 
return TencentTraceUtils.convert_datetime_to_nanoseconds(time_value) + + @staticmethod + def build_workflow_spans( + trace_info: WorkflowTraceInfo, trace_id: int, user_id: str, links: list | None = None + ) -> list[SpanData]: + """Build workflow-related spans""" + spans = [] + links = links or [] + + message_span_id = None + workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow") + + if hasattr(trace_info, "metadata") and trace_info.metadata.get("conversation_id"): + message_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "message") + + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + if message_span_id: + message_span = TencentSpanBuilder._build_message_span( + trace_info, trace_id, message_span_id, user_id, status, links + ) + spans.append(message_span) + + workflow_span = TencentSpanBuilder._build_workflow_span( + trace_info, trace_id, workflow_span_id, message_span_id, user_id, status, links + ) + spans.append(workflow_span) + + return spans + + @staticmethod + def _build_message_span( + trace_info: WorkflowTraceInfo, trace_id: int, message_span_id: int, user_id: str, status: Status, links: list + ) -> SpanData: + """Build message span for chatflow""" + return SpanData( + trace_id=trace_id, + parent_span_id=None, + span_id=message_span_id, + name="message", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_IS_ENTRY: "true", + INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query", ""), + OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), + }, + status=status, + links=links, + ) + + @staticmethod + def _build_workflow_span( + trace_info: WorkflowTraceInfo, + trace_id: int, + workflow_span_id: int, + message_span_id: int | None, + user_id: str, + status: Status, + links: list, + ) -> SpanData: + """Build workflow span""" + attributes = { + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), + } + + if message_span_id is None: + attributes[GEN_AI_IS_ENTRY] = "true" + + return SpanData( + trace_id=trace_id, + parent_span_id=message_span_id, + span_id=workflow_span_id, + name="workflow", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes=attributes, + status=status, + links=links, + ) + + @staticmethod + def build_workflow_llm_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build LLM span for workflow nodes.""" + process_data = node_execution.process_data or {} + outputs = node_execution.outputs or {} + usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {}) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name="GENERATION", + 
start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.GENERATION.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_MODEL_NAME: process_data.get("model_name", ""), + GEN_AI_PROVIDER: process_data.get("model_provider", ""), + GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)), + GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)), + GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)), + GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False), + GEN_AI_COMPLETION: str(outputs.get("text", "")), + GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason", ""), + INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False), + OUTPUT_VALUE: str(outputs.get("text", "")), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_message_span( + trace_info: MessageTraceInfo, trace_id: int, user_id: str, links: list | None = None + ) -> SpanData: + """Build message span.""" + links = links or [] + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + return SpanData( + trace_id=trace_id, + parent_span_id=None, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message"), + name="message", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_IS_ENTRY: "true", + INPUT_VALUE: str(trace_info.inputs or ""), + OUTPUT_VALUE: str(trace_info.outputs or ""), + }, + status=status, + links=links, + ) + + @staticmethod + def build_tool_span(trace_info: ToolTraceInfo, trace_id: int, parent_span_id: int) -> SpanData: + """Build tool span.""" + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + return SpanData( + trace_id=trace_id, + parent_span_id=parent_span_id, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "tool"), + name=trace_info.tool_name, + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, + GEN_AI_FRAMEWORK: "dify", + TOOL_NAME: trace_info.tool_name, + TOOL_DESCRIPTION: "", + TOOL_PARAMETERS: json.dumps(trace_info.tool_parameters, ensure_ascii=False), + INPUT_VALUE: json.dumps(trace_info.tool_inputs, ensure_ascii=False), + OUTPUT_VALUE: str(trace_info.tool_outputs), + }, + status=status, + ) + + @staticmethod + def build_retrieval_span(trace_info: DatasetRetrievalTraceInfo, trace_id: int, parent_span_id: int) -> SpanData: + """Build dataset retrieval span.""" + status = Status(StatusCode.OK) + if getattr(trace_info, "error", None): + status = Status(StatusCode.ERROR, trace_info.error) # type: ignore[arg-type] + + documents_data = TencentSpanBuilder._extract_retrieval_documents(trace_info.documents) + + return SpanData( + trace_id=trace_id, + parent_span_id=parent_span_id, + 
span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "retrieval"), + name="retrieval", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, + GEN_AI_FRAMEWORK: "dify", + RETRIEVAL_QUERY: str(trace_info.inputs or ""), + RETRIEVAL_DOCUMENT: json.dumps(documents_data, ensure_ascii=False), + INPUT_VALUE: str(trace_info.inputs or ""), + OUTPUT_VALUE: json.dumps(documents_data, ensure_ascii=False), + }, + status=status, + ) + + @staticmethod + def _get_workflow_node_status(node_execution: WorkflowNodeExecution) -> Status: + """Get workflow node execution status.""" + if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED: + return Status(StatusCode.OK) + elif node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]: + return Status(StatusCode.ERROR, str(node_execution.error)) + return Status(StatusCode.UNSET) + + @staticmethod + def build_workflow_retrieval_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build knowledge retrieval span for workflow nodes.""" + input_value = "" + if node_execution.inputs: + input_value = str(node_execution.inputs.get("query", "")) + output_value = "" + if node_execution.outputs: + output_value = json.dumps(node_execution.outputs.get("result", []), ensure_ascii=False) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, + GEN_AI_FRAMEWORK: "dify", + RETRIEVAL_QUERY: input_value, + RETRIEVAL_DOCUMENT: output_value, + INPUT_VALUE: input_value, + OUTPUT_VALUE: output_value, + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_workflow_tool_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build tool span for workflow nodes.""" + tool_des = {} + if node_execution.metadata: + tool_des = node_execution.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {}) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, + GEN_AI_FRAMEWORK: "dify", + TOOL_NAME: node_execution.title, + TOOL_DESCRIPTION: json.dumps(tool_des, ensure_ascii=False), + TOOL_PARAMETERS: json.dumps(node_execution.inputs or {}, ensure_ascii=False), + INPUT_VALUE: json.dumps(node_execution.inputs or {}, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_workflow_task_span( + trace_id: int, workflow_span_id: int, trace_info: 
WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build generic task span for workflow nodes.""" + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value, + GEN_AI_FRAMEWORK: "dify", + INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def _extract_retrieval_documents(documents: list[Document]): + """Extract documents data for retrieval tracing.""" + documents_data = [] + for document in documents: + document_data = { + "content": document.page_content, + "metadata": { + "dataset_id": document.metadata.get("dataset_id"), + "doc_id": document.metadata.get("doc_id"), + "document_id": document.metadata.get("document_id"), + }, + "score": document.metadata.get("score"), + } + documents_data.append(document_data) + return documents_data diff --git a/api/core/ops/tencent_trace/tencent_trace.py b/api/core/ops/tencent_trace/tencent_trace.py new file mode 100644 index 0000000000..5ef1c61b24 --- /dev/null +++ b/api/core/ops/tencent_trace/tencent_trace.py @@ -0,0 +1,317 @@ +""" +Tencent APM tracing implementation with separated concerns +""" + +import logging + +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker + +from core.ops.base_trace_instance import BaseTraceInstance +from core.ops.entities.config_entity import TencentConfig +from core.ops.entities.trace_entity import ( + BaseTraceInfo, + DatasetRetrievalTraceInfo, + GenerateNameTraceInfo, + MessageTraceInfo, + ModerationTraceInfo, + SuggestedQuestionTraceInfo, + ToolTraceInfo, + WorkflowTraceInfo, +) +from core.ops.tencent_trace.client import TencentTraceClient +from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData +from core.ops.tencent_trace.span_builder import TencentSpanBuilder +from core.ops.tencent_trace.utils import TencentTraceUtils +from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository +from core.workflow.entities.workflow_node_execution import ( + WorkflowNodeExecution, +) +from core.workflow.nodes import NodeType +from extensions.ext_database import db +from models import Account, App, TenantAccountJoin, WorkflowNodeExecutionTriggeredFrom + +logger = logging.getLogger(__name__) + + +class TencentDataTrace(BaseTraceInstance): + """ + Tencent APM trace implementation with single responsibility principle. + Acts as a coordinator that delegates specific tasks to specialized classes. 
+ """ + + def __init__(self, tencent_config: TencentConfig): + super().__init__(tencent_config) + self.trace_client = TencentTraceClient( + service_name=tencent_config.service_name, + endpoint=tencent_config.endpoint, + token=tencent_config.token, + metrics_export_interval_sec=5, + ) + + def trace(self, trace_info: BaseTraceInfo) -> None: + """Main tracing entry point - coordinates different trace types.""" + if isinstance(trace_info, WorkflowTraceInfo): + self.workflow_trace(trace_info) + elif isinstance(trace_info, MessageTraceInfo): + self.message_trace(trace_info) + elif isinstance(trace_info, ModerationTraceInfo): + pass + elif isinstance(trace_info, SuggestedQuestionTraceInfo): + self.suggested_question_trace(trace_info) + elif isinstance(trace_info, DatasetRetrievalTraceInfo): + self.dataset_retrieval_trace(trace_info) + elif isinstance(trace_info, ToolTraceInfo): + self.tool_trace(trace_info) + elif isinstance(trace_info, GenerateNameTraceInfo): + pass + + def api_check(self) -> bool: + return self.trace_client.api_check() + + def get_project_url(self) -> str: + return self.trace_client.get_project_url() + + def workflow_trace(self, trace_info: WorkflowTraceInfo) -> None: + """Handle workflow tracing by coordinating data retrieval and span construction.""" + try: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.workflow_run_id) + + links = [] + if trace_info.trace_id: + links.append(TencentTraceUtils.create_link(trace_info.trace_id)) + + user_id = self._get_user_id(trace_info) + + workflow_spans = TencentSpanBuilder.build_workflow_spans(trace_info, trace_id, str(user_id), links) + + for span in workflow_spans: + self.trace_client.add_span(span) + + self._process_workflow_nodes(trace_info, trace_id) + + except Exception: + logger.exception("[Tencent APM] Failed to process workflow trace") + + def message_trace(self, trace_info: MessageTraceInfo) -> None: + """Handle message tracing.""" + try: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.message_id) + user_id = self._get_user_id(trace_info) + + links = [] + if trace_info.trace_id: + links.append(TencentTraceUtils.create_link(trace_info.trace_id)) + + message_span = TencentSpanBuilder.build_message_span(trace_info, trace_id, str(user_id), links) + + self.trace_client.add_span(message_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process message trace") + + def tool_trace(self, trace_info: ToolTraceInfo) -> None: + """Handle tool tracing.""" + try: + parent_span_id = None + trace_root_id = None + + if trace_info.message_id: + parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message") + trace_root_id = trace_info.message_id + + if parent_span_id and trace_root_id: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id) + + tool_span = TencentSpanBuilder.build_tool_span(trace_info, trace_id, parent_span_id) + + self.trace_client.add_span(tool_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process tool trace") + + def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo) -> None: + """Handle dataset retrieval tracing.""" + try: + parent_span_id = None + trace_root_id = None + + if trace_info.message_id: + parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message") + trace_root_id = trace_info.message_id + + if parent_span_id and trace_root_id: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id) + + retrieval_span = 
TencentSpanBuilder.build_retrieval_span(trace_info, trace_id, parent_span_id) + + self.trace_client.add_span(retrieval_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process dataset retrieval trace") + + def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo) -> None: + """Handle suggested question tracing""" + try: + logger.info("[Tencent APM] Processing suggested question trace") + + except Exception: + logger.exception("[Tencent APM] Failed to process suggested question trace") + + def _process_workflow_nodes(self, trace_info: WorkflowTraceInfo, trace_id: int) -> None: + """Process workflow node executions.""" + try: + workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow") + + node_executions = self._get_workflow_node_executions(trace_info) + + for node_execution in node_executions: + try: + node_span = self._build_workflow_node_span(node_execution, trace_id, trace_info, workflow_span_id) + if node_span: + self.trace_client.add_span(node_span) + + if node_execution.node_type == NodeType.LLM: + self._record_llm_metrics(node_execution) + except Exception: + logger.exception("[Tencent APM] Failed to process node execution: %s", node_execution.id) + + except Exception: + logger.exception("[Tencent APM] Failed to process workflow nodes") + + def _build_workflow_node_span( + self, node_execution: WorkflowNodeExecution, trace_id: int, trace_info: WorkflowTraceInfo, workflow_span_id: int + ) -> SpanData | None: + """Build span for different node types""" + try: + if node_execution.node_type == NodeType.LLM: + return TencentSpanBuilder.build_workflow_llm_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + elif node_execution.node_type == NodeType.KNOWLEDGE_RETRIEVAL: + return TencentSpanBuilder.build_workflow_retrieval_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + elif node_execution.node_type == NodeType.TOOL: + return TencentSpanBuilder.build_workflow_tool_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + else: + # Handle all other node types as generic tasks + return TencentSpanBuilder.build_workflow_task_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + except Exception: + logger.debug( + "[Tencent APM] Error building span for node %s: %s", + node_execution.id, + node_execution.node_type, + exc_info=True, + ) + return None + + def _get_workflow_node_executions(self, trace_info: WorkflowTraceInfo) -> list[WorkflowNodeExecution]: + """Retrieve workflow node executions from database.""" + try: + session_maker = sessionmaker(bind=db.engine) + + with Session(db.engine, expire_on_commit=False) as session: + app_id = trace_info.metadata.get("app_id") + if not app_id: + raise ValueError("No app_id found in trace_info metadata") + + app_stmt = select(App).where(App.id == app_id) + app = session.scalar(app_stmt) + if not app: + raise ValueError(f"App with id {app_id} not found") + + if not app.created_by: + raise ValueError(f"App with id {app_id} has no creator") + + account_stmt = select(Account).where(Account.id == app.created_by) + service_account = session.scalar(account_stmt) + if not service_account: + raise ValueError(f"Creator account not found for app {app_id}") + + current_tenant = ( + session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first() + ) + if not current_tenant: + raise ValueError(f"Current tenant not found for account {service_account.id}") + + 
service_account.set_tenant_id(current_tenant.tenant_id) + + repository = SQLAlchemyWorkflowNodeExecutionRepository( + session_factory=session_maker, + user=service_account, + app_id=trace_info.metadata.get("app_id"), + triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN, + ) + + executions = repository.get_by_workflow_run(workflow_run_id=trace_info.workflow_run_id) + return list(executions) + + except Exception: + logger.exception("[Tencent APM] Failed to get workflow node executions") + return [] + + def _get_user_id(self, trace_info: BaseTraceInfo) -> str: + """Get user ID from trace info.""" + try: + tenant_id = None + user_id = None + + if isinstance(trace_info, (WorkflowTraceInfo, GenerateNameTraceInfo)): + tenant_id = trace_info.tenant_id + + if hasattr(trace_info, "metadata") and trace_info.metadata: + user_id = trace_info.metadata.get("user_id") + + if user_id and tenant_id: + stmt = ( + select(Account.name) + .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) + .where(Account.id == user_id, TenantAccountJoin.tenant_id == tenant_id) + ) + + session_maker = sessionmaker(bind=db.engine) + with session_maker() as session: + account_name = session.scalar(stmt) + return account_name or str(user_id) + elif user_id: + return str(user_id) + + return "anonymous" + + except Exception: + logger.exception("[Tencent APM] Failed to get user ID") + return "unknown" + + def _record_llm_metrics(self, node_execution: WorkflowNodeExecution) -> None: + """Record LLM performance metrics""" + try: + if not hasattr(self.trace_client, "record_llm_duration"): + return + + process_data = node_execution.process_data or {} + usage = process_data.get("usage", {}) + latency_s = float(usage.get("latency", 0.0)) + + if latency_s > 0: + attributes = { + "provider": process_data.get("model_provider", ""), + "model": process_data.get("model_name", ""), + "span_kind": "GENERATION", + } + self.trace_client.record_llm_duration(latency_s, attributes) + + except Exception: + logger.debug("[Tencent APM] Failed to record LLM metrics") + + def __del__(self): + """Ensure proper cleanup on garbage collection.""" + try: + if hasattr(self, "trace_client"): + self.trace_client.shutdown() + except Exception: + pass diff --git a/api/core/ops/tencent_trace/utils.py b/api/core/ops/tencent_trace/utils.py new file mode 100644 index 0000000000..96087951ab --- /dev/null +++ b/api/core/ops/tencent_trace/utils.py @@ -0,0 +1,65 @@ +""" +Utility functions for Tencent APM tracing +""" + +import hashlib +import random +import uuid +from datetime import datetime +from typing import cast + +from opentelemetry.trace import Link, SpanContext, TraceFlags + + +class TencentTraceUtils: + """Utility class for common tracing operations.""" + + INVALID_SPAN_ID = 0x0000000000000000 + INVALID_TRACE_ID = 0x00000000000000000000000000000000 + + @staticmethod + def convert_to_trace_id(uuid_v4: str | None) -> int: + try: + uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4() + except Exception as e: + raise ValueError(f"Invalid UUID input: {e}") + return cast(int, uuid_obj.int) + + @staticmethod + def convert_to_span_id(uuid_v4: str | None, span_type: str) -> int: + try: + uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4() + except Exception as e: + raise ValueError(f"Invalid UUID input: {e}") + combined_key = f"{uuid_obj.hex}-{span_type}" + hash_bytes = hashlib.sha256(combined_key.encode("utf-8")).digest() + return int.from_bytes(hash_bytes[:8], byteorder="big", signed=False) + + @staticmethod + def 
generate_span_id() -> int: + span_id = random.getrandbits(64) + while span_id == TencentTraceUtils.INVALID_SPAN_ID: + span_id = random.getrandbits(64) + return span_id + + @staticmethod + def convert_datetime_to_nanoseconds(start_time: datetime | None) -> int: + if start_time is None: + start_time = datetime.now() + timestamp_in_seconds = start_time.timestamp() + return int(timestamp_in_seconds * 1e9) + + @staticmethod + def create_link(trace_id_str: str) -> Link: + try: + trace_id = int(trace_id_str, 16) if len(trace_id_str) == 32 else cast(int, uuid.UUID(trace_id_str).int) + except (ValueError, TypeError): + trace_id = cast(int, uuid.uuid4().int) + + span_context = SpanContext( + trace_id=trace_id, + span_id=TencentTraceUtils.INVALID_SPAN_ID, + is_remote=False, + trace_flags=TraceFlags(TraceFlags.SAMPLED), + ) + return Link(span_context) diff --git a/api/core/ops/weave_trace/weave_trace.py b/api/core/ops/weave_trace/weave_trace.py index 339694cf07..9b3d7a8192 100644 --- a/api/core/ops/weave_trace/weave_trace.py +++ b/api/core/ops/weave_trace/weave_trace.py @@ -62,7 +62,8 @@ class WeaveDataTrace(BaseTraceInstance): self, ): try: - project_url = f"https://wandb.ai/{self.weave_client._project_id()}" + project_identifier = f"{self.entity}/{self.project_name}" if self.entity else self.project_name + project_url = f"https://wandb.ai/{project_identifier}" return project_url except Exception as e: logger.debug("Weave get run url failed: %s", str(e)) @@ -103,7 +104,7 @@ class WeaveDataTrace(BaseTraceInstance): message_run = WeaveTraceModel( id=trace_info.message_id, - op=str(TraceTaskName.MESSAGE_TRACE.value), + op=str(TraceTaskName.MESSAGE_TRACE), inputs=dict(trace_info.workflow_run_inputs), outputs=dict(trace_info.workflow_run_outputs), total_tokens=trace_info.total_tokens, @@ -125,7 +126,7 @@ class WeaveDataTrace(BaseTraceInstance): file_list=trace_info.file_list, total_tokens=trace_info.total_tokens, id=trace_info.workflow_run_id, - op=str(TraceTaskName.WORKFLOW_TRACE.value), + op=str(TraceTaskName.WORKFLOW_TRACE), inputs=dict(trace_info.workflow_run_inputs), outputs=dict(trace_info.workflow_run_outputs), attributes=workflow_attributes, @@ -252,7 +253,7 @@ class WeaveDataTrace(BaseTraceInstance): message_run = WeaveTraceModel( id=trace_id, - op=str(TraceTaskName.MESSAGE_TRACE.value), + op=str(TraceTaskName.MESSAGE_TRACE), input_tokens=trace_info.message_tokens, output_tokens=trace_info.answer_tokens, total_tokens=trace_info.total_tokens, @@ -299,7 +300,7 @@ class WeaveDataTrace(BaseTraceInstance): moderation_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.MODERATION_TRACE.value), + op=str(TraceTaskName.MODERATION_TRACE), inputs=trace_info.inputs, outputs={ "action": trace_info.action, @@ -329,7 +330,7 @@ class WeaveDataTrace(BaseTraceInstance): suggested_question_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.SUGGESTED_QUESTION_TRACE.value), + op=str(TraceTaskName.SUGGESTED_QUESTION_TRACE), inputs=trace_info.inputs, outputs=trace_info.suggested_question, attributes=attributes, @@ -354,7 +355,7 @@ class WeaveDataTrace(BaseTraceInstance): dataset_retrieval_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.DATASET_RETRIEVAL_TRACE.value), + op=str(TraceTaskName.DATASET_RETRIEVAL_TRACE), inputs=trace_info.inputs, outputs={"documents": trace_info.documents}, attributes=attributes, @@ -396,7 +397,7 @@ class WeaveDataTrace(BaseTraceInstance): name_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.GENERATE_NAME_TRACE.value), 
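The utilities above make trace and span IDs deterministic: a workflow-run UUID is used verbatim as the 128-bit trace id, and each span id is the first 8 bytes of `sha256("<uuid hex>-<span type>")`, so re-tracing the same run reproduces the same span tree. A standalone sketch of the derivation (the run id below is illustrative):

```python
import hashlib
import uuid

run_id = "8c9f2f6e-3a3f-4a83-9a2e-9a4c1f0b7c11"  # illustrative workflow_run_id

# Trace id: the UUID's 128-bit integer value, as in convert_to_trace_id.
trace_id = uuid.UUID(run_id).int

# Span id: first 8 bytes of sha256("<hex>-<span_type>"), as in convert_to_span_id.
digest = hashlib.sha256(f"{uuid.UUID(run_id).hex}-workflow".encode("utf-8")).digest()
span_id = int.from_bytes(digest[:8], byteorder="big", signed=False)

assert trace_id.bit_length() <= 128 and span_id.bit_length() <= 64
```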
+ op=str(TraceTaskName.GENERATE_NAME_TRACE), inputs=trace_info.inputs, outputs=trace_info.outputs, attributes=attributes, @@ -424,7 +425,23 @@ class WeaveDataTrace(BaseTraceInstance): raise ValueError(f"Weave API check failed: {str(e)}") def start_call(self, run_data: WeaveTraceModel, parent_run_id: str | None = None): - call = self.weave_client.create_call(op=run_data.op, inputs=run_data.inputs, attributes=run_data.attributes) + inputs = run_data.inputs + if inputs is None: + inputs = {} + elif not isinstance(inputs, dict): + inputs = {"inputs": str(inputs)} + + attributes = run_data.attributes + if attributes is None: + attributes = {} + elif not isinstance(attributes, dict): + attributes = {"attributes": str(attributes)} + + call = self.weave_client.create_call( + op=run_data.op, + inputs=inputs, + attributes=attributes, + ) self.calls[run_data.id] = call if parent_run_id: self.calls[run_data.id].parent_id = parent_run_id @@ -432,6 +449,7 @@ class WeaveDataTrace(BaseTraceInstance): def finish_call(self, run_data: WeaveTraceModel): call = self.calls.get(run_data.id) if call: - self.weave_client.finish_call(call=call, output=run_data.outputs, exception=run_data.exception) + exception = Exception(run_data.exception) if run_data.exception else None + self.weave_client.finish_call(call=call, output=run_data.outputs, exception=exception) else: raise ValueError(f"Call with id {run_data.id} not found") diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py index 8b08b09eb9..32ac132e1e 100644 --- a/api/core/plugin/backwards_invocation/app.py +++ b/api/core/plugin/backwards_invocation/app.py @@ -14,7 +14,7 @@ from core.app.apps.workflow.app_generator import WorkflowAppGenerator from core.app.entities.app_invoke_entities import InvokeFrom from core.plugin.backwards_invocation.base import BaseBackwardsInvocation from extensions.ext_database import db -from models.account import Account +from models import Account from models.model import App, AppMode, EndUser diff --git a/api/core/plugin/backwards_invocation/node.py b/api/core/plugin/backwards_invocation/node.py index 1d6d21cff7..9fbcbf55b4 100644 --- a/api/core/plugin/backwards_invocation/node.py +++ b/api/core/plugin/backwards_invocation/node.py @@ -52,7 +52,7 @@ class PluginNodeBackwardsInvocation(BaseBackwardsInvocation): instruction=instruction, # instruct with variables are not supported ) node_data_dict = node_data.model_dump() - node_data_dict["type"] = NodeType.PARAMETER_EXTRACTOR.value + node_data_dict["type"] = NodeType.PARAMETER_EXTRACTOR execution = workflow_service.run_free_workflow_node( node_data_dict, tenant_id=tenant_id, diff --git a/api/core/plugin/entities/parameters.py b/api/core/plugin/entities/parameters.py index 68b5c1084a..1e7f8e4c86 100644 --- a/api/core/plugin/entities/parameters.py +++ b/api/core/plugin/entities/parameters.py @@ -76,7 +76,7 @@ class PluginParameter(BaseModel): auto_generate: PluginParameterAutoGenerate | None = None template: PluginParameterTemplate | None = None required: bool = False - default: Union[float, int, str] | None = None + default: Union[float, int, str, bool] | None = None min: Union[float, int] | None = None max: Union[float, int] | None = None precision: int | None = None diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 10f37f75f8..d5df85730b 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -83,16 +83,16 @@ class RequestInvokeLLM(BaseRequestInvokeModel): 
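Dropping `.value` throughout these hunks (and in the `PromptMessageRole` comparisons just below) relies on the enums being str-based; assuming they derive from Python 3.11+'s `StrEnum`, each member already is its own string value, so equality checks and `str()`/f-string interpolation behave identically with or without `.value`:

```python
from enum import StrEnum

class TraceTaskName(StrEnum):  # illustrative stand-in for the real enum
    GENERATE_NAME_TRACE = "generate_name_trace"

# A StrEnum member is a str, so no .value is needed for comparison...
assert TraceTaskName.GENERATE_NAME_TRACE == "generate_name_trace"
# ...or for serialization into op names, metadata, and log fields.
assert str(TraceTaskName.GENERATE_NAME_TRACE) == "generate_name_trace"
assert f"{TraceTaskName.GENERATE_NAME_TRACE}" == "generate_name_trace"
```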
raise ValueError("prompt_messages must be a list") for i in range(len(v)): - if v[i]["role"] == PromptMessageRole.USER.value: - v[i] = UserPromptMessage(**v[i]) - elif v[i]["role"] == PromptMessageRole.ASSISTANT.value: - v[i] = AssistantPromptMessage(**v[i]) - elif v[i]["role"] == PromptMessageRole.SYSTEM.value: - v[i] = SystemPromptMessage(**v[i]) - elif v[i]["role"] == PromptMessageRole.TOOL.value: - v[i] = ToolPromptMessage(**v[i]) + if v[i]["role"] == PromptMessageRole.USER: + v[i] = UserPromptMessage.model_validate(v[i]) + elif v[i]["role"] == PromptMessageRole.ASSISTANT: + v[i] = AssistantPromptMessage.model_validate(v[i]) + elif v[i]["role"] == PromptMessageRole.SYSTEM: + v[i] = SystemPromptMessage.model_validate(v[i]) + elif v[i]["role"] == PromptMessageRole.TOOL: + v[i] = ToolPromptMessage.model_validate(v[i]) else: - v[i] = PromptMessage(**v[i]) + v[i] = PromptMessage.model_validate(v[i]) return v diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py index 8e3df4da2c..e9dc58eec8 100644 --- a/api/core/plugin/impl/base.py +++ b/api/core/plugin/impl/base.py @@ -2,11 +2,10 @@ import inspect import json import logging from collections.abc import Callable, Generator -from typing import TypeVar +from typing import Any, TypeVar, cast -import requests +import httpx from pydantic import BaseModel -from requests.exceptions import HTTPError from yarl import URL from configs import dify_config @@ -32,6 +31,17 @@ from core.plugin.impl.exc import ( ) plugin_daemon_inner_api_baseurl = URL(str(dify_config.PLUGIN_DAEMON_URL)) +_plugin_daemon_timeout_config = cast( + float | httpx.Timeout | None, + getattr(dify_config, "PLUGIN_DAEMON_TIMEOUT", 300.0), +) +plugin_daemon_request_timeout: httpx.Timeout | None +if _plugin_daemon_timeout_config is None: + plugin_daemon_request_timeout = None +elif isinstance(_plugin_daemon_timeout_config, httpx.Timeout): + plugin_daemon_request_timeout = _plugin_daemon_timeout_config +else: + plugin_daemon_request_timeout = httpx.Timeout(_plugin_daemon_timeout_config) T = TypeVar("T", bound=(BaseModel | dict | list | bool | str)) @@ -47,29 +57,57 @@ class BasePluginClient: data: bytes | dict | str | None = None, params: dict | None = None, files: dict | None = None, - stream: bool = False, - ) -> requests.Response: + ) -> httpx.Response: """ Make a request to the plugin daemon inner API. 
""" - url = plugin_daemon_inner_api_baseurl / path - headers = headers or {} - headers["X-Api-Key"] = dify_config.PLUGIN_DAEMON_KEY - headers["Accept-Encoding"] = "gzip, deflate, br" + url, headers, prepared_data, params, files = self._prepare_request(path, headers, data, params, files) - if headers.get("Content-Type") == "application/json" and isinstance(data, dict): - data = json.dumps(data) + request_kwargs: dict[str, Any] = { + "method": method, + "url": url, + "headers": headers, + "params": params, + "files": files, + "timeout": plugin_daemon_request_timeout, + } + if isinstance(prepared_data, dict): + request_kwargs["data"] = prepared_data + elif prepared_data is not None: + request_kwargs["content"] = prepared_data try: - response = requests.request( - method=method, url=str(url), headers=headers, data=data, params=params, stream=stream, files=files - ) - except requests.ConnectionError: + response = httpx.request(**request_kwargs) + except httpx.RequestError: logger.exception("Request to Plugin Daemon Service failed") raise PluginDaemonInnerError(code=-500, message="Request to Plugin Daemon Service failed") return response + def _prepare_request( + self, + path: str, + headers: dict | None, + data: bytes | dict | str | None, + params: dict | None, + files: dict | None, + ) -> tuple[str, dict, bytes | dict | str | None, dict | None, dict | None]: + url = plugin_daemon_inner_api_baseurl / path + prepared_headers = dict(headers or {}) + prepared_headers["X-Api-Key"] = dify_config.PLUGIN_DAEMON_KEY + prepared_headers.setdefault("Accept-Encoding", "gzip, deflate, br") + + prepared_data: bytes | dict | str | None = ( + data if isinstance(data, (bytes, str, dict)) or data is None else None + ) + if isinstance(data, dict): + if prepared_headers.get("Content-Type") == "application/json": + prepared_data = json.dumps(data) + else: + prepared_data = data + + return str(url), prepared_headers, prepared_data, params, files + def _stream_request( self, method: str, @@ -78,23 +116,45 @@ class BasePluginClient: headers: dict | None = None, data: bytes | dict | None = None, files: dict | None = None, - ) -> Generator[bytes, None, None]: + ) -> Generator[str, None, None]: """ Make a stream request to the plugin daemon inner API """ - response = self._request(method, path, headers, data, params, files, stream=True) - for line in response.iter_lines(chunk_size=1024 * 8): - line = line.decode("utf-8").strip() - if line.startswith("data:"): - line = line[5:].strip() - if line: - yield line + url, headers, prepared_data, params, files = self._prepare_request(path, headers, data, params, files) + + stream_kwargs: dict[str, Any] = { + "method": method, + "url": url, + "headers": headers, + "params": params, + "files": files, + "timeout": plugin_daemon_request_timeout, + } + if isinstance(prepared_data, dict): + stream_kwargs["data"] = prepared_data + elif prepared_data is not None: + stream_kwargs["content"] = prepared_data + + try: + with httpx.stream(**stream_kwargs) as response: + for raw_line in response.iter_lines(): + if raw_line is None: + continue + line = raw_line.decode("utf-8") if isinstance(raw_line, bytes) else raw_line + line = line.strip() + if line.startswith("data:"): + line = line[5:].strip() + if line: + yield line + except httpx.RequestError: + logger.exception("Stream request to Plugin Daemon Service failed") + raise PluginDaemonInnerError(code=-500, message="Request to Plugin Daemon Service failed") def _stream_request_with_model( self, method: str, path: str, - type: type[T], + 
type_: type[T], headers: dict | None = None, data: bytes | dict | None = None, params: dict | None = None, @@ -104,13 +164,13 @@ class BasePluginClient: Make a stream request to the plugin daemon inner API and yield the response as a model. """ for line in self._stream_request(method, path, params, headers, data, files): - yield type(**json.loads(line)) # type: ignore + yield type_(**json.loads(line)) # type: ignore def _request_with_model( self, method: str, path: str, - type: type[T], + type_: type[T], headers: dict | None = None, data: bytes | None = None, params: dict | None = None, @@ -120,13 +180,13 @@ class BasePluginClient: Make a request to the plugin daemon inner API and return the response as a model. """ response = self._request(method, path, headers, data, params, files) - return type(**response.json()) # type: ignore + return type_(**response.json()) # type: ignore[return-value] def _request_with_plugin_daemon_response( self, method: str, path: str, - type: type[T], + type_: type[T], headers: dict | None = None, data: bytes | dict | None = None, params: dict | None = None, @@ -139,23 +199,23 @@ class BasePluginClient: try: response = self._request(method, path, headers, data, params, files) response.raise_for_status() - except HTTPError as e: - msg = f"Failed to request plugin daemon, status: {e.response.status_code}, url: {path}" - logger.exception(msg) + except httpx.HTTPStatusError as e: + logger.exception("Failed to request plugin daemon, status: %s, url: %s", e.response.status_code, path) raise e except Exception as e: msg = f"Failed to request plugin daemon, url: {path}" - logger.exception(msg) + logger.exception("Failed to request plugin daemon, url: %s", path) raise ValueError(msg) from e try: json_response = response.json() if transformer: json_response = transformer(json_response) - rep = PluginDaemonBasicResponse[type](**json_response) # type: ignore + # https://stackoverflow.com/questions/59634937/variable-foo-class-is-not-valid-as-type-but-why + rep = PluginDaemonBasicResponse[type_].model_validate(json_response) # type: ignore except Exception: msg = ( - f"Failed to parse response from plugin daemon to PluginDaemonBasicResponse [{str(type.__name__)}]," + f"Failed to parse response from plugin daemon to PluginDaemonBasicResponse [{str(type_.__name__)}]," f" url: {path}" ) logger.exception(msg) @@ -163,7 +223,7 @@ class BasePluginClient: if rep.code != 0: try: - error = PluginDaemonError(**json.loads(rep.message)) + error = PluginDaemonError.model_validate(json.loads(rep.message)) except Exception: raise ValueError(f"{rep.message}, code: {rep.code}") @@ -178,7 +238,7 @@ class BasePluginClient: self, method: str, path: str, - type: type[T], + type_: type[T], headers: dict | None = None, data: bytes | dict | None = None, params: dict | None = None, @@ -189,7 +249,7 @@ class BasePluginClient: """ for line in self._stream_request(method, path, params, headers, data, files): try: - rep = PluginDaemonBasicResponse[type].model_validate_json(line) # type: ignore + rep = PluginDaemonBasicResponse[type_].model_validate_json(line) # type: ignore except (ValueError, TypeError): # TODO modify this when line_data has code and message try: @@ -204,11 +264,11 @@ class BasePluginClient: if rep.code != 0: if rep.code == -500: try: - error = PluginDaemonError(**json.loads(rep.message)) + error = PluginDaemonError.model_validate(json.loads(rep.message)) except Exception: raise PluginDaemonInnerError(code=rep.code, message=rep.message) - logger.error("Error in stream reponse for 
plugin %s", rep.__dict__) + logger.error("Error in stream response for plugin %s", rep.__dict__) self._handle_plugin_daemon_error(error.error_type, error.message) raise ValueError(f"plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: diff --git a/api/core/plugin/impl/datasource.py b/api/core/plugin/impl/datasource.py index 84087f8104..ce1ef71494 100644 --- a/api/core/plugin/impl/datasource.py +++ b/api/core/plugin/impl/datasource.py @@ -46,7 +46,9 @@ class PluginDatasourceManager(BasePluginClient): params={"page": 1, "page_size": 256}, transformer=transformer, ) - local_file_datasource_provider = PluginDatasourceProviderEntity(**self._get_local_file_datasource_provider()) + local_file_datasource_provider = PluginDatasourceProviderEntity.model_validate( + self._get_local_file_datasource_provider() + ) for provider in response: ToolTransformService.repack_provider(tenant_id=tenant_id, provider=provider) @@ -104,7 +106,7 @@ class PluginDatasourceManager(BasePluginClient): Fetch datasource provider for the given tenant and plugin. """ if provider_id == "langgenius/file/file": - return PluginDatasourceProviderEntity(**self._get_local_file_datasource_provider()) + return PluginDatasourceProviderEntity.model_validate(self._get_local_file_datasource_provider()) tool_provider_id = DatasourceProviderID(provider_id) diff --git a/api/core/plugin/impl/exc.py b/api/core/plugin/impl/exc.py index 23a69bd92f..e28a324217 100644 --- a/api/core/plugin/impl/exc.py +++ b/api/core/plugin/impl/exc.py @@ -40,7 +40,7 @@ class PluginDaemonBadRequestError(PluginDaemonClientSideError): description: str = "Bad Request" -class PluginInvokeError(PluginDaemonClientSideError): +class PluginInvokeError(PluginDaemonClientSideError, ValueError): description: str = "Invoke Error" def _get_error_object(self) -> Mapping: diff --git a/api/core/plugin/impl/model.py b/api/core/plugin/impl/model.py index 153da142f4..5dfc3c212e 100644 --- a/api/core/plugin/impl/model.py +++ b/api/core/plugin/impl/model.py @@ -162,7 +162,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/llm/invoke", - type=LLMResultChunk, + type_=LLMResultChunk, data=jsonable_encoder( { "user_id": user_id, @@ -208,7 +208,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/llm/num_tokens", - type=PluginLLMNumTokensResponse, + type_=PluginLLMNumTokensResponse, data=jsonable_encoder( { "user_id": user_id, @@ -250,7 +250,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/text_embedding/invoke", - type=TextEmbeddingResult, + type_=TextEmbeddingResult, data=jsonable_encoder( { "user_id": user_id, @@ -291,7 +291,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/text_embedding/num_tokens", - type=PluginTextEmbeddingNumTokensResponse, + type_=PluginTextEmbeddingNumTokensResponse, data=jsonable_encoder( { "user_id": user_id, @@ -334,7 +334,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/rerank/invoke", - type=RerankResult, + type_=RerankResult, data=jsonable_encoder( { "user_id": user_id, @@ -378,7 +378,7 @@ class 
         response = self._request_with_plugin_daemon_response_stream(
             method="POST",
             path=f"plugin/{tenant_id}/dispatch/tts/invoke",
-            type=PluginStringResultResponse,
+            type_=PluginStringResultResponse,
             data=jsonable_encoder(
                 {
                     "user_id": user_id,
@@ -422,7 +422,7 @@ class PluginModelClient(BasePluginClient):
         response = self._request_with_plugin_daemon_response_stream(
             method="POST",
             path=f"plugin/{tenant_id}/dispatch/tts/model/voices",
-            type=PluginVoicesResponse,
+            type_=PluginVoicesResponse,
             data=jsonable_encoder(
                 {
                     "user_id": user_id,
@@ -466,7 +466,7 @@ class PluginModelClient(BasePluginClient):
         response = self._request_with_plugin_daemon_response_stream(
             method="POST",
             path=f"plugin/{tenant_id}/dispatch/speech2text/invoke",
-            type=PluginStringResultResponse,
+            type_=PluginStringResultResponse,
             data=jsonable_encoder(
                 {
                     "user_id": user_id,
@@ -506,7 +506,7 @@ class PluginModelClient(BasePluginClient):
         response = self._request_with_plugin_daemon_response_stream(
             method="POST",
             path=f"plugin/{tenant_id}/dispatch/moderation/invoke",
-            type=PluginBasicBooleanResponse,
+            type_=PluginBasicBooleanResponse,
             data=jsonable_encoder(
                 {
                     "user_id": user_id,
diff --git a/api/core/plugin/utils/chunk_merger.py b/api/core/plugin/utils/chunk_merger.py
index e30076f9d3..28cb70f96a 100644
--- a/api/core/plugin/utils/chunk_merger.py
+++ b/api/core/plugin/utils/chunk_merger.py
@@ -1,6 +1,6 @@
 from collections.abc import Generator
 from dataclasses import dataclass, field
-from typing import TypeVar, Union, cast
+from typing import TypeVar, Union
 
 from core.agent.entities import AgentInvokeMessage
 from core.tools.entities.tool_entities import ToolInvokeMessage
@@ -87,7 +87,8 @@ def merge_blob_chunks(
                     ),
                     meta=resp.meta,
                 )
-                yield cast(MessageType, merged_message)
+                assert isinstance(merged_message, (ToolInvokeMessage, AgentInvokeMessage))
+                yield merged_message  # type: ignore
                 # Clean up the buffer
                 del files[chunk_id]
             else:
diff --git a/api/core/prompt/advanced_prompt_transform.py b/api/core/prompt/advanced_prompt_transform.py
index 5f2ffefd94..d74b2bddf5 100644
--- a/api/core/prompt/advanced_prompt_transform.py
+++ b/api/core/prompt/advanced_prompt_transform.py
@@ -18,7 +18,7 @@ from core.model_runtime.entities.message_entities import ImagePromptMessageConte
 from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig
 from core.prompt.prompt_transform import PromptTransform
 from core.prompt.utils.prompt_template_parser import PromptTemplateParser
-from core.workflow.entities.variable_pool import VariablePool
+from core.workflow.runtime import VariablePool
 
 
 class AdvancedPromptTransform(PromptTransform):
diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py
index 499d39bd5d..6cf6620d8d 100644
--- a/api/core/provider_manager.py
+++ b/api/core/provider_manager.py
@@ -610,7 +610,7 @@ class ProviderManager:
 
         provider_quota_to_provider_record_dict = {}
         for provider_record in provider_records:
-            if provider_record.provider_type != ProviderType.SYSTEM.value:
+            if provider_record.provider_type != ProviderType.SYSTEM:
                 continue
 
             provider_quota_to_provider_record_dict[ProviderQuotaType.value_of(provider_record.quota_type)] = (
@@ -627,8 +627,8 @@ class ProviderManager:
                     tenant_id=tenant_id,
                     # TODO: Use provider name with prefix after the data migration.
                    provider_name=ModelProviderID(provider_name).provider_name,
-                    provider_type=ProviderType.SYSTEM.value,
-                    quota_type=ProviderQuotaType.TRIAL.value,
+                    provider_type=ProviderType.SYSTEM,
+                    quota_type=ProviderQuotaType.TRIAL,
                     quota_limit=quota.quota_limit,  # type: ignore
                     quota_used=0,
                     is_valid=True,
@@ -641,8 +641,8 @@ class ProviderManager:
                 stmt = select(Provider).where(
                     Provider.tenant_id == tenant_id,
                     Provider.provider_name == ModelProviderID(provider_name).provider_name,
-                    Provider.provider_type == ProviderType.SYSTEM.value,
-                    Provider.quota_type == ProviderQuotaType.TRIAL.value,
+                    Provider.provider_type == ProviderType.SYSTEM,
+                    Provider.quota_type == ProviderQuotaType.TRIAL,
                 )
                 existed_provider_record = db.session.scalar(stmt)
                 if not existed_provider_record:
@@ -702,7 +702,7 @@ class ProviderManager:
         """Get custom provider configuration."""
         # Find custom provider record (non-system)
         custom_provider_record = next(
-            (record for record in provider_records if record.provider_type != ProviderType.SYSTEM.value), None
+            (record for record in provider_records if record.provider_type != ProviderType.SYSTEM), None
         )
 
         if not custom_provider_record:
@@ -905,7 +905,7 @@ class ProviderManager:
         # Convert provider_records to dict
         quota_type_to_provider_records_dict: dict[ProviderQuotaType, Provider] = {}
         for provider_record in provider_records:
-            if provider_record.provider_type != ProviderType.SYSTEM.value:
+            if provider_record.provider_type != ProviderType.SYSTEM:
                 continue
 
             quota_type_to_provider_records_dict[ProviderQuotaType.value_of(provider_record.quota_type)] = (
@@ -1046,7 +1046,7 @@ class ProviderManager:
         """
         secret_input_form_variables = []
         for credential_form_schema in credential_form_schemas:
-            if credential_form_schema.type.value == FormType.SECRET_INPUT.value:
+            if credential_form_schema.type == FormType.SECRET_INPUT:
                 secret_input_form_variables.append(credential_form_schema.variable)
 
         return secret_input_form_variables
diff --git a/api/core/rag/data_post_processor/data_post_processor.py b/api/core/rag/data_post_processor/data_post_processor.py
index 696e3e967f..cc946a72c3 100644
--- a/api/core/rag/data_post_processor/data_post_processor.py
+++ b/api/core/rag/data_post_processor/data_post_processor.py
@@ -46,7 +46,7 @@ class DataPostProcessor:
         reranking_model: dict | None = None,
         weights: dict | None = None,
     ) -> BaseRerankRunner | None:
-        if reranking_mode == RerankMode.WEIGHTED_SCORE.value and weights:
+        if reranking_mode == RerankMode.WEIGHTED_SCORE and weights:
             runner = RerankRunnerFactory.create_rerank_runner(
                 runner_type=reranking_mode,
                 tenant_id=tenant_id,
@@ -62,7 +62,7 @@ class DataPostProcessor:
                 ),
             )
             return runner
-        elif reranking_mode == RerankMode.RERANKING_MODEL.value:
+        elif reranking_mode == RerankMode.RERANKING_MODEL:
             rerank_model_instance = self._get_rerank_model_instance(tenant_id, reranking_model)
             if rerank_model_instance is None:
                 return None
diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py
index 429744c0de..2290de19bc 100644
--- a/api/core/rag/datasource/retrieval_service.py
+++ b/api/core/rag/datasource/retrieval_service.py
@@ -21,7 +21,7 @@ from models.dataset import Document as DatasetDocument
 from services.external_knowledge_service import ExternalDatasetService
 
 default_retrieval_model = {
-    "search_method": RetrievalMethod.SEMANTIC_SEARCH.value,
+    "search_method": RetrievalMethod.SEMANTIC_SEARCH,
     "reranking_enable": False,
     "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""},
     "top_k": 4,
@@ -34,7 +34,7 @@ class RetrievalService:
     @classmethod
     def retrieve(
         cls,
-        retrieval_method: str,
+        retrieval_method: RetrievalMethod,
         dataset_id: str,
         query: str,
         top_k: int,
@@ -56,7 +56,7 @@ class RetrievalService:
         # Optimize multithreading with thread pools
         with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor:  # type: ignore
             futures = []
-            if retrieval_method == "keyword_search":
+            if retrieval_method == RetrievalMethod.KEYWORD_SEARCH:
                 futures.append(
                     executor.submit(
                         cls.keyword_search,
@@ -106,7 +106,9 @@ class RetrievalService:
         if exceptions:
             raise ValueError(";\n".join(exceptions))
 
-        if retrieval_method == RetrievalMethod.HYBRID_SEARCH.value:
+        # Deduplicate documents for hybrid search to avoid duplicate chunks
+        if retrieval_method == RetrievalMethod.HYBRID_SEARCH:
+            all_documents = cls._deduplicate_documents(all_documents)
             data_post_processor = DataPostProcessor(
                 str(dataset.tenant_id), reranking_mode, reranking_model, weights, False
             )
@@ -132,7 +134,7 @@ class RetrievalService:
         if not dataset:
             return []
         metadata_condition = (
-            MetadataCondition(**metadata_filtering_conditions) if metadata_filtering_conditions else None
+            MetadataCondition.model_validate(metadata_filtering_conditions) if metadata_filtering_conditions else None
        )
         all_documents = ExternalDatasetService.fetch_external_knowledge_retrieval(
             dataset.tenant_id,
@@ -143,6 +145,40 @@ class RetrievalService:
         )
         return all_documents
 
+    @classmethod
+    def _deduplicate_documents(cls, documents: list[Document]) -> list[Document]:
+        """Deduplicate documents based on doc_id to avoid duplicate chunks in hybrid search."""
+        if not documents:
+            return documents
+
+        unique_documents = []
+        seen_doc_ids = set()
+
+        for document in documents:
+            # For dify provider documents, use doc_id for deduplication
+            if document.provider == "dify" and document.metadata is not None and "doc_id" in document.metadata:
+                doc_id = document.metadata["doc_id"]
+                if doc_id not in seen_doc_ids:
+                    seen_doc_ids.add(doc_id)
+                    unique_documents.append(document)
+                # If duplicate, keep the one with higher score
+                elif "score" in document.metadata:
+                    # Find existing document with same doc_id and compare scores
+                    for i, existing_doc in enumerate(unique_documents):
+                        if (
+                            existing_doc.metadata
+                            and existing_doc.metadata.get("doc_id") == doc_id
+                            and existing_doc.metadata.get("score", 0) < document.metadata.get("score", 0)
+                        ):
+                            unique_documents[i] = document
+                            break
+            else:
+                # For non-dify documents, use content-based deduplication
+                if document not in unique_documents:
+                    unique_documents.append(document)
+
+        return unique_documents
+
     @classmethod
     def _get_dataset(cls, dataset_id: str) -> Dataset | None:
         with Session(db.engine) as session:
@@ -184,7 +220,7 @@ class RetrievalService:
         score_threshold: float | None,
         reranking_model: dict | None,
         all_documents: list,
-        retrieval_method: str,
+        retrieval_method: RetrievalMethod,
         exceptions: list,
         document_ids_filter: list[str] | None = None,
     ):
@@ -209,10 +245,10 @@ class RetrievalService:
                         reranking_model
                         and reranking_model.get("reranking_model_name")
                         and reranking_model.get("reranking_provider_name")
-                        and retrieval_method == RetrievalMethod.SEMANTIC_SEARCH.value
+                        and retrieval_method == RetrievalMethod.SEMANTIC_SEARCH
                     ):
                        data_post_processor = DataPostProcessor(
-                            str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL.value), reranking_model, None, False
+                            str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL), reranking_model, None, False
                        )
                        all_documents.extend(
                            data_post_processor.invoke(
@@ -257,10 +293,10 @@ class RetrievalService:
                        reranking_model
                        and reranking_model.get("reranking_model_name")
                        and reranking_model.get("reranking_provider_name")
-                        and retrieval_method == RetrievalMethod.FULL_TEXT_SEARCH.value
+                        and retrieval_method == RetrievalMethod.FULL_TEXT_SEARCH
                    ):
                        data_post_processor = DataPostProcessor(
-                            str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL.value), reranking_model, None, False
+                            str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL), reranking_model, None, False
                        )
                        all_documents.extend(
                            data_post_processor.invoke(
diff --git a/web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx b/api/core/rag/datasource/vdb/alibabacloud_mysql/__init__.py
similarity index 100%
rename from web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx
rename to api/core/rag/datasource/vdb/alibabacloud_mysql/__init__.py
diff --git a/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py b/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py
new file mode 100644
index 0000000000..fdb5ffebfc
--- /dev/null
+++ b/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py
@@ -0,0 +1,388 @@
+import hashlib
+import json
+import logging
+import uuid
+from contextlib import contextmanager
+from typing import Any, Literal, cast
+
+import mysql.connector
+from mysql.connector import Error as MySQLError
+from pydantic import BaseModel, model_validator
+
+from configs import dify_config
+from core.rag.datasource.vdb.vector_base import BaseVector
+from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
+from core.rag.datasource.vdb.vector_type import VectorType
+from core.rag.embedding.embedding_base import Embeddings
+from core.rag.models.document import Document
+from extensions.ext_redis import redis_client
+from models.dataset import Dataset
+
+logger = logging.getLogger(__name__)
+
+
+class AlibabaCloudMySQLVectorConfig(BaseModel):
+    host: str
+    port: int
+    user: str
+    password: str
+    database: str
+    max_connection: int
+    charset: str = "utf8mb4"
+    distance_function: Literal["cosine", "euclidean"] = "cosine"
+    hnsw_m: int = 6
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_config(cls, values: dict):
+        if not values.get("host"):
+            raise ValueError("config ALIBABACLOUD_MYSQL_HOST is required")
+        if not values.get("port"):
+            raise ValueError("config ALIBABACLOUD_MYSQL_PORT is required")
+        if not values.get("user"):
+            raise ValueError("config ALIBABACLOUD_MYSQL_USER is required")
+        if values.get("password") is None:
+            raise ValueError("config ALIBABACLOUD_MYSQL_PASSWORD is required")
+        if not values.get("database"):
+            raise ValueError("config ALIBABACLOUD_MYSQL_DATABASE is required")
+        if not values.get("max_connection"):
+            raise ValueError("config ALIBABACLOUD_MYSQL_MAX_CONNECTION is required")
+        return values
+
+
+SQL_CREATE_TABLE = """
+CREATE TABLE IF NOT EXISTS {table_name} (
+    id VARCHAR(36) PRIMARY KEY,
+    text LONGTEXT NOT NULL,
+    meta JSON NOT NULL,
+    embedding VECTOR({dimension}) NOT NULL,
+    VECTOR INDEX (embedding) M={hnsw_m} DISTANCE={distance_function}
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
+"""
+
+SQL_CREATE_META_INDEX = """
+CREATE INDEX idx_{index_hash}_meta ON {table_name}
+    ((CAST(JSON_UNQUOTE(JSON_EXTRACT(meta, '$.document_id')) AS CHAR(36))));
+"""
+
+SQL_CREATE_FULLTEXT_INDEX = """
+CREATE FULLTEXT INDEX idx_{index_hash}_text ON {table_name} (text) WITH PARSER ngram;
+""" + + +class AlibabaCloudMySQLVector(BaseVector): + def __init__(self, collection_name: str, config: AlibabaCloudMySQLVectorConfig): + super().__init__(collection_name) + self.pool = self._create_connection_pool(config) + self.table_name = collection_name.lower() + self.index_hash = hashlib.md5(self.table_name.encode()).hexdigest()[:8] + self.distance_function = config.distance_function.lower() + self.hnsw_m = config.hnsw_m + self._check_vector_support() + + def get_type(self) -> str: + return VectorType.ALIBABACLOUD_MYSQL + + def _create_connection_pool(self, config: AlibabaCloudMySQLVectorConfig): + # Create connection pool using mysql-connector-python pooling + pool_config: dict[str, Any] = { + "host": config.host, + "port": config.port, + "user": config.user, + "password": config.password, + "database": config.database, + "charset": config.charset, + "autocommit": True, + "pool_name": f"pool_{self.collection_name}", + "pool_size": config.max_connection, + "pool_reset_session": True, + } + return mysql.connector.pooling.MySQLConnectionPool(**pool_config) + + def _check_vector_support(self): + """Check if the MySQL server supports vector operations.""" + try: + with self._get_cursor() as cur: + # Check MySQL version and vector support + cur.execute("SELECT VERSION()") + version = cur.fetchone()["VERSION()"] + logger.debug("Connected to MySQL version: %s", version) + # Try to execute a simple vector function to verify support + cur.execute("SELECT VEC_FromText('[1,2,3]') IS NOT NULL as vector_support") + result = cur.fetchone() + if not result or not result.get("vector_support"): + raise ValueError( + "RDS MySQL Vector functions are not available." + " Please ensure you're using RDS MySQL 8.0.36+ with Vector support." + ) + + except MySQLError as e: + if "FUNCTION" in str(e) and "VEC_FromText" in str(e): + raise ValueError( + "RDS MySQL Vector functions are not available." + " Please ensure you're using RDS MySQL 8.0.36+ with Vector support." 
+                ) from e
+            raise e
+
+    @contextmanager
+    def _get_cursor(self):
+        conn = self.pool.get_connection()
+        cur = conn.cursor(dictionary=True)
+        try:
+            yield cur
+        finally:
+            cur.close()
+            conn.close()
+
+    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
+        dimension = len(embeddings[0])
+        self._create_collection(dimension)
+        return self.add_texts(texts, embeddings)
+
+    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
+        values = []
+        pks = []
+        for i, doc in enumerate(documents):
+            if doc.metadata is not None:
+                doc_id = doc.metadata.get("doc_id", str(uuid.uuid4()))
+                pks.append(doc_id)
+                # Convert embedding list to RDS MySQL vector format
+                vector_str = "[" + ",".join(map(str, embeddings[i])) + "]"
+                values.append(
+                    (
+                        doc_id,
+                        doc.page_content,
+                        json.dumps(doc.metadata),
+                        vector_str,
+                    )
+                )
+
+        with self._get_cursor() as cur:
+            insert_sql = (
+                f"INSERT INTO {self.table_name} (id, text, meta, embedding) VALUES (%s, %s, %s, VEC_FromText(%s))"
+            )
+            cur.executemany(insert_sql, values)
+        return pks
+
+    def text_exists(self, id: str) -> bool:
+        with self._get_cursor() as cur:
+            cur.execute(f"SELECT id FROM {self.table_name} WHERE id = %s", (id,))
+            return cur.fetchone() is not None
+
+    def get_by_ids(self, ids: list[str]) -> list[Document]:
+        if not ids:
+            return []
+
+        with self._get_cursor() as cur:
+            placeholders = ",".join(["%s"] * len(ids))
+            cur.execute(f"SELECT meta, text FROM {self.table_name} WHERE id IN ({placeholders})", ids)
+            docs = []
+            for record in cur:
+                metadata = record["meta"]
+                if isinstance(metadata, str):
+                    metadata = json.loads(metadata)
+                docs.append(Document(page_content=record["text"], metadata=metadata))
+            return docs
+
+    def delete_by_ids(self, ids: list[str]):
+        # Avoid crashes caused by running delete operations on an empty id list
+        if not ids:
+            return
+
+        with self._get_cursor() as cur:
+            try:
+                placeholders = ",".join(["%s"] * len(ids))
+                cur.execute(f"DELETE FROM {self.table_name} WHERE id IN ({placeholders})", ids)
+            except MySQLError as e:
+                if e.errno == 1146:  # Table doesn't exist
+                    logger.warning("Table %s not found, skipping delete operation.", self.table_name)
+                    return
+                else:
+                    raise e
+
+    def delete_by_metadata_field(self, key: str, value: str):
+        with self._get_cursor() as cur:
+            cur.execute(
+                f"DELETE FROM {self.table_name} WHERE JSON_UNQUOTE(JSON_EXTRACT(meta, %s)) = %s", (f"$.{key}", value)
+            )
+
+    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+        """
+        Search the nearest neighbors to a vector using RDS MySQL vector distance functions.
+
+        :param query_vector: The input vector to search for similar items.
+        :return: List of Documents that are nearest to the query vector.
+ """ + top_k = kwargs.get("top_k", 4) + if not isinstance(top_k, int) or top_k <= 0: + raise ValueError("top_k must be a positive integer") + + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + params = [] + + if document_ids_filter: + placeholders = ",".join(["%s"] * len(document_ids_filter)) + where_clause = f" WHERE JSON_UNQUOTE(JSON_EXTRACT(meta, '$.document_id')) IN ({placeholders}) " + params.extend(document_ids_filter) + + # Convert query vector to RDS MySQL vector format + query_vector_str = "[" + ",".join(map(str, query_vector)) + "]" + + # Use RSD MySQL's native vector distance functions + with self._get_cursor() as cur: + # Choose distance function based on configuration + distance_func = "VEC_DISTANCE_COSINE" if self.distance_function == "cosine" else "VEC_DISTANCE_EUCLIDEAN" + + # Note: RDS MySQL optimizer will use vector index when ORDER BY + LIMIT are present + # Use column alias in ORDER BY to avoid calculating distance twice + sql = f""" + SELECT meta, text, + {distance_func}(embedding, VEC_FromText(%s)) AS distance + FROM {self.table_name} + {where_clause} + ORDER BY distance + LIMIT %s + """ + query_params = [query_vector_str] + params + [top_k] + + cur.execute(sql, query_params) + + docs = [] + score_threshold = float(kwargs.get("score_threshold") or 0.0) + + for record in cur: + try: + distance = float(record["distance"]) + # Convert distance to similarity score + if self.distance_function == "cosine": + # For cosine distance: similarity = 1 - distance + similarity = 1.0 - distance + else: + # For euclidean distance: use inverse relationship + # similarity = 1 / (1 + distance) + similarity = 1.0 / (1.0 + distance) + + metadata = record["meta"] + if isinstance(metadata, str): + metadata = json.loads(metadata) + metadata["score"] = similarity + metadata["distance"] = distance + + if similarity >= score_threshold: + docs.append(Document(page_content=record["text"], metadata=metadata)) + except (ValueError, json.JSONDecodeError) as e: + logger.warning("Error processing search result: %s", e) + continue + + return docs + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + top_k = kwargs.get("top_k", 5) + if not isinstance(top_k, int) or top_k <= 0: + raise ValueError("top_k must be a positive integer") + + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + params = [] + + if document_ids_filter: + placeholders = ",".join(["%s"] * len(document_ids_filter)) + where_clause = f" AND JSON_UNQUOTE(JSON_EXTRACT(meta, '$.document_id')) IN ({placeholders}) " + params.extend(document_ids_filter) + + with self._get_cursor() as cur: + # Build query parameters: query (twice for MATCH clauses), document_ids_filter (if any), top_k + query_params = [query, query] + params + [top_k] + cur.execute( + f"""SELECT meta, text, + MATCH(text) AGAINST(%s IN NATURAL LANGUAGE MODE) AS score + FROM {self.table_name} + WHERE MATCH(text) AGAINST(%s IN NATURAL LANGUAGE MODE) + {where_clause} + ORDER BY score DESC + LIMIT %s""", + query_params, + ) + docs = [] + for record in cur: + metadata = record["meta"] + if isinstance(metadata, str): + metadata = json.loads(metadata) + metadata["score"] = float(record["score"]) + docs.append(Document(page_content=record["text"], metadata=metadata)) + return docs + + def delete(self): + with self._get_cursor() as cur: + cur.execute(f"DROP TABLE IF EXISTS {self.table_name}") + + def _create_collection(self, dimension: int): + collection_exist_cache_key = 
f"vector_indexing_{self._collection_name}" + lock_name = f"{collection_exist_cache_key}_lock" + with redis_client.lock(lock_name, timeout=20): + if redis_client.get(collection_exist_cache_key): + return + + with self._get_cursor() as cur: + # Create table with vector column and vector index + cur.execute( + SQL_CREATE_TABLE.format( + table_name=self.table_name, + dimension=dimension, + distance_function=self.distance_function, + hnsw_m=self.hnsw_m, + ) + ) + # Create metadata index (check if exists first) + try: + cur.execute(SQL_CREATE_META_INDEX.format(table_name=self.table_name, index_hash=self.index_hash)) + except MySQLError as e: + if e.errno != 1061: # Duplicate key name + logger.warning("Could not create meta index: %s", e) + + # Create full-text index for text search + try: + cur.execute( + SQL_CREATE_FULLTEXT_INDEX.format(table_name=self.table_name, index_hash=self.index_hash) + ) + except MySQLError as e: + if e.errno != 1061: # Duplicate key name + logger.warning("Could not create fulltext index: %s", e) + + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + +class AlibabaCloudMySQLVectorFactory(AbstractVectorFactory): + def _validate_distance_function(self, distance_function: str) -> Literal["cosine", "euclidean"]: + """Validate and return the distance function as a proper Literal type.""" + if distance_function not in ["cosine", "euclidean"]: + raise ValueError(f"Invalid distance function: {distance_function}. Must be 'cosine' or 'euclidean'") + return cast(Literal["cosine", "euclidean"], distance_function) + + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> AlibabaCloudMySQLVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + dataset.index_struct = json.dumps( + self.gen_index_struct_dict(VectorType.ALIBABACLOUD_MYSQL, collection_name) + ) + return AlibabaCloudMySQLVector( + collection_name=collection_name, + config=AlibabaCloudMySQLVectorConfig( + host=dify_config.ALIBABACLOUD_MYSQL_HOST or "localhost", + port=dify_config.ALIBABACLOUD_MYSQL_PORT, + user=dify_config.ALIBABACLOUD_MYSQL_USER or "root", + password=dify_config.ALIBABACLOUD_MYSQL_PASSWORD or "", + database=dify_config.ALIBABACLOUD_MYSQL_DATABASE or "dify", + max_connection=dify_config.ALIBABACLOUD_MYSQL_MAX_CONNECTION, + charset=dify_config.ALIBABACLOUD_MYSQL_CHARSET or "utf8mb4", + distance_function=self._validate_distance_function( + dify_config.ALIBABACLOUD_MYSQL_DISTANCE_FUNCTION or "cosine" + ), + hnsw_m=dify_config.ALIBABACLOUD_MYSQL_HNSW_M or 6, + ), + ) diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py index e55e5f3101..a306f9ba0c 100644 --- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py +++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py @@ -488,9 +488,9 @@ class ClickzettaVector(BaseVector): create_table_sql = f""" CREATE TABLE IF NOT EXISTS {self._config.schema_name}.{self._table_name} ( id STRING NOT NULL COMMENT 'Unique document identifier', - {Field.CONTENT_KEY.value} STRING NOT NULL COMMENT 'Document text content for search and retrieval', - {Field.METADATA_KEY.value} JSON COMMENT 'Document metadata including source, type, and other attributes', - {Field.VECTOR.value} VECTOR(FLOAT, {dimension}) NOT NULL COMMENT + {Field.CONTENT_KEY} STRING 
+            {Field.CONTENT_KEY} STRING NOT NULL COMMENT 'Document text content for search and retrieval',
+            {Field.METADATA_KEY} JSON COMMENT 'Document metadata including source, type, and other attributes',
+            {Field.VECTOR} VECTOR(FLOAT, {dimension}) NOT NULL COMMENT
             'High-dimensional embedding vector for semantic similarity search',
             PRIMARY KEY (id)
         )
         COMMENT 'Dify RAG knowledge base vector storage table for document embeddings and content'
@@ -519,15 +519,15 @@ class ClickzettaVector(BaseVector):
                     existing_indexes = cursor.fetchall()
                     for idx in existing_indexes:
                         # Check if vector index already exists on the embedding column
-                        if Field.VECTOR.value in str(idx).lower():
-                            logger.info("Vector index already exists on column %s", Field.VECTOR.value)
+                        if Field.VECTOR in str(idx).lower():
+                            logger.info("Vector index already exists on column %s", Field.VECTOR)
                             return
                 except (RuntimeError, ValueError) as e:
                     logger.warning("Failed to check existing indexes: %s", e)
 
             index_sql = f"""
             CREATE VECTOR INDEX IF NOT EXISTS {index_name}
-            ON TABLE {self._config.schema_name}.{self._table_name}({Field.VECTOR.value})
+            ON TABLE {self._config.schema_name}.{self._table_name}({Field.VECTOR})
             PROPERTIES (
                 "distance.function" = "{self._config.vector_distance_function}",
                 "scalar.type" = "f32",
@@ -560,17 +560,17 @@ class ClickzettaVector(BaseVector):
                         # More precise check: look for inverted index specifically on the content column
                         if (
                             "inverted" in idx_str
-                            and Field.CONTENT_KEY.value.lower() in idx_str
+                            and Field.CONTENT_KEY.lower() in idx_str
                             and (index_name.lower() in idx_str or f"idx_{self._table_name}_text" in idx_str)
                         ):
-                            logger.info("Inverted index already exists on column %s: %s", Field.CONTENT_KEY.value, idx)
+                            logger.info("Inverted index already exists on column %s: %s", Field.CONTENT_KEY, idx)
                             return
                 except (RuntimeError, ValueError) as e:
                     logger.warning("Failed to check existing indexes: %s", e)
 
             index_sql = f"""
             CREATE INVERTED INDEX IF NOT EXISTS {index_name}
-            ON TABLE {self._config.schema_name}.{self._table_name} ({Field.CONTENT_KEY.value})
+            ON TABLE {self._config.schema_name}.{self._table_name} ({Field.CONTENT_KEY})
             PROPERTIES (
                 "analyzer" = "{self._config.analyzer_type}",
                 "mode" = "{self._config.analyzer_mode}"
@@ -588,13 +588,13 @@ class ClickzettaVector(BaseVector):
                     or "with the same type" in error_msg
                     or "cannot create inverted index" in error_msg
                 ) and "already has index" in error_msg:
-                    logger.info("Inverted index already exists on column %s", Field.CONTENT_KEY.value)
+                    logger.info("Inverted index already exists on column %s", Field.CONTENT_KEY)
                     # Try to get the existing index name for logging
                     try:
                         cursor.execute(f"SHOW INDEX FROM {self._config.schema_name}.{self._table_name}")
                         existing_indexes = cursor.fetchall()
                         for idx in existing_indexes:
-                            if "inverted" in str(idx).lower() and Field.CONTENT_KEY.value.lower() in str(idx).lower():
+                            if "inverted" in str(idx).lower() and Field.CONTENT_KEY.lower() in str(idx).lower():
                                 logger.info("Found existing inverted index: %s", idx)
                                 break
                     except (RuntimeError, ValueError):
@@ -669,7 +669,7 @@ class ClickzettaVector(BaseVector):
 
         # Use parameterized INSERT with executemany for better performance and security
         # Cast JSON and VECTOR in SQL, pass raw data as parameters
-        columns = f"id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value}, {Field.VECTOR.value}"
+        columns = f"id, {Field.CONTENT_KEY}, {Field.METADATA_KEY}, {Field.VECTOR}"
         insert_sql = (
             f"INSERT INTO {self._config.schema_name}.{self._table_name} ({columns}) "
             f"VALUES (?, ?, CAST(? AS JSON), CAST(? AS VECTOR({vector_dimension})))"
@@ -767,7 +767,7 @@ class ClickzettaVector(BaseVector):
             # Use json_extract_string function for ClickZetta compatibility
             sql = (
                 f"DELETE FROM {self._config.schema_name}.{self._table_name} "
-                f"WHERE json_extract_string({Field.METADATA_KEY.value}, '$.{key}') = ?"
+                f"WHERE json_extract_string({Field.METADATA_KEY}, '$.{key}') = ?"
             )
             cursor.execute(sql, binding_params=[value])
 
@@ -795,9 +795,7 @@ class ClickzettaVector(BaseVector):
             safe_doc_ids = [str(id).replace("'", "''") for id in document_ids_filter]
             doc_ids_str = ",".join(f"'{id}'" for id in safe_doc_ids)
             # Use json_extract_string function for ClickZetta compatibility
-            filter_clauses.append(
-                f"json_extract_string({Field.METADATA_KEY.value}, '$.document_id') IN ({doc_ids_str})"
-            )
+            filter_clauses.append(f"json_extract_string({Field.METADATA_KEY}, '$.document_id') IN ({doc_ids_str})")
 
         # No need for dataset_id filter since each dataset has its own table
 
@@ -808,23 +806,21 @@ class ClickzettaVector(BaseVector):
             distance_func = "COSINE_DISTANCE"
             if score_threshold > 0:
                 query_vector_str = f"CAST('[{self._format_vector_simple(query_vector)}]' AS VECTOR({vector_dimension}))"
-                filter_clauses.append(
-                    f"{distance_func}({Field.VECTOR.value}, {query_vector_str}) < {2 - score_threshold}"
-                )
+                filter_clauses.append(f"{distance_func}({Field.VECTOR}, {query_vector_str}) < {2 - score_threshold}")
         else:
             # For L2 distance, smaller is better
             distance_func = "L2_DISTANCE"
             if score_threshold > 0:
                 query_vector_str = f"CAST('[{self._format_vector_simple(query_vector)}]' AS VECTOR({vector_dimension}))"
-                filter_clauses.append(f"{distance_func}({Field.VECTOR.value}, {query_vector_str}) < {score_threshold}")
+                filter_clauses.append(f"{distance_func}({Field.VECTOR}, {query_vector_str}) < {score_threshold}")
 
         where_clause = " AND ".join(filter_clauses) if filter_clauses else "1=1"
 
         # Execute vector search query
         query_vector_str = f"CAST('[{self._format_vector_simple(query_vector)}]' AS VECTOR({vector_dimension}))"
         search_sql = f"""
-        SELECT id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value},
-               {distance_func}({Field.VECTOR.value}, {query_vector_str}) AS distance
+        SELECT id, {Field.CONTENT_KEY}, {Field.METADATA_KEY},
+               {distance_func}({Field.VECTOR}, {query_vector_str}) AS distance
         FROM {self._config.schema_name}.{self._table_name}
         WHERE {where_clause}
         ORDER BY distance
@@ -887,9 +883,9 @@ class ClickzettaVector(BaseVector):
             safe_doc_ids = [str(id).replace("'", "''") for id in document_ids_filter]
             doc_ids_str = ",".join(f"'{id}'" for id in safe_doc_ids)
             # Use json_extract_string function for ClickZetta compatibility
-            filter_clauses.append(
-                f"json_extract_string({Field.METADATA_KEY.value}, '$.document_id') IN ({doc_ids_str})"
-            )
+            filter_clauses.append(f"json_extract_string({Field.METADATA_KEY}, '$.document_id') IN ({doc_ids_str})")
 
         # No need for dataset_id filter since each dataset has its own table
 
@@ -897,13 +891,13 @@ class ClickzettaVector(BaseVector):
             # match_all requires all terms to be present
             # Use simple quote escaping for MATCH_ALL since it needs to be in the WHERE clause
             escaped_query = query.replace("'", "''")
-            filter_clauses.append(f"MATCH_ALL({Field.CONTENT_KEY.value}, '{escaped_query}')")
+            filter_clauses.append(f"MATCH_ALL({Field.CONTENT_KEY}, '{escaped_query}')")
 
         where_clause = " AND ".join(filter_clauses)
 
         # Execute full-text search query
         search_sql = f"""
-        SELECT id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value}
+        SELECT id, {Field.CONTENT_KEY}, {Field.METADATA_KEY}
         FROM {self._config.schema_name}.{self._table_name}
         WHERE {where_clause}
         LIMIT {top_k}
@@ -986,19 +980,17 @@ class ClickzettaVector(BaseVector):
             safe_doc_ids = [str(id).replace("'", "''") for id in document_ids_filter]
             doc_ids_str = ",".join(f"'{id}'" for id in safe_doc_ids)
             # Use json_extract_string function for ClickZetta compatibility
-            filter_clauses.append(
-                f"json_extract_string({Field.METADATA_KEY.value}, '$.document_id') IN ({doc_ids_str})"
-            )
+            filter_clauses.append(f"json_extract_string({Field.METADATA_KEY}, '$.document_id') IN ({doc_ids_str})")
 
         # No need for dataset_id filter since each dataset has its own table
 
         # Use simple quote escaping for LIKE clause
         escaped_query = query.replace("'", "''")
-        filter_clauses.append(f"{Field.CONTENT_KEY.value} LIKE '%{escaped_query}%'")
+        filter_clauses.append(f"{Field.CONTENT_KEY} LIKE '%{escaped_query}%'")
 
         where_clause = " AND ".join(filter_clauses)
 
         search_sql = f"""
-        SELECT id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value}
+        SELECT id, {Field.CONTENT_KEY}, {Field.METADATA_KEY}
         FROM {self._config.schema_name}.{self._table_name}
         WHERE {where_clause}
         LIMIT {top_k}
diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py
index 7b00928b7b..1e7fe52666 100644
--- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py
+++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py
@@ -57,18 +57,18 @@ class ElasticSearchJaVector(ElasticSearchVector):
         }
         mappings = {
             "properties": {
-                Field.CONTENT_KEY.value: {
+                Field.CONTENT_KEY: {
                     "type": "text",
                     "analyzer": "ja_analyzer",
                     "search_analyzer": "ja_analyzer",
                 },
-                Field.VECTOR.value: {  # Make sure the dimension is correct here
+                Field.VECTOR: {  # Make sure the dimension is correct here
                     "type": "dense_vector",
                     "dims": dim,
                     "index": True,
                     "similarity": "cosine",
                 },
-                Field.METADATA_KEY.value: {
+                Field.METADATA_KEY: {
                     "type": "object",
                     "properties": {
                         "doc_id": {"type": "keyword"}  # Map doc_id to keyword type
diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
index 2c147fa7ca..0ff8c915e6 100644
--- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
+++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
@@ -4,7 +4,7 @@ import math
 from typing import Any, cast
 from urllib.parse import urlparse
 
-import requests
+from elasticsearch import ConnectionError as ElasticsearchConnectionError
 from elasticsearch import Elasticsearch
 from flask import current_app
 from packaging.version import parse as parse_version
@@ -138,7 +138,7 @@ class ElasticSearchVector(BaseVector):
             if not client.ping():
                 raise ConnectionError("Failed to connect to Elasticsearch")
 
-        except requests.ConnectionError as e:
+        except ElasticsearchConnectionError as e:
             raise ConnectionError(f"Vector database connection error: {str(e)}")
         except Exception as e:
             raise ConnectionError(f"Elasticsearch client initialization failed: {str(e)}")
@@ -163,9 +163,9 @@ class ElasticSearchVector(BaseVector):
                 index=self._collection_name,
                 id=uuids[i],
                 document={
-                    Field.CONTENT_KEY.value: documents[i].page_content,
-                    Field.VECTOR.value: embeddings[i] or None,
-                    Field.METADATA_KEY.value: documents[i].metadata or {},
+                    Field.CONTENT_KEY: documents[i].page_content,
+                    Field.VECTOR: embeddings[i] or None,
+                    Field.METADATA_KEY: documents[i].metadata or {},
                 },
             )
         self._client.indices.refresh(index=self._collection_name)
@@ -193,7 +193,7 @@ class ElasticSearchVector(BaseVector):
     def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
         top_k = kwargs.get("top_k", 4)
         num_candidates = math.ceil(top_k * 1.5)
-        knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": top_k, "num_candidates": num_candidates}
+        knn = {"field": Field.VECTOR, "query_vector": query_vector, "k": top_k, "num_candidates": num_candidates}
         document_ids_filter = kwargs.get("document_ids_filter")
         if document_ids_filter:
             knn["filter"] = {"terms": {"metadata.document_id": document_ids_filter}}
@@ -205,9 +205,9 @@ class ElasticSearchVector(BaseVector):
             docs_and_scores.append(
                 (
                     Document(
-                        page_content=hit["_source"][Field.CONTENT_KEY.value],
-                        vector=hit["_source"][Field.VECTOR.value],
-                        metadata=hit["_source"][Field.METADATA_KEY.value],
+                        page_content=hit["_source"][Field.CONTENT_KEY],
+                        vector=hit["_source"][Field.VECTOR],
+                        metadata=hit["_source"][Field.METADATA_KEY],
                     ),
                     hit["_score"],
                 )
@@ -224,13 +224,13 @@ class ElasticSearchVector(BaseVector):
         return docs
 
     def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
-        query_str: dict[str, Any] = {"match": {Field.CONTENT_KEY.value: query}}
+        query_str: dict[str, Any] = {"match": {Field.CONTENT_KEY: query}}
 
         document_ids_filter = kwargs.get("document_ids_filter")
         if document_ids_filter:
             query_str = {
                 "bool": {
-                    "must": {"match": {Field.CONTENT_KEY.value: query}},
+                    "must": {"match": {Field.CONTENT_KEY: query}},
                     "filter": {"terms": {"metadata.document_id": document_ids_filter}},
                 }
             }
@@ -240,9 +240,9 @@ class ElasticSearchVector(BaseVector):
         for hit in results["hits"]["hits"]:
             docs.append(
                 Document(
-                    page_content=hit["_source"][Field.CONTENT_KEY.value],
-                    vector=hit["_source"][Field.VECTOR.value],
-                    metadata=hit["_source"][Field.METADATA_KEY.value],
+                    page_content=hit["_source"][Field.CONTENT_KEY],
+                    vector=hit["_source"][Field.VECTOR],
+                    metadata=hit["_source"][Field.METADATA_KEY],
                 )
             )
 
@@ -270,14 +270,14 @@ class ElasticSearchVector(BaseVector):
         dim = len(embeddings[0])
         mappings = {
             "properties": {
-                Field.CONTENT_KEY.value: {"type": "text"},
-                Field.VECTOR.value: {  # Make sure the dimension is correct here
+                Field.CONTENT_KEY: {"type": "text"},
+                Field.VECTOR: {  # Make sure the dimension is correct here
                     "type": "dense_vector",
                     "dims": dim,
                     "index": True,
                     "similarity": "cosine",
                 },
-                Field.METADATA_KEY.value: {
+                Field.METADATA_KEY: {
                     "type": "object",
                     "properties": {
                         "doc_id": {"type": "keyword"},  # Map doc_id to keyword type
diff --git a/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py b/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py
index cfee090768..c7b6593a8f 100644
--- a/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py
+++ b/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py
@@ -67,9 +67,9 @@ class HuaweiCloudVector(BaseVector):
                 index=self._collection_name,
                 id=uuids[i],
                 document={
-                    Field.CONTENT_KEY.value: documents[i].page_content,
-                    Field.VECTOR.value: embeddings[i] or None,
-                    Field.METADATA_KEY.value: documents[i].metadata or {},
+                    Field.CONTENT_KEY: documents[i].page_content,
+                    Field.VECTOR: embeddings[i] or None,
+                    Field.METADATA_KEY: documents[i].metadata or {},
                 },
             )
         self._client.indices.refresh(index=self._collection_name)
@@ -101,7 +101,7 @@ class HuaweiCloudVector(BaseVector):
             "size": top_k,
             "query": {
                 "vector": {
-                    Field.VECTOR.value: {
+                    Field.VECTOR: {
                         "vector": query_vector,
                         "topk": top_k,
                     }
@@ -116,9 +116,9 @@ class HuaweiCloudVector(BaseVector):
             docs_and_scores.append(
                 (
                     Document(
-                        page_content=hit["_source"][Field.CONTENT_KEY.value],
-                        vector=hit["_source"][Field.VECTOR.value],
-                        metadata=hit["_source"][Field.METADATA_KEY.value],
+                        page_content=hit["_source"][Field.CONTENT_KEY],
+                        vector=hit["_source"][Field.VECTOR],
+                        metadata=hit["_source"][Field.METADATA_KEY],
                     ),
                     hit["_score"],
                 )
@@ -135,15 +135,15 @@ class HuaweiCloudVector(BaseVector):
         return docs
 
     def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
-        query_str = {"match": {Field.CONTENT_KEY.value: query}}
+        query_str = {"match": {Field.CONTENT_KEY: query}}
         results = self._client.search(index=self._collection_name, query=query_str, size=kwargs.get("top_k", 4))
         docs = []
         for hit in results["hits"]["hits"]:
             docs.append(
                 Document(
-                    page_content=hit["_source"][Field.CONTENT_KEY.value],
-                    vector=hit["_source"][Field.VECTOR.value],
-                    metadata=hit["_source"][Field.METADATA_KEY.value],
+                    page_content=hit["_source"][Field.CONTENT_KEY],
+                    vector=hit["_source"][Field.VECTOR],
+                    metadata=hit["_source"][Field.METADATA_KEY],
                 )
             )
 
@@ -171,8 +171,8 @@ class HuaweiCloudVector(BaseVector):
         dim = len(embeddings[0])
         mappings = {
             "properties": {
-                Field.CONTENT_KEY.value: {"type": "text"},
-                Field.VECTOR.value: {  # Make sure the dimension is correct here
+                Field.CONTENT_KEY: {"type": "text"},
+                Field.VECTOR: {  # Make sure the dimension is correct here
                     "type": "vector",
                     "dimension": dim,
                     "indexing": True,
@@ -181,7 +181,7 @@ class HuaweiCloudVector(BaseVector):
                     "neighbors": 32,
                     "efc": 128,
                 },
-                Field.METADATA_KEY.value: {
+                Field.METADATA_KEY: {
                     "type": "object",
                     "properties": {
                         "doc_id": {"type": "keyword"}  # Map doc_id to keyword type
diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
index 8824e1c67b..bfcb620618 100644
--- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
+++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
@@ -125,9 +125,9 @@ class LindormVectorStore(BaseVector):
                 }
             }
             action_values: dict[str, Any] = {
-                Field.CONTENT_KEY.value: documents[i].page_content,
-                Field.VECTOR.value: embeddings[i],
-                Field.METADATA_KEY.value: documents[i].metadata,
+                Field.CONTENT_KEY: documents[i].page_content,
+                Field.VECTOR: embeddings[i],
+                Field.METADATA_KEY: documents[i].metadata,
             }
             if self._using_ugc:
                 action_header["index"]["routing"] = self._routing
@@ -149,7 +149,7 @@ class LindormVectorStore(BaseVector):
 
     def get_ids_by_metadata_field(self, key: str, value: str):
         query: dict[str, Any] = {
-            "query": {"bool": {"must": [{"term": {f"{Field.METADATA_KEY.value}.{key}.keyword": value}}]}}
+            "query": {"bool": {"must": [{"term": {f"{Field.METADATA_KEY}.{key}.keyword": value}}]}}
         }
         if self._using_ugc:
             query["query"]["bool"]["must"].append({"term": {f"{ROUTING_FIELD}.keyword": self._routing}})
@@ -252,14 +252,14 @@ class LindormVectorStore(BaseVector):
         search_query: dict[str, Any] = {
             "size": top_k,
             "_source": True,
-            "query": {"knn": {Field.VECTOR.value: {"vector": query_vector, "k": top_k}}},
+            "query": {"knn": {Field.VECTOR: {"vector": query_vector, "k": top_k}}},
         }
         final_ext: dict[str, Any] = {"lvector": {}}
 
         if filters is not None and len(filters) > 0:
             # when using filter, transform filter from List[Dict] to Dict as valid format
             filter_dict = {"bool": {"must": filters}} if len(filters) > 1 else filters[0]
-            search_query["query"]["knn"][Field.VECTOR.value]["filter"] = filter_dict  # filter should be Dict
+            search_query["query"]["knn"][Field.VECTOR]["filter"] = filter_dict  # filter should be Dict
             final_ext["lvector"]["filter_type"] = "pre_filter"
 
         if final_ext != {"lvector": {}}:
@@ -279,9 +279,9 @@ class LindormVectorStore(BaseVector):
             docs_and_scores.append(
                 (
                     Document(
-                        page_content=hit["_source"][Field.CONTENT_KEY.value],
-                        vector=hit["_source"][Field.VECTOR.value],
-                        metadata=hit["_source"][Field.METADATA_KEY.value],
+                        page_content=hit["_source"][Field.CONTENT_KEY],
+                        vector=hit["_source"][Field.VECTOR],
+                        metadata=hit["_source"][Field.METADATA_KEY],
                     ),
                     hit["_score"],
                 )
@@ -318,9 +318,9 @@ class LindormVectorStore(BaseVector):
 
         docs = []
         for hit in response["hits"]["hits"]:
-            metadata = hit["_source"].get(Field.METADATA_KEY.value)
-            vector = hit["_source"].get(Field.VECTOR.value)
-            page_content = hit["_source"].get(Field.CONTENT_KEY.value)
+            metadata = hit["_source"].get(Field.METADATA_KEY)
+            vector = hit["_source"].get(Field.VECTOR)
+            page_content = hit["_source"].get(Field.CONTENT_KEY)
             doc = Document(page_content=page_content, vector=vector, metadata=metadata)
             docs.append(doc)
 
@@ -342,8 +342,8 @@ class LindormVectorStore(BaseVector):
             "settings": {"index": {"knn": True, "knn_routing": self._using_ugc}},
             "mappings": {
                 "properties": {
-                    Field.CONTENT_KEY.value: {"type": "text"},
-                    Field.VECTOR.value: {
+                    Field.CONTENT_KEY: {"type": "text"},
+                    Field.VECTOR: {
                         "type": "knn_vector",
                         "dimension": len(embeddings[0]),  # Make sure the dimension is correct here
                         "method": {
diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py
index 5f32feb709..96eb465401 100644
--- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py
+++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py
@@ -85,7 +85,7 @@ class MilvusVector(BaseVector):
         collection_info = self._client.describe_collection(self._collection_name)
         fields = [field["name"] for field in collection_info["fields"]]
         # Since primary field is auto-id, no need to track it
-        self._fields = [f for f in fields if f != Field.PRIMARY_KEY.value]
+        self._fields = [f for f in fields if f != Field.PRIMARY_KEY]
 
     def _check_hybrid_search_support(self) -> bool:
         """
@@ -130,9 +130,9 @@ class MilvusVector(BaseVector):
             insert_dict = {
                 # Do not need to insert the sparse_vector field separately, as the text_bm25_emb
                 # function will automatically convert the native text into a sparse vector for us.
-                Field.CONTENT_KEY.value: documents[i].page_content,
-                Field.VECTOR.value: embeddings[i],
-                Field.METADATA_KEY.value: documents[i].metadata,
+                Field.CONTENT_KEY: documents[i].page_content,
+                Field.VECTOR: embeddings[i],
+                Field.METADATA_KEY: documents[i].metadata,
             }
             insert_dict_list.append(insert_dict)
         # Total insert count
@@ -243,15 +243,15 @@ class MilvusVector(BaseVector):
         results = self._client.search(
             collection_name=self._collection_name,
             data=[query_vector],
-            anns_field=Field.VECTOR.value,
+            anns_field=Field.VECTOR,
             limit=kwargs.get("top_k", 4),
-            output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value],
+            output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY],
             filter=filter,
         )
 
         return self._process_search_results(
             results,
-            output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value],
+            output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY],
             score_threshold=float(kwargs.get("score_threshold") or 0.0),
         )
 
@@ -264,7 +264,7 @@ class MilvusVector(BaseVector):
                 "Full-text search is disabled: set MILVUS_ENABLE_HYBRID_SEARCH=true (requires Milvus >= 2.5.0)."
             )
             return []
-        if not self.field_exists(Field.SPARSE_VECTOR.value):
+        if not self.field_exists(Field.SPARSE_VECTOR):
             logger.warning(
                 "Full-text search unavailable: collection missing 'sparse_vector' field; "
                 "recreate the collection after enabling MILVUS_ENABLE_HYBRID_SEARCH to add BM25 sparse index."
@@ -279,15 +279,15 @@ class MilvusVector(BaseVector):
         results = self._client.search(
             collection_name=self._collection_name,
             data=[query],
-            anns_field=Field.SPARSE_VECTOR.value,
+            anns_field=Field.SPARSE_VECTOR,
             limit=kwargs.get("top_k", 4),
-            output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value],
+            output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY],
             filter=filter,
         )
 
         return self._process_search_results(
             results,
-            output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value],
+            output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY],
             score_threshold=float(kwargs.get("score_threshold") or 0.0),
         )
 
@@ -311,7 +311,7 @@ class MilvusVector(BaseVector):
         dim = len(embeddings[0])
         fields = []
         if metadatas:
-            fields.append(FieldSchema(Field.METADATA_KEY.value, DataType.JSON, max_length=65_535))
+            fields.append(FieldSchema(Field.METADATA_KEY, DataType.JSON, max_length=65_535))
 
         # Create the text field, enable_analyzer will be set True to support milvus automatically
         # transfer text to sparse_vector, reference: https://milvus.io/docs/full-text-search.md
@@ -326,15 +326,15 @@ class MilvusVector(BaseVector):
         ):
             content_field_kwargs["analyzer_params"] = self._client_config.analyzer_params
 
-        fields.append(FieldSchema(Field.CONTENT_KEY.value, DataType.VARCHAR, **content_field_kwargs))
+        fields.append(FieldSchema(Field.CONTENT_KEY, DataType.VARCHAR, **content_field_kwargs))
 
         # Create the primary key field
-        fields.append(FieldSchema(Field.PRIMARY_KEY.value, DataType.INT64, is_primary=True, auto_id=True))
+        fields.append(FieldSchema(Field.PRIMARY_KEY, DataType.INT64, is_primary=True, auto_id=True))
 
         # Create the vector field, supports binary or float vectors
-        fields.append(FieldSchema(Field.VECTOR.value, infer_dtype_bydata(embeddings[0]), dim=dim))
+        fields.append(FieldSchema(Field.VECTOR, infer_dtype_bydata(embeddings[0]), dim=dim))
         # Create Sparse Vector Index for the collection
         if self._hybrid_search_enabled:
-            fields.append(FieldSchema(Field.SPARSE_VECTOR.value, DataType.SPARSE_FLOAT_VECTOR))
+            fields.append(FieldSchema(Field.SPARSE_VECTOR, DataType.SPARSE_FLOAT_VECTOR))
 
         schema = CollectionSchema(fields)
 
@@ -342,8 +342,8 @@ class MilvusVector(BaseVector):
         if self._hybrid_search_enabled:
             bm25_function = Function(
                 name="text_bm25_emb",
-                input_field_names=[Field.CONTENT_KEY.value],
-                output_field_names=[Field.SPARSE_VECTOR.value],
+                input_field_names=[Field.CONTENT_KEY],
+                output_field_names=[Field.SPARSE_VECTOR],
                 function_type=FunctionType.BM25,
             )
             schema.add_function(bm25_function)
@@ -352,12 +352,12 @@ class MilvusVector(BaseVector):
 
         # Create Index params for the collection
         index_params_obj = IndexParams()
-        index_params_obj.add_index(field_name=Field.VECTOR.value, **index_params)
+        index_params_obj.add_index(field_name=Field.VECTOR, **index_params)
 
         # Create Sparse Vector Index for the collection
         if self._hybrid_search_enabled:
             index_params_obj.add_index(
-                field_name=Field.SPARSE_VECTOR.value, index_type="AUTOINDEX", metric_type="BM25"
+                field_name=Field.SPARSE_VECTOR, index_type="AUTOINDEX", metric_type="BM25"
             )
 
         # Create the collection
diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
index 3eb1df027e..2f77776807 100644
--- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
+++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
@@ -1,6 +1,6 @@
 import json
 import logging
-from typing import Any, Literal
+from typing import Any
 from uuid import uuid4
 
 from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection, helpers
@@ -8,6 +8,7 @@ from opensearchpy.helpers import BulkIndexError
 from pydantic import BaseModel, model_validator
 
 from configs import dify_config
+from configs.middleware.vdb.opensearch_config import AuthMethod
 from core.rag.datasource.vdb.field import Field
 from core.rag.datasource.vdb.vector_base import BaseVector
 from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
@@ -25,7 +26,7 @@ class OpenSearchConfig(BaseModel):
     port: int
     secure: bool = False  # use_ssl
     verify_certs: bool = True
-    auth_method: Literal["basic", "aws_managed_iam"] = "basic"
+    auth_method: AuthMethod = AuthMethod.BASIC
     user: str | None = None
     password: str | None = None
    aws_region: str | None = None
@@ -98,9 +99,9 @@ class OpenSearchVector(BaseVector):
                 "_op_type": "index",
                 "_index": self._collection_name.lower(),
                 "_source": {
-                    Field.CONTENT_KEY.value: documents[i].page_content,
-                    Field.VECTOR.value: embeddings[i],  # Make sure you pass an array here
-                    Field.METADATA_KEY.value: documents[i].metadata,
+                    Field.CONTENT_KEY: documents[i].page_content,
+                    Field.VECTOR: embeddings[i],  # Make sure you pass an array here
+                    Field.METADATA_KEY: documents[i].metadata,
                 },
             }
             # See https://github.com/langchain-ai/langchainjs/issues/4346#issuecomment-1935123377
@@ -116,7 +117,7 @@ class OpenSearchVector(BaseVector):
         )
 
     def get_ids_by_metadata_field(self, key: str, value: str):
-        query = {"query": {"term": {f"{Field.METADATA_KEY.value}.{key}": value}}}
+        query = {"query": {"term": {f"{Field.METADATA_KEY}.{key}": value}}}
         response = self._client.search(index=self._collection_name.lower(), body=query)
         if response["hits"]["hits"]:
             return [hit["_id"] for hit in response["hits"]["hits"]]
@@ -160,7 +161,7 @@ class OpenSearchVector(BaseVector):
             logger.exception("Error deleting document: %s", error)
 
     def delete(self):
-        self._client.indices.delete(index=self._collection_name.lower())
+        self._client.indices.delete(index=self._collection_name.lower(), ignore_unavailable=True)
 
     def text_exists(self, id: str) -> bool:
         try:
@@ -180,17 +181,17 @@ class OpenSearchVector(BaseVector):
 
         query = {
             "size": kwargs.get("top_k", 4),
-            "query": {"knn": {Field.VECTOR.value: {Field.VECTOR.value: query_vector, "k": kwargs.get("top_k", 4)}}},
+            "query": {"knn": {Field.VECTOR: {Field.VECTOR: query_vector, "k": kwargs.get("top_k", 4)}}},
         }
         document_ids_filter = kwargs.get("document_ids_filter")
         if document_ids_filter:
             query["query"] = {
                 "script_score": {
-                    "query": {"bool": {"filter": [{"terms": {Field.DOCUMENT_ID.value: document_ids_filter}}]}},
+                    "query": {"bool": {"filter": [{"terms": {Field.DOCUMENT_ID: document_ids_filter}}]}},
                     "script": {
                         "source": "knn_score",
                         "lang": "knn",
-                        "params": {"field": Field.VECTOR.value, "query_value": query_vector, "space_type": "l2"},
+                        "params": {"field": Field.VECTOR, "query_value": query_vector, "space_type": "l2"},
                     },
                 }
             }
@@ -203,7 +204,7 @@ class OpenSearchVector(BaseVector):
 
         docs = []
         for hit in response["hits"]["hits"]:
-            metadata = hit["_source"].get(Field.METADATA_KEY.value, {})
+            metadata = hit["_source"].get(Field.METADATA_KEY, {})
 
             # Make sure metadata is a dictionary
             if metadata is None:
@@ -212,7 +213,7 @@ class OpenSearchVector(BaseVector):
OpenSearchVector(BaseVector): metadata["score"] = hit["_score"] score_threshold = float(kwargs.get("score_threshold") or 0.0) if hit["_score"] >= score_threshold: - doc = Document(page_content=hit["_source"].get(Field.CONTENT_KEY.value), metadata=metadata) + doc = Document(page_content=hit["_source"].get(Field.CONTENT_KEY), metadata=metadata) docs.append(doc) return docs @@ -227,9 +228,9 @@ class OpenSearchVector(BaseVector): docs = [] for hit in response["hits"]["hits"]: - metadata = hit["_source"].get(Field.METADATA_KEY.value) - vector = hit["_source"].get(Field.VECTOR.value) - page_content = hit["_source"].get(Field.CONTENT_KEY.value) + metadata = hit["_source"].get(Field.METADATA_KEY) + vector = hit["_source"].get(Field.VECTOR) + page_content = hit["_source"].get(Field.CONTENT_KEY) doc = Document(page_content=page_content, vector=vector, metadata=metadata) docs.append(doc) @@ -250,8 +251,8 @@ class OpenSearchVector(BaseVector): "settings": {"index": {"knn": True}}, "mappings": { "properties": { - Field.CONTENT_KEY.value: {"type": "text"}, - Field.VECTOR.value: { + Field.CONTENT_KEY: {"type": "text"}, + Field.VECTOR: { "type": "knn_vector", "dimension": len(embeddings[0]), # Make sure the dimension is correct here "method": { @@ -261,7 +262,7 @@ class OpenSearchVector(BaseVector): "parameters": {"ef_construction": 64, "m": 8}, }, }, - Field.METADATA_KEY.value: { + Field.METADATA_KEY: { "type": "object", "properties": { "doc_id": {"type": "keyword"}, # Map doc_id to keyword type @@ -293,7 +294,7 @@ class OpenSearchVectorFactory(AbstractVectorFactory): port=dify_config.OPENSEARCH_PORT, secure=dify_config.OPENSEARCH_SECURE, verify_certs=dify_config.OPENSEARCH_VERIFY_CERTS, - auth_method=dify_config.OPENSEARCH_AUTH_METHOD.value, + auth_method=dify_config.OPENSEARCH_AUTH_METHOD, user=dify_config.OPENSEARCH_USER, password=dify_config.OPENSEARCH_PASSWORD, aws_region=dify_config.OPENSEARCH_AWS_REGION, diff --git a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py index d46f29bd64..f8c62b908a 100644 --- a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py +++ b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py @@ -147,15 +147,13 @@ class QdrantVector(BaseVector): # create group_id payload index self._client.create_payload_index( - collection_name, Field.GROUP_KEY.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.GROUP_KEY, field_schema=PayloadSchemaType.KEYWORD ) # create doc_id payload index - self._client.create_payload_index( - collection_name, Field.DOC_ID.value, field_schema=PayloadSchemaType.KEYWORD - ) + self._client.create_payload_index(collection_name, Field.DOC_ID, field_schema=PayloadSchemaType.KEYWORD) # create document_id payload index self._client.create_payload_index( - collection_name, Field.DOCUMENT_ID.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.DOCUMENT_ID, field_schema=PayloadSchemaType.KEYWORD ) # create full text index text_index_params = TextIndexParams( @@ -165,9 +163,7 @@ class QdrantVector(BaseVector): max_token_len=20, lowercase=True, ) - self._client.create_payload_index( - collection_name, Field.CONTENT_KEY.value, field_schema=text_index_params - ) + self._client.create_payload_index(collection_name, Field.CONTENT_KEY, field_schema=text_index_params) redis_client.set(collection_exist_cache_key, 1, ex=3600) def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): @@ -220,10 +216,10 @@ class QdrantVector(BaseVector): 
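For readers less familiar with OpenSearch k-NN, the two query shapes used in the OpenSearch hunk above look like this when written out as plain dictionaries; field names follow the `Field` enum, and the vector and ids are illustrative:

```python
query_vector = [0.1, 0.2, 0.3]  # illustrative embedding
doc_ids = ["doc-1", "doc-2"]

# Unfiltered: approximate k-NN against the "vector" field.
knn_query = {
    "size": 4,
    "query": {"knn": {"vector": {"vector": query_vector, "k": 4}}},
}

# Filtered: exact re-scoring via the knn_score script over the filtered set;
# "l2" space means smaller distances rank higher.
filtered_query = {
    "size": 4,
    "query": {
        "script_score": {
            "query": {"bool": {"filter": [{"terms": {"document_id": doc_ids}}]}},
            "script": {
                "source": "knn_score",
                "lang": "knn",
                "params": {"field": "vector", "query_value": query_vector, "space_type": "l2"},
            },
        }
    },
}
```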
self._build_payloads( batch_texts, batch_metadatas, - Field.CONTENT_KEY.value, - Field.METADATA_KEY.value, + Field.CONTENT_KEY, + Field.METADATA_KEY, group_id or "", # Ensure group_id is never None - Field.GROUP_KEY.value, + Field.GROUP_KEY, ), ) ] @@ -381,12 +377,12 @@ class QdrantVector(BaseVector): for result in results: if result.payload is None: continue - metadata = result.payload.get(Field.METADATA_KEY.value) or {} + metadata = result.payload.get(Field.METADATA_KEY) or {} # duplicate check score threshold if result.score >= score_threshold: metadata["score"] = result.score doc = Document( - page_content=result.payload.get(Field.CONTENT_KEY.value, ""), + page_content=result.payload.get(Field.CONTENT_KEY, ""), metadata=metadata, ) docs.append(doc) @@ -433,7 +429,7 @@ class QdrantVector(BaseVector): documents = [] for result in results: if result: - document = self._document_from_scored_point(result, Field.CONTENT_KEY.value, Field.METADATA_KEY.value) + document = self._document_from_scored_point(result, Field.CONTENT_KEY, Field.METADATA_KEY) documents.append(document) return documents diff --git a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py index e91d9bb0d6..f2156afa59 100644 --- a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py +++ b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py @@ -55,7 +55,7 @@ class TableStoreVector(BaseVector): self._normalize_full_text_bm25_score = config.normalize_full_text_bm25_score self._table_name = f"{collection_name}" self._index_name = f"{collection_name}_idx" - self._tags_field = f"{Field.METADATA_KEY.value}_tags" + self._tags_field = f"{Field.METADATA_KEY}_tags" def create_collection(self, embeddings: list[list[float]], **kwargs): dimension = len(embeddings[0]) @@ -64,7 +64,7 @@ class TableStoreVector(BaseVector): def get_by_ids(self, ids: list[str]) -> list[Document]: docs = [] request = BatchGetRowRequest() - columns_to_get = [Field.METADATA_KEY.value, Field.CONTENT_KEY.value] + columns_to_get = [Field.METADATA_KEY, Field.CONTENT_KEY] rows_to_get = [[("id", _id)] for _id in ids] request.add(TableInBatchGetRowItem(self._table_name, rows_to_get, columns_to_get, None, 1)) @@ -73,11 +73,7 @@ class TableStoreVector(BaseVector): for item in table_result: if item.is_ok and item.row: kv = {k: v for k, v, _ in item.row.attribute_columns} - docs.append( - Document( - page_content=kv[Field.CONTENT_KEY.value], metadata=json.loads(kv[Field.METADATA_KEY.value]) - ) - ) + docs.append(Document(page_content=kv[Field.CONTENT_KEY], metadata=json.loads(kv[Field.METADATA_KEY]))) return docs def get_type(self) -> str: @@ -95,9 +91,9 @@ class TableStoreVector(BaseVector): self._write_row( primary_key=uuids[i], attributes={ - Field.CONTENT_KEY.value: documents[i].page_content, - Field.VECTOR.value: embeddings[i], - Field.METADATA_KEY.value: documents[i].metadata, + Field.CONTENT_KEY: documents[i].page_content, + Field.VECTOR: embeddings[i], + Field.METADATA_KEY: documents[i].metadata, }, ) return uuids @@ -180,7 +176,7 @@ class TableStoreVector(BaseVector): field_schemas = [ tablestore.FieldSchema( - Field.CONTENT_KEY.value, + Field.CONTENT_KEY, tablestore.FieldType.TEXT, analyzer=tablestore.AnalyzerType.MAXWORD, index=True, @@ -188,7 +184,7 @@ class TableStoreVector(BaseVector): store=False, ), tablestore.FieldSchema( - Field.VECTOR.value, + Field.VECTOR, tablestore.FieldType.VECTOR, vector_options=tablestore.VectorOptions( data_type=tablestore.VectorDataType.VD_FLOAT_32, 
@@ -197,7 +193,7 @@ class TableStoreVector(BaseVector): ), ), tablestore.FieldSchema( - Field.METADATA_KEY.value, + Field.METADATA_KEY, tablestore.FieldType.KEYWORD, index=True, store=False, @@ -233,15 +229,15 @@ class TableStoreVector(BaseVector): pk = [("id", primary_key)] tags = [] - for key, value in attributes[Field.METADATA_KEY.value].items(): + for key, value in attributes[Field.METADATA_KEY].items(): tags.append(str(key) + "=" + str(value)) attribute_columns = [ - (Field.CONTENT_KEY.value, attributes[Field.CONTENT_KEY.value]), - (Field.VECTOR.value, json.dumps(attributes[Field.VECTOR.value])), + (Field.CONTENT_KEY, attributes[Field.CONTENT_KEY]), + (Field.VECTOR, json.dumps(attributes[Field.VECTOR])), ( - Field.METADATA_KEY.value, - json.dumps(attributes[Field.METADATA_KEY.value]), + Field.METADATA_KEY, + json.dumps(attributes[Field.METADATA_KEY]), ), (self._tags_field, json.dumps(tags)), ] @@ -270,7 +266,7 @@ class TableStoreVector(BaseVector): index_name=self._index_name, search_query=query, columns_to_get=tablestore.ColumnsToGet( - column_names=[Field.PRIMARY_KEY.value], return_type=tablestore.ColumnReturnType.SPECIFIED + column_names=[Field.PRIMARY_KEY], return_type=tablestore.ColumnReturnType.SPECIFIED ), ) @@ -288,7 +284,7 @@ class TableStoreVector(BaseVector): self, query_vector: list[float], document_ids_filter: list[str] | None, top_k: int, score_threshold: float ) -> list[Document]: knn_vector_query = tablestore.KnnVectorQuery( - field_name=Field.VECTOR.value, + field_name=Field.VECTOR, top_k=top_k, float32_query_vector=query_vector, ) @@ -311,8 +307,8 @@ class TableStoreVector(BaseVector): for col in search_hit.row[1]: ots_column_map[col[0]] = col[1] - vector_str = ots_column_map.get(Field.VECTOR.value) - metadata_str = ots_column_map.get(Field.METADATA_KEY.value) + vector_str = ots_column_map.get(Field.VECTOR) + metadata_str = ots_column_map.get(Field.METADATA_KEY) vector = json.loads(vector_str) if vector_str else None metadata = json.loads(metadata_str) if metadata_str else {} @@ -321,7 +317,7 @@ class TableStoreVector(BaseVector): documents.append( Document( - page_content=ots_column_map.get(Field.CONTENT_KEY.value) or "", + page_content=ots_column_map.get(Field.CONTENT_KEY) or "", vector=vector, metadata=metadata, ) @@ -343,7 +339,7 @@ class TableStoreVector(BaseVector): self, query: str, document_ids_filter: list[str] | None, top_k: int, score_threshold: float ) -> list[Document]: bool_query = tablestore.BoolQuery(must_queries=[], filter_queries=[], should_queries=[], must_not_queries=[]) - bool_query.must_queries.append(tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY.value)) + bool_query.must_queries.append(tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY)) if document_ids_filter: bool_query.filter_queries.append(tablestore.TermsQuery(self._tags_field, document_ids_filter)) @@ -374,10 +370,10 @@ class TableStoreVector(BaseVector): for col in search_hit.row[1]: ots_column_map[col[0]] = col[1] - metadata_str = ots_column_map.get(Field.METADATA_KEY.value) + metadata_str = ots_column_map.get(Field.METADATA_KEY) metadata = json.loads(metadata_str) if metadata_str else {} - vector_str = ots_column_map.get(Field.VECTOR.value) + vector_str = ots_column_map.get(Field.VECTOR) vector = json.loads(vector_str) if vector_str else None if score: @@ -385,7 +381,7 @@ class TableStoreVector(BaseVector): documents.append( Document( - page_content=ots_column_map.get(Field.CONTENT_KEY.value) or "", + page_content=ots_column_map.get(Field.CONTENT_KEY) or 
"", vector=vector, metadata=metadata, ) diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py index f90a311df4..56ffb36a2b 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -5,9 +5,10 @@ from collections.abc import Generator, Iterable, Sequence from itertools import islice from typing import TYPE_CHECKING, Any, Union +import httpx import qdrant_client -import requests from flask import current_app +from httpx import DigestAuth from pydantic import BaseModel from qdrant_client.http import models as rest from qdrant_client.http.models import ( @@ -19,7 +20,6 @@ from qdrant_client.http.models import ( TokenizerType, ) from qdrant_client.local.qdrant_local import QdrantLocal -from requests.auth import HTTPDigestAuth from sqlalchemy import select from configs import dify_config @@ -141,15 +141,13 @@ class TidbOnQdrantVector(BaseVector): # create group_id payload index self._client.create_payload_index( - collection_name, Field.GROUP_KEY.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.GROUP_KEY, field_schema=PayloadSchemaType.KEYWORD ) # create doc_id payload index - self._client.create_payload_index( - collection_name, Field.DOC_ID.value, field_schema=PayloadSchemaType.KEYWORD - ) + self._client.create_payload_index(collection_name, Field.DOC_ID, field_schema=PayloadSchemaType.KEYWORD) # create document_id payload index self._client.create_payload_index( - collection_name, Field.DOCUMENT_ID.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.DOCUMENT_ID, field_schema=PayloadSchemaType.KEYWORD ) # create full text index text_index_params = TextIndexParams( @@ -159,9 +157,7 @@ class TidbOnQdrantVector(BaseVector): max_token_len=20, lowercase=True, ) - self._client.create_payload_index( - collection_name, Field.CONTENT_KEY.value, field_schema=text_index_params - ) + self._client.create_payload_index(collection_name, Field.CONTENT_KEY, field_schema=text_index_params) redis_client.set(collection_exist_cache_key, 1, ex=3600) def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): @@ -211,10 +207,10 @@ class TidbOnQdrantVector(BaseVector): self._build_payloads( batch_texts, batch_metadatas, - Field.CONTENT_KEY.value, - Field.METADATA_KEY.value, + Field.CONTENT_KEY, + Field.METADATA_KEY, group_id or "", - Field.GROUP_KEY.value, + Field.GROUP_KEY, ), ) ] @@ -349,13 +345,13 @@ class TidbOnQdrantVector(BaseVector): for result in results: if result.payload is None: continue - metadata = result.payload.get(Field.METADATA_KEY.value) or {} + metadata = result.payload.get(Field.METADATA_KEY) or {} # duplicate check score threshold score_threshold = kwargs.get("score_threshold") or 0.0 if result.score >= score_threshold: metadata["score"] = result.score doc = Document( - page_content=result.payload.get(Field.CONTENT_KEY.value, ""), + page_content=result.payload.get(Field.CONTENT_KEY, ""), metadata=metadata, ) docs.append(doc) @@ -392,7 +388,7 @@ class TidbOnQdrantVector(BaseVector): documents = [] for result in results: if result: - document = self._document_from_scored_point(result, Field.CONTENT_KEY.value, Field.METADATA_KEY.value) + document = self._document_from_scored_point(result, Field.CONTENT_KEY, Field.METADATA_KEY) documents.append(document) return documents @@ -504,10 +500,10 @@ class 
TidbOnQdrantVectorFactory(AbstractVectorFactory): } cluster_data = {"displayName": display_name, "region": region_object, "labels": labels} - response = requests.post( + response = httpx.post( f"{tidb_config.api_url}/clusters", json=cluster_data, - auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + auth=DigestAuth(tidb_config.public_key, tidb_config.private_key), ) if response.status_code == 200: @@ -527,10 +523,10 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory): body = {"password": new_password} - response = requests.put( + response = httpx.put( f"{tidb_config.api_url}/clusters/{cluster_id}/password", json=body, - auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + auth=DigestAuth(tidb_config.public_key, tidb_config.private_key), ) if response.status_code == 200: diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py index e1d4422144..754c149241 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py @@ -2,8 +2,8 @@ import time import uuid from collections.abc import Sequence -import requests -from requests.auth import HTTPDigestAuth +import httpx +from httpx import DigestAuth from configs import dify_config from extensions.ext_database import db @@ -49,7 +49,7 @@ class TidbService: "rootPassword": password, } - response = requests.post(f"{api_url}/clusters", json=cluster_data, auth=HTTPDigestAuth(public_key, private_key)) + response = httpx.post(f"{api_url}/clusters", json=cluster_data, auth=DigestAuth(public_key, private_key)) if response.status_code == 200: response_data = response.json() @@ -83,7 +83,7 @@ class TidbService: :return: The response from the API. """ - response = requests.delete(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key)) + response = httpx.delete(f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key)) if response.status_code == 200: return response.json() @@ -102,7 +102,7 @@ class TidbService: :return: The response from the API. 
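The requests-to-httpx swap in these TiDB hunks is mostly mechanical (`HTTPDigestAuth` becomes `httpx.DigestAuth`), but one behavioral difference is worth keeping in mind: httpx applies a 5-second default timeout where requests had none. A hedged sketch with a placeholder URL and credentials:

```python
import httpx

# Placeholder endpoint and keys; raise the 5 s default timeout explicitly
# for slow control-plane calls.
response = httpx.get(
    "https://api.example.com/clusters/123",
    auth=httpx.DigestAuth("public_key", "private_key"),
    timeout=30.0,
)
response.raise_for_status()
payload = response.json()
```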
""" - response = requests.get(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key)) + response = httpx.get(f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key)) if response.status_code == 200: return response.json() @@ -127,10 +127,10 @@ class TidbService: body = {"password": new_password, "builtinRole": "role_admin", "customRoles": []} - response = requests.patch( + response = httpx.patch( f"{api_url}/clusters/{cluster_id}/sqlUsers/{account}", json=body, - auth=HTTPDigestAuth(public_key, private_key), + auth=DigestAuth(public_key, private_key), ) if response.status_code == 200: @@ -161,9 +161,7 @@ class TidbService: tidb_serverless_list_map = {item.cluster_id: item for item in tidb_serverless_list} cluster_ids = [item.cluster_id for item in tidb_serverless_list] params = {"clusterIds": cluster_ids, "view": "BASIC"} - response = requests.get( - f"{api_url}/clusters:batchGet", params=params, auth=HTTPDigestAuth(public_key, private_key) - ) + response = httpx.get(f"{api_url}/clusters:batchGet", params=params, auth=DigestAuth(public_key, private_key)) if response.status_code == 200: response_data = response.json() @@ -224,8 +222,8 @@ class TidbService: clusters.append(cluster_data) request_body = {"requests": clusters} - response = requests.post( - f"{api_url}/clusters:batchCreate", json=request_body, auth=HTTPDigestAuth(public_key, private_key) + response = httpx.post( + f"{api_url}/clusters:batchCreate", json=request_body, auth=DigestAuth(public_key, private_key) ) if response.status_code == 200: diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py index b8897c4165..27ae038a06 100644 --- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py +++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py @@ -55,13 +55,13 @@ class TiDBVector(BaseVector): return Table( self._collection_name, self._orm_base.metadata, - Column(Field.PRIMARY_KEY.value, String(36), primary_key=True, nullable=False), + Column(Field.PRIMARY_KEY, String(36), primary_key=True, nullable=False), Column( - Field.VECTOR.value, + Field.VECTOR, VectorType(dim), nullable=False, ), - Column(Field.TEXT_KEY.value, TEXT, nullable=False), + Column(Field.TEXT_KEY, TEXT, nullable=False), Column("meta", JSON, nullable=False), Column("create_time", DateTime, server_default=sqlalchemy.text("CURRENT_TIMESTAMP")), Column( diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index dc4f026ff3..0beb388693 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -71,6 +71,12 @@ class Vector: from core.rag.datasource.vdb.milvus.milvus_vector import MilvusVectorFactory return MilvusVectorFactory + case VectorType.ALIBABACLOUD_MYSQL: + from core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector import ( + AlibabaCloudMySQLVectorFactory, + ) + + return AlibabaCloudMySQLVectorFactory case VectorType.MYSCALE: from core.rag.datasource.vdb.myscale.myscale_vector import MyScaleVectorFactory diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py index a415142196..bc7d93a2e0 100644 --- a/api/core/rag/datasource/vdb/vector_type.py +++ b/api/core/rag/datasource/vdb/vector_type.py @@ -2,6 +2,7 @@ from enum import StrEnum class VectorType(StrEnum): + ALIBABACLOUD_MYSQL = "alibabacloud_mysql" ANALYTICDB = "analyticdb" CHROMA = "chroma" MILVUS = "milvus" diff --git 
a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py index d1bdd3baef..e5feecf2bc 100644 --- a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py +++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py @@ -76,11 +76,11 @@ class VikingDBVector(BaseVector): if not self._has_collection(): fields = [ - Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True), - Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text), - Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=dimension), + Field(field_name=vdb_Field.PRIMARY_KEY, field_type=FieldType.String, is_primary_key=True), + Field(field_name=vdb_Field.METADATA_KEY, field_type=FieldType.String), + Field(field_name=vdb_Field.GROUP_KEY, field_type=FieldType.String), + Field(field_name=vdb_Field.CONTENT_KEY, field_type=FieldType.Text), + Field(field_name=vdb_Field.VECTOR, field_type=FieldType.Vector, dim=dimension), ] self._client.create_collection( @@ -100,7 +100,7 @@ class VikingDBVector(BaseVector): collection_name=self._collection_name, index_name=self._index_name, vector_index=vector_index, - partition_by=vdb_Field.GROUP_KEY.value, + partition_by=vdb_Field.GROUP_KEY, description="Index For Dify", ) redis_client.set(collection_exist_cache_key, 1, ex=3600) @@ -126,11 +126,11 @@ class VikingDBVector(BaseVector): # FIXME: fix the type of metadata later doc = Data( { - vdb_Field.PRIMARY_KEY.value: metadatas[i]["doc_id"], # type: ignore - vdb_Field.VECTOR.value: embeddings[i] if embeddings else None, - vdb_Field.CONTENT_KEY.value: page_content, - vdb_Field.METADATA_KEY.value: json.dumps(metadata), - vdb_Field.GROUP_KEY.value: self._group_id, + vdb_Field.PRIMARY_KEY: metadatas[i]["doc_id"], # type: ignore + vdb_Field.VECTOR: embeddings[i] if embeddings else None, + vdb_Field.CONTENT_KEY: page_content, + vdb_Field.METADATA_KEY: json.dumps(metadata), + vdb_Field.GROUP_KEY: self._group_id, } ) docs.append(doc) @@ -151,7 +151,7 @@ class VikingDBVector(BaseVector): # Note: Metadata field value is an dict, but vikingdb field # not support json type results = self._client.get_index(self._collection_name, self._index_name).search( - filter={"op": "must", "field": vdb_Field.GROUP_KEY.value, "conds": [self._group_id]}, + filter={"op": "must", "field": vdb_Field.GROUP_KEY, "conds": [self._group_id]}, # max value is 5000 limit=5000, ) @@ -161,7 +161,7 @@ class VikingDBVector(BaseVector): ids = [] for result in results: - metadata = result.fields.get(vdb_Field.METADATA_KEY.value) + metadata = result.fields.get(vdb_Field.METADATA_KEY) if metadata is not None: metadata = json.loads(metadata) if metadata.get(key) == value: @@ -189,12 +189,12 @@ class VikingDBVector(BaseVector): docs = [] for result in results: - metadata = result.fields.get(vdb_Field.METADATA_KEY.value) + metadata = result.fields.get(vdb_Field.METADATA_KEY) if metadata is not None: metadata = json.loads(metadata) if result.score >= score_threshold: metadata["score"] = result.score - doc = Document(page_content=result.fields.get(vdb_Field.CONTENT_KEY.value), metadata=metadata) + doc = Document(page_content=result.fields.get(vdb_Field.CONTENT_KEY), metadata=metadata) docs.append(doc) docs = sorted(docs, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True) return docs diff 
--git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index 3ec08b93ed..d2d8fcf964 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -1,10 +1,24 @@ +""" +Weaviate vector database implementation for Dify's RAG system. + +This module provides integration with Weaviate vector database for storing and retrieving +document embeddings used in retrieval-augmented generation workflows. +""" + import datetime import json +import logging +import uuid as _uuid from typing import Any +from urllib.parse import urlparse -import requests -import weaviate # type: ignore +import weaviate +import weaviate.classes.config as wc from pydantic import BaseModel, model_validator +from weaviate.classes.data import DataObject +from weaviate.classes.init import Auth +from weaviate.classes.query import Filter, MetadataQuery +from weaviate.exceptions import UnexpectedStatusCodeError from configs import dify_config from core.rag.datasource.vdb.field import Field @@ -16,265 +30,396 @@ from core.rag.models.document import Document from extensions.ext_redis import redis_client from models.dataset import Dataset +logger = logging.getLogger(__name__) + class WeaviateConfig(BaseModel): + """ + Configuration model for Weaviate connection settings. + + Attributes: + endpoint: Weaviate server endpoint URL + api_key: Optional API key for authentication + batch_size: Number of objects to batch per insert operation + """ + endpoint: str api_key: str | None = None batch_size: int = 100 @model_validator(mode="before") @classmethod - def validate_config(cls, values: dict): + def validate_config(cls, values: dict) -> dict: + """Validates that required configuration values are present.""" if not values["endpoint"]: raise ValueError("config WEAVIATE_ENDPOINT is required") return values class WeaviateVector(BaseVector): + """ + Weaviate vector database implementation for document storage and retrieval. + + Handles creation, insertion, deletion, and querying of document embeddings + in a Weaviate collection. + """ + def __init__(self, collection_name: str, config: WeaviateConfig, attributes: list): + """ + Initializes the Weaviate vector store. + + Args: + collection_name: Name of the Weaviate collection + config: Weaviate configuration settings + attributes: List of metadata attributes to store + """ super().__init__(collection_name) self._client = self._init_client(config) self._attributes = attributes - def _init_client(self, config: WeaviateConfig) -> weaviate.Client: - auth_config = weaviate.AuthApiKey(api_key=config.api_key or "") + def _init_client(self, config: WeaviateConfig) -> weaviate.WeaviateClient: + """ + Initializes and returns a connected Weaviate client. - weaviate.connect.connection.has_grpc = False # ty: ignore [unresolved-attribute] + Configures both HTTP and gRPC connections with proper authentication. 
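For orientation, the v3-to-v4 weaviate-client migration in this file boils down to roughly the following correspondence; a sketch assuming weaviate-client 4.x against a local instance, with illustrative host, ports, and key:

```python
import weaviate
from weaviate.classes.init import Auth

# v3 (removed above):  weaviate.Client(url=..., auth_client_secret=AuthApiKey(...))
# v4: explicit HTTP + gRPC endpoints and typed auth credentials.
client = weaviate.connect_to_custom(
    http_host="localhost",
    http_port=8080,
    http_secure=False,
    grpc_host="localhost",
    grpc_port=50051,
    grpc_secure=False,
    auth_credentials=Auth.api_key("secret"),  # or None for anonymous access
)
assert client.is_ready()
client.close()
```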
+ """ + p = urlparse(config.endpoint) + host = p.hostname or config.endpoint.replace("https://", "").replace("http://", "") + http_secure = p.scheme == "https" + http_port = p.port or (443 if http_secure else 80) - try: - client = weaviate.Client( - url=config.endpoint, auth_client_secret=auth_config, timeout_config=(5, 60), startup_period=None - ) - except requests.ConnectionError: - raise ConnectionError("Vector database connection error") + grpc_host = host + grpc_secure = http_secure + grpc_port = 443 if grpc_secure else 50051 - client.batch.configure( - # `batch_size` takes an `int` value to enable auto-batching - # (`None` is used for manual batching) - batch_size=config.batch_size, - # dynamically update the `batch_size` based on import speed - dynamic=True, - # `timeout_retries` takes an `int` value to retry on time outs - timeout_retries=3, + client = weaviate.connect_to_custom( + http_host=host, + http_port=http_port, + http_secure=http_secure, + grpc_host=grpc_host, + grpc_port=grpc_port, + grpc_secure=grpc_secure, + auth_credentials=Auth.api_key(config.api_key) if config.api_key else None, ) + if not client.is_ready(): + raise ConnectionError("Vector database is not ready") + return client def get_type(self) -> str: + """Returns the vector database type identifier.""" return VectorType.WEAVIATE def get_collection_name(self, dataset: Dataset) -> str: + """ + Retrieves or generates the collection name for a dataset. + + Uses existing index structure if available, otherwise generates from dataset ID. + """ if dataset.index_struct_dict: class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] if not class_prefix.endswith("_Node"): - # original class_prefix class_prefix += "_Node" - return class_prefix dataset_id = dataset.id return Dataset.gen_collection_name_by_id(dataset_id) - def to_index_struct(self): + def to_index_struct(self) -> dict: + """Returns the index structure dictionary for persistence.""" return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}} def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): - # create collection + """ + Creates a new collection and adds initial documents with embeddings. + """ self._create_collection() - # create vector self.add_texts(texts, embeddings) def _create_collection(self): + """ + Creates the Weaviate collection with required schema if it doesn't exist. + + Uses Redis locking to prevent concurrent creation attempts. 
+ """ lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = f"vector_indexing_{self._collection_name}" - if redis_client.get(collection_exist_cache_key): + cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(cache_key): return - schema = self._default_schema(self._collection_name) - if not self._client.schema.contains(schema): - # create collection - self._client.schema.create_class(schema) - redis_client.set(collection_exist_cache_key, 1, ex=3600) + + try: + if not self._client.collections.exists(self._collection_name): + self._client.collections.create( + name=self._collection_name, + properties=[ + wc.Property( + name=Field.TEXT_KEY.value, + data_type=wc.DataType.TEXT, + tokenization=wc.Tokenization.WORD, + ), + wc.Property(name="document_id", data_type=wc.DataType.TEXT), + wc.Property(name="doc_id", data_type=wc.DataType.TEXT), + wc.Property(name="chunk_index", data_type=wc.DataType.INT), + ], + vector_config=wc.Configure.Vectors.self_provided(), + ) + + self._ensure_properties() + redis_client.set(cache_key, 1, ex=3600) + except Exception as e: + logger.exception("Error creating collection %s", self._collection_name) + raise + + def _ensure_properties(self) -> None: + """ + Ensures all required properties exist in the collection schema. + + Adds missing properties if the collection exists but lacks them. + """ + if not self._client.collections.exists(self._collection_name): + return + + col = self._client.collections.use(self._collection_name) + cfg = col.config.get() + existing = {p.name for p in (cfg.properties or [])} + + to_add = [] + if "document_id" not in existing: + to_add.append(wc.Property(name="document_id", data_type=wc.DataType.TEXT)) + if "doc_id" not in existing: + to_add.append(wc.Property(name="doc_id", data_type=wc.DataType.TEXT)) + if "chunk_index" not in existing: + to_add.append(wc.Property(name="chunk_index", data_type=wc.DataType.INT)) + + for prop in to_add: + try: + col.config.add_property(prop) + except Exception as e: + logger.warning("Could not add property %s: %s", prop.name, e) + + def _get_uuids(self, documents: list[Document]) -> list[str]: + """ + Generates deterministic UUIDs for documents based on their content. + + Uses UUID5 with URL namespace to ensure consistent IDs for identical content. + """ + URL_NAMESPACE = _uuid.UUID("6ba7b811-9dad-11d1-80b4-00c04fd430c8") + + uuids = [] + for doc in documents: + uuid_val = _uuid.uuid5(URL_NAMESPACE, doc.page_content) + uuids.append(str(uuid_val)) + + return uuids def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + """ + Adds documents with their embeddings to the collection. + + Batches insertions for efficiency and returns the list of inserted object IDs. 
+ """ uuids = self._get_uuids(documents) texts = [d.page_content for d in documents] metadatas = [d.metadata for d in documents] - ids = [] + col = self._client.collections.use(self._collection_name) + objs: list[DataObject] = [] + ids_out: list[str] = [] - with self._client.batch as batch: - for i, text in enumerate(texts): - data_properties = {Field.TEXT_KEY.value: text} - if metadatas is not None: - # metadata maybe None - for key, val in (metadatas[i] or {}).items(): - data_properties[key] = self._json_serializable(val) + for i, text in enumerate(texts): + props: dict[str, Any] = {Field.TEXT_KEY.value: text} + meta = metadatas[i] or {} + for k, v in meta.items(): + props[k] = self._json_serializable(v) - batch.add_data_object( - data_object=data_properties, - class_name=self._collection_name, - uuid=uuids[i], - vector=embeddings[i] if embeddings else None, + candidate = uuids[i] if uuids else None + uid = candidate if (candidate and self._is_uuid(candidate)) else str(_uuid.uuid4()) + ids_out.append(uid) + + vec_payload = None + if embeddings and i < len(embeddings) and embeddings[i]: + vec_payload = {"default": embeddings[i]} + + objs.append( + DataObject( + uuid=uid, + properties=props, # type: ignore[arg-type] # mypy incorrectly infers DataObject signature + vector=vec_payload, ) - ids.append(uuids[i]) - return ids + ) - def delete_by_metadata_field(self, key: str, value: str): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - where_filter = {"operator": "Equal", "path": [key], "valueText": value} + with col.batch.dynamic() as batch: + for obj in objs: + batch.add_object(properties=obj.properties, uuid=obj.uuid, vector=obj.vector) - self._client.batch.delete_objects(class_name=self._collection_name, where=where_filter, output="minimal") + return ids_out + + def _is_uuid(self, val: str) -> bool: + """Validates whether a string is a valid UUID format.""" + try: + _uuid.UUID(str(val)) + return True + except Exception: + return False + + def delete_by_metadata_field(self, key: str, value: str) -> None: + """Deletes all objects matching a specific metadata field value.""" + if not self._client.collections.exists(self._collection_name): + return + + col = self._client.collections.use(self._collection_name) + col.data.delete_many(where=Filter.by_property(key).equal(value)) def delete(self): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - self._client.schema.delete_class(self._collection_name) + """Deletes the entire collection from Weaviate.""" + if self._client.collections.exists(self._collection_name): + self._client.collections.delete(self._collection_name) def text_exists(self, id: str) -> bool: - collection_name = self._collection_name - schema = self._default_schema(self._collection_name) - - # check whether the index already exists - if not self._client.schema.contains(schema): + """Checks if a document with the given doc_id exists in the collection.""" + if not self._client.collections.exists(self._collection_name): return False - result = ( - self._client.query.get(collection_name) - .with_additional(["id"]) - .with_where( - { - "path": ["doc_id"], - "operator": "Equal", - "valueText": id, - } - ) - .with_limit(1) - .do() + + col = self._client.collections.use(self._collection_name) + res = col.query.fetch_objects( + filters=Filter.by_property("doc_id").equal(id), + limit=1, + 
return_properties=["doc_id"], ) - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") + return len(res.objects) > 0 - entries = result["data"]["Get"][collection_name] - if len(entries) == 0: - return False + def delete_by_ids(self, ids: list[str]) -> None: + """ + Deletes objects by their UUID identifiers. - return True + Silently ignores 404 errors for non-existent IDs. + """ + if not self._client.collections.exists(self._collection_name): + return - def delete_by_ids(self, ids: list[str]): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - for uuid in ids: - try: - self._client.data_object.delete( - class_name=self._collection_name, - uuid=uuid, - ) - except weaviate.UnexpectedStatusCodeException as e: - # tolerate not found error - if e.status_code != 404: - raise e + col = self._client.collections.use(self._collection_name) + + for uid in ids: + try: + col.data.delete_by_id(uid) + except UnexpectedStatusCodeError as e: + if getattr(e, "status_code", None) != 404: + raise def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: - """Look up similar documents by embedding vector in Weaviate.""" - collection_name = self._collection_name - properties = self._attributes - properties.append(Field.TEXT_KEY.value) - query_obj = self._client.query.get(collection_name, properties) + """ + Performs vector similarity search using the provided query vector. - vector = {"vector": query_vector} - document_ids_filter = kwargs.get("document_ids_filter") - if document_ids_filter: - operands = [] - for document_id_filter in document_ids_filter: - operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter}) - where_filter = {"operator": "Or", "operands": operands} - query_obj = query_obj.with_where(where_filter) - result = ( - query_obj.with_near_vector(vector) - .with_limit(kwargs.get("top_k", 4)) - .with_additional(["vector", "distance"]) - .do() + Filters by document IDs if provided and applies score threshold. + Returns documents sorted by relevance score. 
+ """ + if not self._client.collections.exists(self._collection_name): + return [] + + col = self._client.collections.use(self._collection_name) + props = list({*self._attributes, "document_id", Field.TEXT_KEY.value}) + + where = None + doc_ids = kwargs.get("document_ids_filter") or [] + if doc_ids: + ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] + where = ors[0] + for f in ors[1:]: + where = where | f + + top_k = int(kwargs.get("top_k", 4)) + score_threshold = float(kwargs.get("score_threshold") or 0.0) + + res = col.query.near_vector( + near_vector=query_vector, + limit=top_k, + return_properties=props, + return_metadata=MetadataQuery(distance=True), + include_vector=False, + filters=where, + target_vector="default", ) - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") - docs_and_scores = [] - for res in result["data"]["Get"][collection_name]: - text = res.pop(Field.TEXT_KEY.value) - score = 1 - res["_additional"]["distance"] - docs_and_scores.append((Document(page_content=text, metadata=res), score)) + docs: list[Document] = [] + for obj in res.objects: + properties = dict(obj.properties or {}) + text = properties.pop(Field.TEXT_KEY.value, "") + if obj.metadata and obj.metadata.distance is not None: + distance = obj.metadata.distance + else: + distance = 1.0 + score = 1.0 - distance - docs = [] - for doc, score in docs_and_scores: - score_threshold = float(kwargs.get("score_threshold") or 0.0) - # check score threshold - if score >= score_threshold: - if doc.metadata is not None: - doc.metadata["score"] = score - docs.append(doc) - # Sort the documents by score in descending order - docs = sorted(docs, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True) + if score > score_threshold: + properties["score"] = score + docs.append(Document(page_content=text, metadata=properties)) + + docs.sort(key=lambda d: d.metadata.get("score", 0.0), reverse=True) return docs def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: - """Return docs using BM25F. - - Args: - query: Text to look up documents similar to. - - Returns: - List of Documents most similar to the query. """ - collection_name = self._collection_name - content: dict[str, Any] = {"concepts": [query]} - properties = self._attributes - properties.append(Field.TEXT_KEY.value) - if kwargs.get("search_distance"): - content["certainty"] = kwargs.get("search_distance") - query_obj = self._client.query.get(collection_name, properties) - document_ids_filter = kwargs.get("document_ids_filter") - if document_ids_filter: - operands = [] - for document_id_filter in document_ids_filter: - operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter}) - where_filter = {"operator": "Or", "operands": operands} - query_obj = query_obj.with_where(where_filter) - query_obj = query_obj.with_additional(["vector"]) - properties = ["text"] - result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 4)).do() - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") - docs = [] - for res in result["data"]["Get"][collection_name]: - text = res.pop(Field.TEXT_KEY.value) - additional = res.pop("_additional") - docs.append(Document(page_content=text, vector=additional["vector"], metadata=res)) + Performs BM25 full-text search on document content. + + Filters by document IDs if provided and returns matching documents with vectors. 
+ """ + if not self._client.collections.exists(self._collection_name): + return [] + + col = self._client.collections.use(self._collection_name) + props = list({*self._attributes, Field.TEXT_KEY.value}) + + where = None + doc_ids = kwargs.get("document_ids_filter") or [] + if doc_ids: + ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] + where = ors[0] + for f in ors[1:]: + where = where | f + + top_k = int(kwargs.get("top_k", 4)) + + res = col.query.bm25( + query=query, + query_properties=[Field.TEXT_KEY.value], + limit=top_k, + return_properties=props, + include_vector=True, + filters=where, + ) + + docs: list[Document] = [] + for obj in res.objects: + properties = dict(obj.properties or {}) + text = properties.pop(Field.TEXT_KEY.value, "") + + vec = obj.vector + if isinstance(vec, dict): + vec = vec.get("default") or next(iter(vec.values()), None) + + docs.append(Document(page_content=text, vector=vec, metadata=properties)) return docs - def _default_schema(self, index_name: str): - return { - "class": index_name, - "properties": [ - { - "name": "text", - "dataType": ["text"], - } - ], - } - - def _json_serializable(self, value: Any): + def _json_serializable(self, value: Any) -> Any: + """Converts values to JSON-serializable format, handling datetime objects.""" if isinstance(value, datetime.datetime): return value.isoformat() return value class WeaviateVectorFactory(AbstractVectorFactory): + """Factory class for creating WeaviateVector instances.""" + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> WeaviateVector: + """ + Initializes a WeaviateVector instance for the given dataset. + + Uses existing collection name from dataset index structure or generates a new one. + Updates dataset index structure if not already set. 
+ """ if dataset.index_struct_dict: class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] collection_name = class_prefix @@ -282,7 +427,6 @@ class WeaviateVectorFactory(AbstractVectorFactory): dataset_id = dataset.id collection_name = Dataset.gen_collection_name_by_id(dataset_id) dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.WEAVIATE, collection_name)) - return WeaviateVector( collection_name=collection_name, config=WeaviateConfig( diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index c2f17cd148..937b8f033c 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -43,8 +43,7 @@ class CacheEmbedding(Embeddings): else: embedding_queue_indices.append(i) - # release database connection, because embedding may take a long time - db.session.close() + # NOTE: avoid closing the shared scoped session here; downstream code may still have pending work if embedding_queue_indices: embedding_queue_texts = [texts[i] for i in embedding_queue_indices] diff --git a/api/core/rag/entities/event.py b/api/core/rag/entities/event.py index 24db5d77be..2d8d4060dd 100644 --- a/api/core/rag/entities/event.py +++ b/api/core/rag/entities/event.py @@ -1,11 +1,11 @@ from collections.abc import Mapping -from enum import Enum +from enum import StrEnum from typing import Any from pydantic import BaseModel, Field -class DatasourceStreamEvent(Enum): +class DatasourceStreamEvent(StrEnum): """ Datasource Stream event """ @@ -20,12 +20,12 @@ class BaseDatasourceEvent(BaseModel): class DatasourceErrorEvent(BaseDatasourceEvent): - event: str = DatasourceStreamEvent.ERROR.value + event: DatasourceStreamEvent = DatasourceStreamEvent.ERROR error: str = Field(..., description="error message") class DatasourceCompletedEvent(BaseDatasourceEvent): - event: str = DatasourceStreamEvent.COMPLETED.value + event: DatasourceStreamEvent = DatasourceStreamEvent.COMPLETED data: Mapping[str, Any] | list = Field(..., description="result") total: int | None = Field(default=0, description="total") completed: int | None = Field(default=0, description="completed") @@ -33,6 +33,6 @@ class DatasourceCompletedEvent(BaseDatasourceEvent): class DatasourceProcessingEvent(BaseDatasourceEvent): - event: str = DatasourceStreamEvent.PROCESSING.value + event: DatasourceStreamEvent = DatasourceStreamEvent.PROCESSING total: int | None = Field(..., description="total") completed: int | None = Field(..., description="completed") diff --git a/api/core/rag/extractor/entity/extract_setting.py b/api/core/rag/extractor/entity/extract_setting.py index b9bf9d0d8c..c3bfbce98f 100644 --- a/api/core/rag/extractor/entity/extract_setting.py +++ b/api/core/rag/extractor/entity/extract_setting.py @@ -17,9 +17,6 @@ class NotionInfo(BaseModel): tenant_id: str model_config = ConfigDict(arbitrary_types_allowed=True) - def __init__(self, **data): - super().__init__(**data) - class WebsiteInfo(BaseModel): """ @@ -47,6 +44,3 @@ class ExtractSetting(BaseModel): website_info: WebsiteInfo | None = None document_model: str | None = None model_config = ConfigDict(arbitrary_types_allowed=True) - - def __init__(self, **data): - super().__init__(**data) diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py index 3dc08e1832..0f62f9c4b6 100644 --- a/api/core/rag/extractor/extract_processor.py +++ b/api/core/rag/extractor/extract_processor.py @@ -45,7 +45,7 @@ class ExtractProcessor: cls, 
upload_file: UploadFile, return_text: bool = False, is_automatic: bool = False ) -> Union[list[Document], str]: extract_setting = ExtractSetting( - datasource_type=DatasourceType.FILE.value, upload_file=upload_file, document_model="text_model" + datasource_type=DatasourceType.FILE, upload_file=upload_file, document_model="text_model" ) if return_text: delimiter = "\n" @@ -76,7 +76,7 @@ class ExtractProcessor: # https://stackoverflow.com/questions/26541416/generate-temporary-file-names-without-creating-actual-file-in-python#comment90414256_26541521 file_path = f"{temp_dir}/{tempfile.gettempdir()}{suffix}" Path(file_path).write_bytes(response.content) - extract_setting = ExtractSetting(datasource_type=DatasourceType.FILE.value, document_model="text_model") + extract_setting = ExtractSetting(datasource_type=DatasourceType.FILE, document_model="text_model") if return_text: delimiter = "\n" return delimiter.join( @@ -92,7 +92,7 @@ class ExtractProcessor: def extract( cls, extract_setting: ExtractSetting, is_automatic: bool = False, file_path: str | None = None ) -> list[Document]: - if extract_setting.datasource_type == DatasourceType.FILE.value: + if extract_setting.datasource_type == DatasourceType.FILE: with tempfile.TemporaryDirectory() as temp_dir: if not file_path: assert extract_setting.upload_file is not None, "upload_file is required" @@ -163,7 +163,7 @@ class ExtractProcessor: # txt extractor = TextExtractor(file_path, autodetect_encoding=True) return extractor.extract() - elif extract_setting.datasource_type == DatasourceType.NOTION.value: + elif extract_setting.datasource_type == DatasourceType.NOTION: assert extract_setting.notion_info is not None, "notion_info is required" extractor = NotionExtractor( notion_workspace_id=extract_setting.notion_info.notion_workspace_id, @@ -174,7 +174,7 @@ class ExtractProcessor: credential_id=extract_setting.notion_info.credential_id, ) return extractor.extract() - elif extract_setting.datasource_type == DatasourceType.WEBSITE.value: + elif extract_setting.datasource_type == DatasourceType.WEBSITE: assert extract_setting.website_info is not None, "website_info is required" if extract_setting.website_info.provider == "firecrawl": extractor = FirecrawlWebExtractor( diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py index e1ba6ef243..789ac8557d 100644 --- a/api/core/rag/extractor/firecrawl/firecrawl_app.py +++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py @@ -2,7 +2,7 @@ import json import time from typing import Any, cast -import requests +import httpx from extensions.ext_storage import storage @@ -25,7 +25,7 @@ class FirecrawlApp: } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/scrape", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/scrape", json_data, headers) if response.status_code == 200: response_data = response.json() data = response_data["data"] @@ -42,7 +42,7 @@ class FirecrawlApp: json_data = {"url": url} if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/crawl", json_data, headers) if response.status_code == 200: # There's also another two fields in the response: "success" (bool) and "url" (str) job_id = response.json().get("id") @@ -51,9 +51,25 @@ class FirecrawlApp: self._handle_error(response, "start crawl job") return "" # unreachable + def map(self, url: str, params: dict[str, 
Any] | None = None) -> dict[str, Any]: + # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/map + headers = self._prepare_headers() + json_data: dict[str, Any] = {"url": url, "integration": "dify"} + if params: + # Pass through provided params, including optional "sitemap": "only" | "include" | "skip" + json_data.update(params) + response = self._post_request(f"{self.base_url}/v2/map", json_data, headers) + if response.status_code == 200: + return cast(dict[str, Any], response.json()) + elif response.status_code in {402, 409, 500, 429, 408}: + self._handle_error(response, "start map job") + return {} + else: + raise Exception(f"Failed to start map job. Status code: {response.status_code}") + def check_crawl_status(self, job_id) -> dict[str, Any]: headers = self._prepare_headers() - response = self._get_request(f"{self.base_url}/v1/crawl/{job_id}", headers) + response = self._get_request(f"{self.base_url}/v2/crawl/{job_id}", headers) if response.status_code == 200: crawl_status_response = response.json() if crawl_status_response.get("status") == "completed": @@ -104,18 +120,18 @@ class FirecrawlApp: def _prepare_headers(self) -> dict[str, Any]: return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} - def _post_request(self, url, data, headers, retries=3, backoff_factor=0.5) -> requests.Response: + def _post_request(self, url, data, headers, retries=3, backoff_factor=0.5) -> httpx.Response: for attempt in range(retries): - response = requests.post(url, headers=headers, json=data) + response = httpx.post(url, headers=headers, json=data) if response.status_code == 502: time.sleep(backoff_factor * (2**attempt)) else: return response return response - def _get_request(self, url, headers, retries=3, backoff_factor=0.5) -> requests.Response: + def _get_request(self, url, headers, retries=3, backoff_factor=0.5) -> httpx.Response: for attempt in range(retries): - response = requests.get(url, headers=headers) + response = httpx.get(url, headers=headers) if response.status_code == 502: time.sleep(backoff_factor * (2**attempt)) else: @@ -135,12 +151,16 @@ class FirecrawlApp: "lang": "en", "country": "us", "timeout": 60000, - "ignoreInvalidURLs": False, + "ignoreInvalidURLs": True, "scrapeOptions": {}, + "sources": [ + {"type": "web"}, + ], + "integration": "dify", } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/search", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/search", json_data, headers) if response.status_code == 200: response_data = response.json() if not response_data.get("success"): diff --git a/api/core/rag/extractor/notion_extractor.py b/api/core/rag/extractor/notion_extractor.py index bddf41af43..e87ab38349 100644 --- a/api/core/rag/extractor/notion_extractor.py +++ b/api/core/rag/extractor/notion_extractor.py @@ -3,7 +3,7 @@ import logging import operator from typing import Any, cast -import requests +import httpx from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor @@ -92,7 +92,7 @@ class NotionExtractor(BaseExtractor): if next_cursor: current_query["start_cursor"] = next_cursor - res = requests.post( + res = httpx.post( DATABASE_URL_TMPL.format(database_id=database_id), headers={ "Authorization": "Bearer " + self._notion_access_token, @@ -160,7 +160,7 @@ class NotionExtractor(BaseExtractor): while True: query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor} try: - res = requests.request( + res = 
httpx.request( "GET", block_url, headers={ @@ -173,7 +173,7 @@ class NotionExtractor(BaseExtractor): if res.status_code != 200: raise ValueError(f"Error fetching Notion block data: {res.text}") data = res.json() - except requests.RequestException as e: + except httpx.HTTPError as e: raise ValueError("Error fetching Notion block data") from e if "results" not in data or not isinstance(data["results"], list): raise ValueError("Error fetching Notion block data") @@ -222,7 +222,7 @@ class NotionExtractor(BaseExtractor): while True: query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor} - res = requests.request( + res = httpx.request( "GET", block_url, headers={ @@ -282,7 +282,7 @@ class NotionExtractor(BaseExtractor): while not done: query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor} - res = requests.request( + res = httpx.request( "GET", block_url, headers={ @@ -354,7 +354,7 @@ class NotionExtractor(BaseExtractor): query_dict: dict[str, Any] = {} - res = requests.request( + res = httpx.request( "GET", retrieve_page_url, headers={ diff --git a/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py b/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py index 5199208f70..7dd8beaa46 100644 --- a/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py @@ -1,6 +1,7 @@ import logging import os +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -49,7 +50,8 @@ class UnstructuredWordExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py index ad04bd0bd1..d97d4c3a48 100644 --- a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py @@ -4,6 +4,7 @@ import logging from bs4 import BeautifulSoup +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -46,7 +47,8 @@ class UnstructuredEmailExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py index fc14ee6275..3061d957ac 100644 --- a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py @@ -2,6 +2,7 @@ import logging import pypandoc # type: ignore +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from 
core.rag.models.document import Document @@ -40,7 +41,8 @@ class UnstructuredEpubExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py index 23030d7739..b6d8c47111 100644 --- a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py @@ -1,5 +1,6 @@ import logging +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -32,7 +33,8 @@ class UnstructuredMarkdownExtractor(BaseExtractor): elements = partition_md(filename=self._file_path) from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py index f29e639d1b..ae60fc7981 100644 --- a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py @@ -1,5 +1,6 @@ import logging +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -31,7 +32,8 @@ class UnstructuredMsgExtractor(BaseExtractor): elements = partition_msg(filename=self._file_path) from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py index d75e166f1b..2d4846d85e 100644 --- a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py @@ -1,5 +1,6 @@ import logging +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -32,7 +33,8 @@ class UnstructuredXmlExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/watercrawl/client.py 
b/api/core/rag/extractor/watercrawl/client.py index 6d596e07d8..7cf6c4d289 100644 --- a/api/core/rag/extractor/watercrawl/client.py +++ b/api/core/rag/extractor/watercrawl/client.py @@ -3,8 +3,8 @@ from collections.abc import Generator from typing import Union from urllib.parse import urljoin -import requests -from requests import Response +import httpx +from httpx import Response from core.rag.extractor.watercrawl.exceptions import ( WaterCrawlAuthenticationError, @@ -20,28 +20,45 @@ class BaseAPIClient: self.session = self.init_session() def init_session(self): - session = requests.Session() - session.headers.update({"X-API-Key": self.api_key}) - session.headers.update({"Content-Type": "application/json"}) - session.headers.update({"Accept": "application/json"}) - session.headers.update({"User-Agent": "WaterCrawl-Plugin"}) - session.headers.update({"Accept-Language": "en-US"}) - return session + headers = { + "X-API-Key": self.api_key, + "Content-Type": "application/json", + "Accept": "application/json", + "User-Agent": "WaterCrawl-Plugin", + "Accept-Language": "en-US", + } + return httpx.Client(headers=headers, timeout=None) + + def _request( + self, + method: str, + endpoint: str, + query_params: dict | None = None, + data: dict | None = None, + **kwargs, + ) -> Response: + stream = kwargs.pop("stream", False) + url = urljoin(self.base_url, endpoint) + if stream: + request = self.session.build_request(method, url, params=query_params, json=data) + return self.session.send(request, stream=True, **kwargs) + + return self.session.request(method, url, params=query_params, json=data, **kwargs) def _get(self, endpoint: str, query_params: dict | None = None, **kwargs): - return self.session.get(urljoin(self.base_url, endpoint), params=query_params, **kwargs) + return self._request("GET", endpoint, query_params=query_params, **kwargs) def _post(self, endpoint: str, query_params: dict | None = None, data: dict | None = None, **kwargs): - return self.session.post(urljoin(self.base_url, endpoint), params=query_params, json=data, **kwargs) + return self._request("POST", endpoint, query_params=query_params, data=data, **kwargs) def _put(self, endpoint: str, query_params: dict | None = None, data: dict | None = None, **kwargs): - return self.session.put(urljoin(self.base_url, endpoint), params=query_params, json=data, **kwargs) + return self._request("PUT", endpoint, query_params=query_params, data=data, **kwargs) def _delete(self, endpoint: str, query_params: dict | None = None, **kwargs): - return self.session.delete(urljoin(self.base_url, endpoint), params=query_params, **kwargs) + return self._request("DELETE", endpoint, query_params=query_params, **kwargs) def _patch(self, endpoint: str, query_params: dict | None = None, data: dict | None = None, **kwargs): - return self.session.patch(urljoin(self.base_url, endpoint), params=query_params, json=data, **kwargs) + return self._request("PATCH", endpoint, query_params=query_params, data=data, **kwargs) class WaterCrawlAPIClient(BaseAPIClient): @@ -49,14 +66,17 @@ class WaterCrawlAPIClient(BaseAPIClient): super().__init__(api_key, base_url) def process_eventstream(self, response: Response, download: bool = False) -> Generator: - for line in response.iter_lines(): - line = line.decode("utf-8") - if line.startswith("data:"): - line = line[5:].strip() - data = json.loads(line) - if data["type"] == "result" and download: - data["data"] = self.download_result(data["data"]) - yield data + try: + for raw_line in response.iter_lines(): + line = 
raw_line.decode("utf-8") if isinstance(raw_line, bytes) else raw_line + if line.startswith("data:"): + line = line[5:].strip() + data = json.loads(line) + if data["type"] == "result" and download: + data["data"] = self.download_result(data["data"]) + yield data + finally: + response.close() def process_response(self, response: Response) -> dict | bytes | list | None | Generator: if response.status_code == 401: @@ -170,7 +190,10 @@ class WaterCrawlAPIClient(BaseAPIClient): return event_data["data"] def download_result(self, result_object: dict): - response = requests.get(result_object["result"]) - response.raise_for_status() - result_object["result"] = response.json() + response = httpx.get(result_object["result"], timeout=None) + try: + response.raise_for_status() + result_object["result"] = response.json() + finally: + response.close() return result_object diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index f25f92cf81..1a9704688a 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -9,7 +9,7 @@ import uuid from urllib.parse import urlparse from xml.etree import ElementTree -import requests +import httpx from docx import Document as DocxDocument from configs import dify_config @@ -43,15 +43,19 @@ class WordExtractor(BaseExtractor): # If the file is a web path, download it to a temporary file, and use that if not os.path.isfile(self.file_path) and self._is_valid_url(self.file_path): - r = requests.get(self.file_path) + response = httpx.get(self.file_path, timeout=None) - if r.status_code != 200: - raise ValueError(f"Check the url of your file; returned status code {r.status_code}") + if response.status_code != 200: + response.close() + raise ValueError(f"Check the url of your file; returned status code {response.status_code}") self.web_path = self.file_path # TODO: use a better way to handle the file self.temp_file = tempfile.NamedTemporaryFile() # noqa SIM115 - self.temp_file.write(r.content) + try: + self.temp_file.write(response.content) + finally: + response.close() self.file_path = self.temp_file.name elif not os.path.isfile(self.file_path): raise ValueError(f"File path {self.file_path} is not a valid file or url") diff --git a/api/core/rag/index_processor/index_processor_base.py b/api/core/rag/index_processor/index_processor_base.py index 05cffb5a55..d4eff53204 100644 --- a/api/core/rag/index_processor/index_processor_base.py +++ b/api/core/rag/index_processor/index_processor_base.py @@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any, Optional from configs import dify_config from core.rag.extractor.entity.extract_setting import ExtractSetting from core.rag.models.document import Document +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.rag.splitter.fixed_text_splitter import ( EnhanceRecursiveCharacterTextSplitter, FixedRecursiveCharacterTextSplitter, @@ -49,7 +50,7 @@ class BaseIndexProcessor(ABC): @abstractmethod def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, diff --git a/api/core/rag/index_processor/processor/paragraph_index_processor.py b/api/core/rag/index_processor/processor/paragraph_index_processor.py index 755aa88d08..5e5fea7ea9 100644 --- a/api/core/rag/index_processor/processor/paragraph_index_processor.py +++ b/api/core/rag/index_processor/processor/paragraph_index_processor.py @@ -14,6 +14,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from 
core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import Document +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.tools.utils.text_processing_utils import remove_leading_symbols from libs import helper from models.dataset import Dataset, DatasetProcessRule @@ -38,11 +39,11 @@ class ParagraphIndexProcessor(BaseIndexProcessor): raise ValueError("No process rule found.") if process_rule.get("mode") == "automatic": automatic_rule = DatasetProcessRule.AUTOMATIC_RULES - rules = Rule(**automatic_rule) + rules = Rule.model_validate(automatic_rule) else: if not process_rule.get("rules"): raise ValueError("No rules found in process rule.") - rules = Rule(**process_rule.get("rules")) + rules = Rule.model_validate(process_rule.get("rules")) # Split the text documents into nodes. if not rules.segmentation: raise ValueError("No segmentation found in rules.") @@ -106,7 +107,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor): def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, diff --git a/api/core/rag/index_processor/processor/parent_child_index_processor.py b/api/core/rag/index_processor/processor/parent_child_index_processor.py index e0ccd8b567..4fa78e2f95 100644 --- a/api/core/rag/index_processor/processor/parent_child_index_processor.py +++ b/api/core/rag/index_processor/processor/parent_child_index_processor.py @@ -16,6 +16,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import ChildDocument, Document, ParentChildStructureChunk +from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from libs import helper from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegment @@ -40,7 +41,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): raise ValueError("No process rule found.") if not process_rule.get("rules"): raise ValueError("No rules found in process rule.") - rules = Rule(**process_rule.get("rules")) + rules = Rule.model_validate(process_rule.get("rules")) all_documents: list[Document] = [] if rules.parent_mode == ParentMode.PARAGRAPH: # Split the text documents into nodes. 
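The Rule(**...) to Rule.model_validate(...) switch here and in the surrounding processors standardizes on pydantic v2's validation entry point. A minimal runnable sketch of the practical difference, using a hypothetical RuleSketch model standing in for Dify's real Rule:

# Minimal sketch; RuleSketch and SegmentationSketch are hypothetical stand-ins,
# not the models defined in the Dify codebase.
from pydantic import BaseModel, ValidationError

class SegmentationSketch(BaseModel):
    separator: str = "\n"
    max_tokens: int = 512

class RuleSketch(BaseModel):
    segmentation: SegmentationSketch | None = None

raw = {"segmentation": {"separator": "\n\n", "max_tokens": 1024}}
rule = RuleSketch.model_validate(raw)  # validates and coerces the nested mapping
assert rule.segmentation is not None and rule.segmentation.max_tokens == 1024

# RuleSketch(**None) would raise a bare TypeError before validation even starts;
# model_validate reports a structured ValidationError instead.
try:
    RuleSketch.model_validate(None)
except ValidationError as exc:
    print(exc.error_count(), "validation error(s)")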
@@ -110,7 +111,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): child_documents = document.children if child_documents: formatted_child_documents = [ - Document(**child_document.model_dump()) for child_document in child_documents + Document.model_validate(child_document.model_dump()) for child_document in child_documents ] vector.create(formatted_child_documents) @@ -161,7 +162,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, @@ -224,7 +225,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): return child_nodes def index(self, dataset: Dataset, document: DatasetDocument, chunks: Any): - parent_childs = ParentChildStructureChunk(**chunks) + parent_childs = ParentChildStructureChunk.model_validate(chunks) documents = [] for parent_child in parent_childs.parent_child_chunks: metadata = { @@ -274,7 +275,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): vector.create(all_child_documents) def format_preview(self, chunks: Any) -> Mapping[str, Any]: - parent_childs = ParentChildStructureChunk(**chunks) + parent_childs = ParentChildStructureChunk.model_validate(chunks) preview = [] for parent_child in parent_childs.parent_child_chunks: preview.append({"content": parent_child.parent_content, "child_chunks": parent_child.child_contents}) diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py index 2054031643..3e3deb0180 100644 --- a/api/core/rag/index_processor/processor/qa_index_processor.py +++ b/api/core/rag/index_processor/processor/qa_index_processor.py @@ -21,6 +21,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import Document, QAStructureChunk +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.tools.utils.text_processing_utils import remove_leading_symbols from libs import helper from models.dataset import Dataset @@ -47,7 +48,7 @@ class QAIndexProcessor(BaseIndexProcessor): raise ValueError("No process rule found.") if not process_rule.get("rules"): raise ValueError("No rules found in process rule.") - rules = Rule(**process_rule.get("rules")) + rules = Rule.model_validate(process_rule.get("rules")) splitter = self._get_splitter( processing_rule_mode=process_rule.get("mode"), max_tokens=rules.segmentation.max_tokens if rules.segmentation else 0, @@ -141,7 +142,7 @@ class QAIndexProcessor(BaseIndexProcessor): def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, @@ -168,7 +169,7 @@ class QAIndexProcessor(BaseIndexProcessor): return docs def index(self, dataset: Dataset, document: DatasetDocument, chunks: Any): - qa_chunks = QAStructureChunk(**chunks) + qa_chunks = QAStructureChunk.model_validate(chunks) documents = [] for qa_chunk in qa_chunks.qa_chunks: metadata = { @@ -191,7 +192,7 @@ class QAIndexProcessor(BaseIndexProcessor): raise ValueError("Indexing technique must be high quality.") def format_preview(self, chunks: Any) -> Mapping[str, Any]: - qa_chunks = QAStructureChunk(**chunks) + qa_chunks = QAStructureChunk.model_validate(chunks) preview = [] for qa_chunk in qa_chunks.qa_chunks: preview.append({"question": qa_chunk.question, "answer": qa_chunk.answer}) diff 
--git a/api/core/rag/rerank/rerank_factory.py b/api/core/rag/rerank/rerank_factory.py index 1a3cf85736..524e83824c 100644 --- a/api/core/rag/rerank/rerank_factory.py +++ b/api/core/rag/rerank/rerank_factory.py @@ -8,9 +8,9 @@ class RerankRunnerFactory: @staticmethod def create_rerank_runner(runner_type: str, *args, **kwargs) -> BaseRerankRunner: match runner_type: - case RerankMode.RERANKING_MODEL.value: + case RerankMode.RERANKING_MODEL: return RerankModelRunner(*args, **kwargs) - case RerankMode.WEIGHTED_SCORE.value: + case RerankMode.WEIGHTED_SCORE: return WeightRerankRunner(*args, **kwargs) case _: raise ValueError(f"Unknown runner type: {runner_type}") diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index b08f80da49..45b19f25a0 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -61,7 +61,7 @@ from models.dataset import Document as DatasetDocument from services.external_knowledge_service import ExternalDatasetService default_retrieval_model: dict[str, Any] = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 4, @@ -72,6 +72,19 @@ default_retrieval_model: dict[str, Any] = { class DatasetRetrieval: def __init__(self, application_generate_entity=None): self.application_generate_entity = application_generate_entity + self._llm_usage = LLMUsage.empty_usage() + + @property + def llm_usage(self) -> LLMUsage: + return self._llm_usage.model_copy() + + def _record_usage(self, usage: LLMUsage | None) -> None: + if usage is None or usage.total_tokens <= 0: + return + if self._llm_usage.total_tokens == 0: + self._llm_usage = usage + else: + self._llm_usage = self._llm_usage.plus(usage) def retrieve( self, @@ -312,15 +325,18 @@ class DatasetRetrieval: ) tools.append(message_tool) dataset_id = None + router_usage = LLMUsage.empty_usage() if planning_strategy == PlanningStrategy.REACT_ROUTER: react_multi_dataset_router = ReactMultiDatasetRouter() - dataset_id = react_multi_dataset_router.invoke( + dataset_id, router_usage = react_multi_dataset_router.invoke( query, tools, model_config, model_instance, user_id, tenant_id ) elif planning_strategy == PlanningStrategy.ROUTER: function_call_router = FunctionCallMultiDatasetRouter() - dataset_id = function_call_router.invoke(query, tools, model_config, model_instance) + dataset_id, router_usage = function_call_router.invoke(query, tools, model_config, model_instance) + + self._record_usage(router_usage) if dataset_id: # get retrieval model config @@ -364,7 +380,7 @@ class DatasetRetrieval: top_k = retrieval_model_config["top_k"] # get retrieval method if dataset.indexing_technique == "economy": - retrieval_method = "keyword_search" + retrieval_method = RetrievalMethod.KEYWORD_SEARCH else: retrieval_method = retrieval_model_config["search_method"] # get reranking model @@ -623,7 +639,7 @@ class DatasetRetrieval: if dataset.indexing_technique == "economy": # use keyword table query documents = RetrievalService.retrieve( - retrieval_method="keyword_search", + retrieval_method=RetrievalMethod.KEYWORD_SEARCH, dataset_id=dataset.id, query=query, top_k=top_k, @@ -692,7 +708,7 @@ class DatasetRetrieval: if retrieve_config.retrieve_strategy == DatasetRetrieveConfigEntity.RetrieveStrategy.SINGLE: # get retrieval model config default_retrieval_model = { - "search_method": 
RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, @@ -983,7 +999,8 @@ class DatasetRetrieval: ) # handle invoke result - result_text, _ = self._handle_invoke_result(invoke_result=invoke_result) + result_text, usage = self._handle_invoke_result(invoke_result=invoke_result) + self._record_usage(usage) result_text_json = parse_and_check_json_markdown(result_text, []) automatic_metadata_filters = []
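The _record_usage and llm_usage additions above accumulate token usage across the router and other LLM calls and hand callers a defensive copy. A minimal sketch of the same accumulation pattern, with a hypothetical UsageSketch standing in for LLMUsage:

# Minimal sketch; UsageSketch is a hypothetical stand-in for LLMUsage.
from dataclasses import dataclass

@dataclass(frozen=True)
class UsageSketch:
    total_tokens: int = 0

    def plus(self, other: "UsageSketch") -> "UsageSketch":
        return UsageSketch(total_tokens=self.total_tokens + other.total_tokens)

class UsageTracker:
    def __init__(self) -> None:
        self._usage = UsageSketch()

    def record(self, usage: UsageSketch | None) -> None:
        # Same guard as _record_usage: ignore missing or empty usage reports.
        if usage is None or usage.total_tokens <= 0:
            return
        self._usage = self._usage.plus(usage)

    @property
    def usage(self) -> UsageSketch:
        # Frozen, so handing the instance out cannot corrupt the running total;
        # the real property returns model_copy() for the same reason.
        return self._usage

tracker = UsageTracker()
tracker.record(UsageSketch(total_tokens=120))  # e.g. dataset-router call
tracker.record(UsageSketch(total_tokens=80))   # e.g. metadata-filter call
assert tracker.usage.total_tokens == 200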
""" if len(dataset_tools) == 0: - return None + return None, LLMUsage.empty_usage() elif len(dataset_tools) == 1: - return dataset_tools[0].name + return dataset_tools[0].name, LLMUsage.empty_usage() try: prompt_messages = [ @@ -34,9 +34,10 @@ class FunctionCallMultiDatasetRouter: stream=False, model_parameters={"temperature": 0.2, "top_p": 0.3, "max_tokens": 1500}, ) + usage = result.usage or LLMUsage.empty_usage() if result.message.tool_calls: # get retrieval model config - return result.message.tool_calls[0].function.name - return None + return result.message.tool_calls[0].function.name, usage + return None, usage except Exception: - return None + return None, LLMUsage.empty_usage() diff --git a/api/core/rag/retrieval/router/multi_dataset_react_route.py b/api/core/rag/retrieval/router/multi_dataset_react_route.py index 59d36229b3..8f3bec2704 100644 --- a/api/core/rag/retrieval/router/multi_dataset_react_route.py +++ b/api/core/rag/retrieval/router/multi_dataset_react_route.py @@ -58,15 +58,15 @@ class ReactMultiDatasetRouter: model_instance: ModelInstance, user_id: str, tenant_id: str, - ) -> Union[str, None]: + ) -> tuple[Union[str, None], LLMUsage]: """Given input, decided what to do. Returns: Action specifying what tool to use. """ if len(dataset_tools) == 0: - return None + return None, LLMUsage.empty_usage() elif len(dataset_tools) == 1: - return dataset_tools[0].name + return dataset_tools[0].name, LLMUsage.empty_usage() try: return self._react_invoke( @@ -78,7 +78,7 @@ class ReactMultiDatasetRouter: tenant_id=tenant_id, ) except Exception: - return None + return None, LLMUsage.empty_usage() def _react_invoke( self, @@ -91,7 +91,7 @@ class ReactMultiDatasetRouter: prefix: str = PREFIX, suffix: str = SUFFIX, format_instructions: str = FORMAT_INSTRUCTIONS, - ) -> Union[str, None]: + ) -> tuple[Union[str, None], LLMUsage]: prompt: Union[list[ChatModelMessage], CompletionModelPromptTemplate] if model_config.mode == "chat": prompt = self.create_chat_prompt( @@ -120,7 +120,7 @@ class ReactMultiDatasetRouter: memory=None, model_config=model_config, ) - result_text, _ = self._invoke_llm( + result_text, usage = self._invoke_llm( completion_param=model_config.parameters, model_instance=model_instance, prompt_messages=prompt_messages, @@ -131,8 +131,8 @@ class ReactMultiDatasetRouter: output_parser = StructuredChatOutputParser() react_decision = output_parser.parse(result_text) if isinstance(react_decision, ReactAction): - return react_decision.tool - return None + return react_decision.tool, usage + return None, usage def _invoke_llm( self, diff --git a/api/core/rag/splitter/fixed_text_splitter.py b/api/core/rag/splitter/fixed_text_splitter.py index 8356861242..801d2a2a52 100644 --- a/api/core/rag/splitter/fixed_text_splitter.py +++ b/api/core/rag/splitter/fixed_text_splitter.py @@ -2,6 +2,7 @@ from __future__ import annotations +import re from typing import Any from core.model_manager import ModelInstance @@ -52,7 +53,7 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter) """Create a new TextSplitter.""" super().__init__(**kwargs) self._fixed_separator = fixed_separator - self._separators = separators or ["\n\n", "\n", " ", ""] + self._separators = separators or ["\n\n", "\n", "。", ". 
", " ", ""] def split_text(self, text: str) -> list[str]: """Split incoming text and return chunks.""" @@ -90,16 +91,19 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter) # Now that we have the separator, split the text if separator: if separator == " ": - splits = text.split() + splits = re.split(r" +", text) else: splits = text.split(separator) splits = [item + separator if i < len(splits) else item for i, item in enumerate(splits)] else: splits = list(text) - splits = [s for s in splits if (s not in {"", "\n"})] + if separator == "\n": + splits = [s for s in splits if s != ""] + else: + splits = [s for s in splits if (s not in {"", "\n"})] _good_splits = [] _good_splits_lengths = [] # cache the lengths of the splits - _separator = "" if self._keep_separator else separator + _separator = separator if self._keep_separator else "" s_lens = self._length_function(splits) if separator != "": for s, s_len in zip(splits, s_lens): diff --git a/api/core/repositories/celery_workflow_execution_repository.py b/api/core/repositories/celery_workflow_execution_repository.py index eda7b54d6a..c7f5942f5f 100644 --- a/api/core/repositories/celery_workflow_execution_repository.py +++ b/api/core/repositories/celery_workflow_execution_repository.py @@ -74,7 +74,7 @@ class CeleryWorkflowExecutionRepository(WorkflowExecutionRepository): tenant_id = extract_tenant_id(user) if not tenant_id: raise ValueError("User must have a tenant_id or current_tenant_id") - self._tenant_id = tenant_id # type: ignore[assignment] # We've already checked tenant_id is not None + self._tenant_id = tenant_id # Store app context self._app_id = app_id @@ -108,7 +108,7 @@ class CeleryWorkflowExecutionRepository(WorkflowExecutionRepository): execution_data = execution.model_dump() # Queue the save operation as a Celery task (fire and forget) - save_workflow_execution_task.delay( + save_workflow_execution_task.delay( # type: ignore execution_data=execution_data, tenant_id=self._tenant_id, app_id=self._app_id or "", diff --git a/api/core/repositories/celery_workflow_node_execution_repository.py b/api/core/repositories/celery_workflow_node_execution_repository.py index 21a0b7eefe..9b8e45b1eb 100644 --- a/api/core/repositories/celery_workflow_node_execution_repository.py +++ b/api/core/repositories/celery_workflow_node_execution_repository.py @@ -81,7 +81,7 @@ class CeleryWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository): tenant_id = extract_tenant_id(user) if not tenant_id: raise ValueError("User must have a tenant_id or current_tenant_id") - self._tenant_id = tenant_id # type: ignore[assignment] # We've already checked tenant_id is not None + self._tenant_id = tenant_id # Store app context self._app_id = app_id diff --git a/api/core/repositories/factory.py b/api/core/repositories/factory.py index 854c122331..02fcabab5d 100644 --- a/api/core/repositories/factory.py +++ b/api/core/repositories/factory.py @@ -60,7 +60,7 @@ class DifyCoreRepositoryFactory: try: repository_class = import_string(class_path) - return repository_class( # type: ignore[no-any-return] + return repository_class( session_factory=session_factory, user=user, app_id=app_id, @@ -96,7 +96,7 @@ class DifyCoreRepositoryFactory: try: repository_class = import_string(class_path) - return repository_class( # type: ignore[no-any-return] + return repository_class( session_factory=session_factory, user=user, app_id=app_id, diff --git a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py 
b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py index 4399ec01cc..4436773d25 100644 --- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py @@ -104,7 +104,6 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER # Initialize in-memory cache for node executions - # Key: node_execution_id, Value: WorkflowNodeExecution (DB model) self._node_execution_cache: dict[str, WorkflowNodeExecutionModel] = {} # Initialize FileService for handling offloaded data @@ -332,17 +331,10 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) Args: execution: The NodeExecution domain entity to persist """ - # NOTE: As per the implementation of `WorkflowCycleManager`, - # the `save` method is invoked multiple times during the node's execution lifecycle, including: - # - # - When the node starts execution - # - When the node retries execution - # - When the node completes execution (either successfully or with failure) - # - # Only the final invocation will have `inputs` and `outputs` populated. - # - # This simplifies the logic for saving offloaded variables but introduces a tight coupling - # between this module and `WorkflowCycleManager`. + # NOTE: The workflow engine triggers `save` multiple times for a single node execution: + # when the node starts, any time it retries, and once more when it reaches a terminal state. + # Only the final call contains the complete inputs and outputs payloads, so earlier invocations + # must tolerate missing data without attempting to offload variables. 
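# An illustrative call sequence for a single node (status names assumed, not
# taken verbatim from the engine):
#   save(status=RUNNING,   inputs=None,  outputs=None)    # node started
#   save(status=RETRY,     inputs=None,  outputs=None)    # zero or more retries
#   save(status=SUCCEEDED, inputs={...}, outputs={...})   # terminal call, full payloads
# so only the terminal call should reach the variable-offloading path below.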
# Convert domain model to database model using tenant context and other attributes db_model = self._to_db_model(execution) diff --git a/api/core/tools/__base/tool.py b/api/core/tools/__base/tool.py index 6e0462c530..82616596f8 100644 --- a/api/core/tools/__base/tool.py +++ b/api/core/tools/__base/tool.py @@ -217,3 +217,16 @@ class Tool(ABC): return ToolInvokeMessage( type=ToolInvokeMessage.MessageType.JSON, message=ToolInvokeMessage.JsonMessage(json_object=object) ) + + def create_variable_message( + self, variable_name: str, variable_value: Any, stream: bool = False + ) -> ToolInvokeMessage: + """ + create a variable message + """ + return ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.VARIABLE, + message=ToolInvokeMessage.VariableMessage( + variable_name=variable_name, variable_value=variable_value, stream=stream + ), + ) diff --git a/api/core/tools/__base/tool_runtime.py b/api/core/tools/__base/tool_runtime.py index 3de0014c61..09bc817c01 100644 --- a/api/core/tools/__base/tool_runtime.py +++ b/api/core/tools/__base/tool_runtime.py @@ -1,7 +1,6 @@ from typing import Any -from openai import BaseModel -from pydantic import Field +from pydantic import BaseModel, Field from core.app.entities.app_invoke_entities import InvokeFrom from core.tools.entities.tool_entities import CredentialType, ToolInvokeFrom diff --git a/api/core/tools/builtin_tool/provider.py b/api/core/tools/builtin_tool/provider.py index 45fd16d684..a391136a5c 100644 --- a/api/core/tools/builtin_tool/provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -90,7 +90,7 @@ class BuiltinToolProviderController(ToolProviderController): tools.append( assistant_tool_class( provider=provider, - entity=ToolEntity(**tool), + entity=ToolEntity.model_validate(tool), runtime=ToolRuntime(tenant_id=""), ) ) @@ -111,7 +111,7 @@ class BuiltinToolProviderController(ToolProviderController): :return: the credentials schema """ - return self.get_credentials_schema_by_type(CredentialType.API_KEY.value) + return self.get_credentials_schema_by_type(CredentialType.API_KEY) def get_credentials_schema_by_type(self, credential_type: str) -> list[ProviderConfig]: """ @@ -122,7 +122,7 @@ class BuiltinToolProviderController(ToolProviderController): """ if credential_type == CredentialType.OAUTH2.value: return self.entity.oauth_schema.credentials_schema.copy() if self.entity.oauth_schema else [] - if credential_type == CredentialType.API_KEY.value: + if credential_type == CredentialType.API_KEY: return self.entity.credentials_schema.copy() if self.entity.credentials_schema else [] raise ValueError(f"Invalid credential type: {credential_type}") @@ -134,15 +134,15 @@ class BuiltinToolProviderController(ToolProviderController): """ return self.entity.oauth_schema.client_schema.copy() if self.entity.oauth_schema else [] - def get_supported_credential_types(self) -> list[str]: + def get_supported_credential_types(self) -> list[CredentialType]: """ returns the credential support type of the provider """ types = [] if self.entity.credentials_schema is not None and len(self.entity.credentials_schema) > 0: - types.append(CredentialType.API_KEY.value) + types.append(CredentialType.API_KEY) if self.entity.oauth_schema is not None and len(self.entity.oauth_schema.credentials_schema) > 0: - types.append(CredentialType.OAUTH2.value) + types.append(CredentialType.OAUTH2) return types def get_tools(self) -> list[BuiltinTool]: @@ -157,7 +157,7 @@ class BuiltinToolProviderController(ToolProviderController): """ returns the tool that the provider can provide """ 
- return next(filter(lambda x: x.entity.identity.name == tool_name, self.get_tools()), None) # type: ignore + return next(filter(lambda x: x.entity.identity.name == tool_name, self.get_tools()), None) @property def need_credentials(self) -> bool: diff --git a/api/core/tools/builtin_tool/providers/audio/tools/tts.py b/api/core/tools/builtin_tool/providers/audio/tools/tts.py index 8bc159bb85..5009f7ac21 100644 --- a/api/core/tools/builtin_tool/providers/audio/tools/tts.py +++ b/api/core/tools/builtin_tool/providers/audio/tools/tts.py @@ -43,7 +43,7 @@ class TTSTool(BuiltinTool): content_text=tool_parameters.get("text"), # type: ignore user=user_id, tenant_id=self.runtime.tenant_id, - voice=voice, # type: ignore + voice=voice, ) buffer = io.BytesIO() for chunk in tts: diff --git a/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py b/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py index 197b062e44..d0a41b940f 100644 --- a/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py +++ b/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py @@ -34,6 +34,7 @@ class LocaltimeToTimestampTool(BuiltinTool): yield self.create_text_message(f"{timestamp}") + # TODO: this method's type is messy @staticmethod def localtime_to_timestamp(localtime: str, time_format: str, local_tz=None) -> int | None: try: diff --git a/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py b/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py index babfa9bcd9..e23ae3b001 100644 --- a/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py +++ b/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py @@ -48,6 +48,6 @@ class TimezoneConversionTool(BuiltinTool): datetime_with_tz = input_timezone.localize(local_time) # timezone convert converted_datetime = datetime_with_tz.astimezone(output_timezone) - return converted_datetime.strftime(format=time_format) # type: ignore + return converted_datetime.strftime(time_format) except Exception as e: raise ToolInvokeError(str(e)) diff --git a/api/core/tools/custom_tool/tool.py b/api/core/tools/custom_tool/tool.py index 34d0f5c622..54c266ffcc 100644 --- a/api/core/tools/custom_tool/tool.py +++ b/api/core/tools/custom_tool/tool.py @@ -290,6 +290,7 @@ class ApiTool(Tool): method_lc ]( # https://discuss.python.org/t/type-inference-for-function-return-types/42926 url, + max_retries=0, params=params, headers=headers, cookies=cookies, @@ -394,11 +395,13 @@ class ApiTool(Tool): parsed_response = self.validate_and_parse_response(response) # assemble invoke message based on response type - if parsed_response.is_json and isinstance(parsed_response.content, dict): - yield self.create_json_message(parsed_response.content) + if parsed_response.is_json: + if isinstance(parsed_response.content, dict): + yield self.create_json_message(parsed_response.content) - # FIXES: https://github.com/langgenius/dify/pull/23456#issuecomment-3182413088 - # We need never break the original flows + # The yield below must be preserved to keep backward compatibility. 
+ # + # ref: https://github.com/langgenius/dify/pull/23456#issuecomment-3182413088 yield self.create_text_message(response.text) else: # Convert to string if needed and create text message diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index 00c4ab9dd7..8f7d1101cb 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -4,6 +4,7 @@ from typing import Any, Literal from pydantic import BaseModel, Field, field_validator +from core.entities.mcp_provider import MCPAuthentication, MCPConfiguration from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.__base.tool import ToolParameter from core.tools.entities.common_entities import I18nObject @@ -44,10 +45,14 @@ class ToolProviderApiEntity(BaseModel): server_url: str | None = Field(default="", description="The server url of the tool") updated_at: int = Field(default_factory=lambda: int(datetime.now().timestamp())) server_identifier: str | None = Field(default="", description="The server identifier of the MCP tool") - timeout: float | None = Field(default=30.0, description="The timeout of the MCP tool") - sse_read_timeout: float | None = Field(default=300.0, description="The SSE read timeout of the MCP tool") + masked_headers: dict[str, str] | None = Field(default=None, description="The masked headers of the MCP tool") original_headers: dict[str, str] | None = Field(default=None, description="The original headers of the MCP tool") + authentication: MCPAuthentication | None = Field(default=None, description="The OAuth config of the MCP tool") + is_dynamic_registration: bool = Field(default=True, description="Whether the MCP tool is dynamically registered") + configuration: MCPConfiguration | None = Field( + default=None, description="The timeout and sse_read_timeout of the MCP tool" + ) @field_validator("tools", mode="before") @classmethod @@ -61,7 +66,7 @@ class ToolProviderApiEntity(BaseModel): for tool in tools: if tool.get("parameters"): for parameter in tool.get("parameters"): - if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES.value: + if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES: parameter["type"] = "files" if parameter.get("input_schema") is None: parameter.pop("input_schema", None) @@ -70,8 +75,15 @@ class ToolProviderApiEntity(BaseModel): if self.type == ToolProviderType.MCP: optional_fields.update(self.optional_field("updated_at", self.updated_at)) optional_fields.update(self.optional_field("server_identifier", self.server_identifier)) - optional_fields.update(self.optional_field("timeout", self.timeout)) - optional_fields.update(self.optional_field("sse_read_timeout", self.sse_read_timeout)) + optional_fields.update( + self.optional_field( + "configuration", self.configuration.model_dump() if self.configuration else MCPConfiguration() + ) + ) + optional_fields.update( + self.optional_field("authentication", self.authentication.model_dump() if self.authentication else None) + ) + optional_fields.update(self.optional_field("is_dynamic_registration", self.is_dynamic_registration)) optional_fields.update(self.optional_field("masked_headers", self.masked_headers)) optional_fields.update(self.optional_field("original_headers", self.original_headers)) return { @@ -110,7 +122,9 @@ class ToolProviderCredentialApiEntity(BaseModel): class ToolProviderCredentialInfoApiEntity(BaseModel): - supported_credential_types: list[str] = Field(description="The supported credential types of 
the provider") + supported_credential_types: list[CredentialType] = Field( + description="The supported credential types of the provider" + ) is_oauth_custom_client_enabled: bool = Field( default=False, description="Whether the OAuth custom client is enabled for the provider" ) diff --git a/api/core/tools/entities/common_entities.py b/api/core/tools/entities/common_entities.py index 2c6d9c1964..21d310bbb9 100644 --- a/api/core/tools/entities/common_entities.py +++ b/api/core/tools/entities/common_entities.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, model_validator class I18nObject(BaseModel): @@ -11,11 +11,12 @@ class I18nObject(BaseModel): pt_BR: str | None = Field(default=None) ja_JP: str | None = Field(default=None) - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _populate_missing_locales(self): self.zh_Hans = self.zh_Hans or self.en_US self.pt_BR = self.pt_BR or self.en_US self.ja_JP = self.ja_JP or self.en_US + return self def to_dict(self): return {"zh_Hans": self.zh_Hans, "en_US": self.en_US, "pt_BR": self.pt_BR, "ja_JP": self.ja_JP} diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index a59b54216f..15a4f0aafd 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -113,7 +113,7 @@ class ApiProviderAuthType(StrEnum): # normalize & tiny alias for backward compatibility v = (value or "").strip().lower() if v == "api_key": - v = cls.API_KEY_HEADER.value + v = cls.API_KEY_HEADER for mode in cls: if mode.value == v: @@ -189,6 +189,11 @@ class ToolInvokeMessage(BaseModel): data: Mapping[str, Any] = Field(..., description="Detailed log data") metadata: Mapping[str, Any] = Field(default_factory=dict, description="The metadata of the log") + @field_validator("metadata", mode="before") + @classmethod + def _normalize_metadata(cls, value: Mapping[str, Any] | None) -> Mapping[str, Any]: + return value or {} + class RetrieverResourceMessage(BaseModel): retriever_resources: list[RetrievalSourceMetadata] = Field(..., description="retriever resources") context: str = Field(..., description="context") @@ -376,6 +381,11 @@ class ToolEntity(BaseModel): def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]: return v or [] + @field_validator("output_schema", mode="before") + @classmethod + def _normalize_output_schema(cls, value: Mapping[str, object] | None) -> Mapping[str, object]: + return value or {} + class OAuthSchema(BaseModel): client_schema: list[ProviderConfig] = Field( diff --git a/api/core/tools/mcp_tool/provider.py b/api/core/tools/mcp_tool/provider.py index 5b04f0edbe..557211c8c8 100644 --- a/api/core/tools/mcp_tool/provider.py +++ b/api/core/tools/mcp_tool/provider.py @@ -1,6 +1,6 @@ -import json from typing import Any, Self +from core.entities.mcp_provider import MCPProviderEntity from core.mcp.types import Tool as RemoteMCPTool from core.tools.__base.tool_provider import ToolProviderController from core.tools.__base.tool_runtime import ToolRuntime @@ -52,18 +52,25 @@ class MCPToolProviderController(ToolProviderController): """ from db provider """ - tools = [] - tools_data = json.loads(db_provider.tools) - remote_mcp_tools = [RemoteMCPTool(**tool) for tool in tools_data] - user = db_provider.load_user() + # Convert to entity first + provider_entity = db_provider.to_entity() + return cls.from_entity(provider_entity) + + @classmethod + def from_entity(cls, entity: 
MCPProviderEntity) -> Self: + """ + create a MCPToolProviderController from a MCPProviderEntity + """ + remote_mcp_tools = [RemoteMCPTool(**tool) for tool in entity.tools] + tools = [ ToolEntity( identity=ToolIdentity( - author=user.name if user else "Anonymous", + author="Anonymous", # Tool level author is not stored name=remote_mcp_tool.name, label=I18nObject(en_US=remote_mcp_tool.name, zh_Hans=remote_mcp_tool.name), - provider=db_provider.server_identifier, - icon=db_provider.icon, + provider=entity.provider_id, + icon=entity.icon if isinstance(entity.icon, str) else "", ), parameters=ToolTransformService.convert_mcp_schema_to_parameter(remote_mcp_tool.inputSchema), description=ToolDescription( @@ -72,30 +79,32 @@ class MCPToolProviderController(ToolProviderController): ), llm=remote_mcp_tool.description or "", ), + output_schema=remote_mcp_tool.outputSchema or {}, has_runtime_parameters=len(remote_mcp_tool.inputSchema) > 0, ) for remote_mcp_tool in remote_mcp_tools ] - + if not entity.icon: + raise ValueError("Database provider icon is required") return cls( entity=ToolProviderEntityWithPlugin( identity=ToolProviderIdentity( - author=user.name if user else "Anonymous", - name=db_provider.name, - label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name), + author="Anonymous", # Provider level author is not stored in entity + name=entity.name, + label=I18nObject(en_US=entity.name, zh_Hans=entity.name), description=I18nObject(en_US="", zh_Hans=""), - icon=db_provider.icon, + icon=entity.icon if isinstance(entity.icon, str) else "", ), plugin_id=None, credentials_schema=[], tools=tools, ), - provider_id=db_provider.server_identifier or "", - tenant_id=db_provider.tenant_id or "", - server_url=db_provider.decrypted_server_url, - headers=db_provider.decrypted_headers or {}, - timeout=db_provider.timeout, - sse_read_timeout=db_provider.sse_read_timeout, + provider_id=entity.provider_id, + tenant_id=entity.tenant_id, + server_url=entity.server_url, + headers=entity.headers, + timeout=entity.timeout, + sse_read_timeout=entity.sse_read_timeout, ) def _validate_credentials(self, user_id: str, credentials: dict[str, Any]): @@ -104,7 +113,7 @@ class MCPToolProviderController(ToolProviderController): """ pass - def get_tool(self, tool_name: str) -> MCPTool: # type: ignore + def get_tool(self, tool_name: str) -> MCPTool: """ return tool with given name """ @@ -127,7 +136,7 @@ class MCPToolProviderController(ToolProviderController): sse_read_timeout=self.sse_read_timeout, ) - def get_tools(self) -> list[MCPTool]: # type: ignore + def get_tools(self) -> list[MCPTool]: """ get all tools """ diff --git a/api/core/tools/mcp_tool/tool.py b/api/core/tools/mcp_tool/tool.py index 976d4dc942..a476859f29 100644 --- a/api/core/tools/mcp_tool/tool.py +++ b/api/core/tools/mcp_tool/tool.py @@ -3,12 +3,13 @@ import json from collections.abc import Generator from typing import Any -from core.mcp.error import MCPAuthError, MCPConnectionError -from core.mcp.mcp_client import MCPClient -from core.mcp.types import ImageContent, TextContent +from core.mcp.auth_client import MCPClientWithAuthRetry +from core.mcp.error import MCPConnectionError +from core.mcp.types import CallToolResult, ImageContent, TextContent from core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolProviderType +from core.tools.errors import ToolInvokeError class MCPTool(Tool): @@ -44,40 +45,32 @@ class MCPTool(Tool): app_id: 
str | None = None, message_id: str | None = None, ) -> Generator[ToolInvokeMessage, None, None]: - from core.tools.errors import ToolInvokeError - - try: - with MCPClient( - self.server_url, - self.provider_id, - self.tenant_id, - authed=True, - headers=self.headers, - timeout=self.timeout, - sse_read_timeout=self.sse_read_timeout, - ) as mcp_client: - tool_parameters = self._handle_none_parameter(tool_parameters) - result = mcp_client.invoke_tool(tool_name=self.entity.identity.name, tool_args=tool_parameters) - except MCPAuthError as e: - raise ToolInvokeError("Please auth the tool first") from e - except MCPConnectionError as e: - raise ToolInvokeError(f"Failed to connect to MCP server: {e}") from e - except Exception as e: - raise ToolInvokeError(f"Failed to invoke tool: {e}") from e - + result = self.invoke_remote_mcp_tool(tool_parameters) + # handle Dify tool output for content in result.content: if isinstance(content, TextContent): yield from self._process_text_content(content) elif isinstance(content, ImageContent): yield self._process_image_content(content) + # handle MCP structured output + if self.entity.output_schema and result.structuredContent: + for k, v in result.structuredContent.items(): + yield self.create_variable_message(k, v) def _process_text_content(self, content: TextContent) -> Generator[ToolInvokeMessage, None, None]: """Process text content and yield appropriate messages.""" - try: - content_json = json.loads(content.text) - yield from self._process_json_content(content_json) - except json.JSONDecodeError: - yield self.create_text_message(content.text) + # Check if content looks like JSON before attempting to parse + text = content.text.strip() + if text and text[0] in ("{", "[") and text[-1] in ("}", "]"): + try: + content_json = json.loads(text) + yield from self._process_json_content(content_json) + return + except json.JSONDecodeError: + pass + + # If not JSON or parsing failed, treat as plain text + yield self.create_text_message(content.text) def _process_json_content(self, content_json: Any) -> Generator[ToolInvokeMessage, None, None]: """Process JSON content based on its type.""" @@ -126,3 +119,44 @@ for key, value in parameter.items() if value is not None and not (isinstance(value, str) and value.strip() == "") } + + def invoke_remote_mcp_tool(self, tool_parameters: dict[str, Any]) -> CallToolResult: + headers = self.headers.copy() if self.headers else {} + tool_parameters = self._handle_none_parameter(tool_parameters) + + from sqlalchemy.orm import Session + + from extensions.ext_database import db + from services.tools.mcp_tools_manage_service import MCPToolManageService + + # Step 1: Load provider entity and credentials in a short-lived session + # This minimizes database connection hold time + with Session(db.engine, expire_on_commit=False) as session: + mcp_service = MCPToolManageService(session=session) + provider_entity = mcp_service.get_provider_entity(self.provider_id, self.tenant_id, by_server_id=True) + + # Decrypt and prepare all credentials before closing the session + server_url = provider_entity.decrypt_server_url() + headers = provider_entity.decrypt_headers() + + # Try to get an existing token and add it to the headers + if not headers: + tokens = provider_entity.retrieve_tokens() + if tokens and tokens.access_token: + headers["Authorization"] = f"{tokens.token_type.capitalize()} {tokens.access_token}" + + # Step 2: The session is now closed; perform network operations without holding a database connection + # MCPClientWithAuthRetry 
will create a new session lazily only if auth retry is needed + try: + with MCPClientWithAuthRetry( + server_url=server_url, + headers=headers, + timeout=self.timeout, + sse_read_timeout=self.sse_read_timeout, + provider_entity=provider_entity, + ) as mcp_client: + return mcp_client.invoke_tool(tool_name=self.entity.identity.name, tool_args=tool_parameters) + except MCPConnectionError as e: + raise ToolInvokeError(f"Failed to connect to MCP server: {e}") from e + except Exception as e: + raise ToolInvokeError(f"Failed to invoke tool: {e}") from e diff --git a/api/core/tools/tool_label_manager.py b/api/core/tools/tool_label_manager.py index 39646b7fc8..90d5a647e9 100644 --- a/api/core/tools/tool_label_manager.py +++ b/api/core/tools/tool_label_manager.py @@ -26,7 +26,7 @@ class ToolLabelManager: labels = cls.filter_tool_labels(labels) if isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): - provider_id = controller.provider_id # ty: ignore [unresolved-attribute] + provider_id = controller.provider_id else: raise ValueError("Unsupported tool type") @@ -51,7 +51,7 @@ class ToolLabelManager: Get tool labels """ if isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): - provider_id = controller.provider_id # ty: ignore [unresolved-attribute] + provider_id = controller.provider_id elif isinstance(controller, BuiltinToolProviderController): return controller.tool_labels else: @@ -85,7 +85,7 @@ class ToolLabelManager: provider_ids = [] for controller in tool_providers: assert isinstance(controller, ApiToolProviderController | WorkflowToolProviderController) - provider_ids.append(controller.provider_id) # ty: ignore [unresolved-attribute] + provider_ids.append(controller.provider_id) labels = db.session.scalars(select(ToolLabelBinding).where(ToolLabelBinding.tool_id.in_(provider_ids))).all() diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 9e5f5a7c23..ff7dcc0e55 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -14,17 +14,32 @@ from sqlalchemy.orm import Session from yarl import URL import contexts +from core.helper.provider_cache import ToolProviderCredentialsCache +from core.plugin.impl.tool import PluginToolManager +from core.tools.__base.tool_provider import ToolProviderController +from core.tools.__base.tool_runtime import ToolRuntime +from core.tools.mcp_tool.provider import MCPToolProviderController +from core.tools.mcp_tool.tool import MCPTool +from core.tools.plugin_tool.provider import PluginToolProviderController +from core.tools.plugin_tool.tool import PluginTool +from core.tools.utils.uuid_utils import is_valid_uuid +from core.tools.workflow_as_tool.provider import WorkflowToolProviderController +from core.workflow.runtime.variable_pool import VariablePool +from extensions.ext_database import db +from models.provider_ids import ToolProviderID +from services.enterprise.plugin_manager_service import PluginCredentialType +from services.tools.mcp_tools_manage_service import MCPToolManageService + +if TYPE_CHECKING: + from core.workflow.nodes.tool.entities import ToolEntity + from configs import dify_config from core.agent.entities import AgentToolEntity from core.app.entities.app_invoke_entities import InvokeFrom from core.helper.module_import_helper import load_single_subclass_from_source from core.helper.position_helper import is_filtered -from core.helper.provider_cache import ToolProviderCredentialsCache from core.model_runtime.utils.encoders import 
jsonable_encoder -from core.plugin.impl.tool import PluginToolManager from core.tools.__base.tool import Tool -from core.tools.__base.tool_provider import ToolProviderController -from core.tools.__base.tool_runtime import ToolRuntime from core.tools.builtin_tool.provider import BuiltinToolProviderController from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort from core.tools.builtin_tool.tool import BuiltinTool @@ -40,26 +55,16 @@ from core.tools.entities.tool_entities import ( ToolProviderType, ) from core.tools.errors import ToolProviderNotFoundError -from core.tools.mcp_tool.provider import MCPToolProviderController -from core.tools.mcp_tool.tool import MCPTool -from core.tools.plugin_tool.provider import PluginToolProviderController -from core.tools.plugin_tool.tool import PluginTool from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.configuration import ToolParameterConfigurationManager from core.tools.utils.encryption import create_provider_encrypter, create_tool_provider_encrypter -from core.tools.utils.uuid_utils import is_valid_uuid -from core.tools.workflow_as_tool.provider import WorkflowToolProviderController from core.tools.workflow_as_tool.tool import WorkflowTool -from extensions.ext_database import db -from models.provider_ids import ToolProviderID -from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider -from services.enterprise.plugin_manager_service import PluginCredentialType -from services.tools.mcp_tools_manage_service import MCPToolManageService +from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider from services.tools.tools_transform_service import ToolTransformService if TYPE_CHECKING: - from core.workflow.entities import VariablePool from core.workflow.nodes.tool.entities import ToolEntity + from core.workflow.runtime import VariablePool logger = logging.getLogger(__name__) @@ -326,7 +331,8 @@ class ToolManager: workflow_provider_stmt = select(WorkflowToolProvider).where( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id ) - workflow_provider = db.session.scalar(workflow_provider_stmt) + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + workflow_provider = session.scalar(workflow_provider_stmt) if workflow_provider is None: raise ToolProviderNotFoundError(f"workflow provider {provider_id} not found") @@ -718,7 +724,9 @@ class ToolManager: ) result_providers[f"workflow_provider.{user_provider.name}"] = user_provider if "mcp" in filters: - mcp_providers = MCPToolManageService.retrieve_mcp_tools(tenant_id, for_list=True) + with Session(db.engine) as session: + mcp_service = MCPToolManageService(session=session) + mcp_providers = mcp_service.list_providers(tenant_id=tenant_id, for_list=True) for mcp_provider in mcp_providers: result_providers[f"mcp_provider.{mcp_provider.name}"] = mcp_provider @@ -773,17 +781,12 @@ class ToolManager: :return: the provider controller, the credentials """ - provider: MCPToolProvider | None = ( - db.session.query(MCPToolProvider) - .where( - MCPToolProvider.server_identifier == provider_id, - MCPToolProvider.tenant_id == tenant_id, - ) - .first() - ) - - if provider is None: - raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found") + with Session(db.engine) as session: + mcp_service = MCPToolManageService(session=session) + try: + provider = mcp_service.get_provider(server_identifier=provider_id, tenant_id=tenant_id) + except 
ValueError: + raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found") controller = MCPToolProviderController.from_db(provider) @@ -921,16 +924,15 @@ class ToolManager: @classmethod def generate_mcp_tool_icon_url(cls, tenant_id: str, provider_id: str) -> Mapping[str, str] | str: try: - mcp_provider: MCPToolProvider | None = ( - db.session.query(MCPToolProvider) - .where(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == provider_id) - .first() - ) - - if mcp_provider is None: - raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found") - - return mcp_provider.provider_icon + with Session(db.engine) as session: + mcp_service = MCPToolManageService(session=session) + try: + mcp_provider = mcp_service.get_provider_entity( + provider_id=provider_id, tenant_id=tenant_id, by_server_id=True + ) + return mcp_provider.provider_icon + except ValueError: + raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found") except Exception: return {"background": "#252525", "content": "\ud83d\ude01"} @@ -1008,7 +1010,7 @@ class ToolManager: config = tool_configurations.get(parameter.name, {}) if not (config and isinstance(config, dict) and config.get("value") is not None): continue - tool_input = ToolNodeData.ToolInput(**tool_configurations.get(parameter.name, {})) + tool_input = ToolNodeData.ToolInput.model_validate(tool_configurations.get(parameter.name, {})) if tool_input.type == "variable": variable = variable_pool.get(tool_input.value) if variable is None: diff --git a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py index 75c0c6738e..20e10be075 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py @@ -18,7 +18,7 @@ from extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment default_retrieval_model: dict[str, Any] = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, @@ -126,7 +126,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): data_source_type=document.data_source_type, segment_id=segment.id, retriever_from=self.retriever_from, - score=document_score_list.get(segment.index_node_id, None), + score=document_score_list.get(segment.index_node_id), doc_metadata=document.doc_metadata, ) @@ -172,7 +172,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): if dataset.indexing_technique == "economy": # use keyword table query documents = RetrievalService.retrieve( - retrieval_method="keyword_search", + retrieval_method=RetrievalMethod.KEYWORD_SEARCH, dataset_id=dataset.id, query=query, top_k=retrieval_model.get("top_k") or 4, diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py index ac2967d0c1..dd0b4bedcf 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py @@ -18,6 +18,10 @@ class DatasetRetrieverBaseTool(BaseModel, ABC): retriever_from: str model_config = ConfigDict(arbitrary_types_allowed=True) + def run(self, query: str) -> str: + """Use the tool.""" + return self._run(query) + @abstractmethod 
def _run(self, query: str) -> str: """Use the tool. diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py index 0e2237befd..f96510fb45 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py @@ -17,7 +17,7 @@ from models.dataset import Document as DatasetDocument from services.external_knowledge_service import ExternalDatasetService default_retrieval_model: dict[str, Any] = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "reranking_mode": "reranking_model", @@ -130,7 +130,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): if dataset.indexing_technique == "economy": # use keyword table query documents = RetrievalService.retrieve( - retrieval_method="keyword_search", + retrieval_method=RetrievalMethod.KEYWORD_SEARCH, dataset_id=dataset.id, query=query, top_k=self.top_k, @@ -193,18 +193,18 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): DatasetDocument.enabled == True, DatasetDocument.archived == False, ) - document = db.session.scalar(dataset_document_stmt) # type: ignore + document = db.session.scalar(dataset_document_stmt) if dataset and document: source = RetrievalSourceMetadata( dataset_id=dataset.id, dataset_name=dataset.name, - document_id=document.id, # type: ignore - document_name=document.name, # type: ignore - data_source_type=document.data_source_type, # type: ignore + document_id=document.id, + document_name=document.name, + data_source_type=document.data_source_type, segment_id=segment.id, retriever_from=self.retriever_from, score=record.score or 0.0, - doc_metadata=document.doc_metadata, # type: ignore + doc_metadata=document.doc_metadata, ) if self.retriever_from == "dev": diff --git a/api/core/tools/utils/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever_tool.py index a62d419243..fca6e6f1c7 100644 --- a/api/core/tools/utils/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever_tool.py @@ -124,7 +124,7 @@ class DatasetRetrieverTool(Tool): yield self.create_text_message(text="please input query") else: # invoke dataset retriever tool - result = self.retrieval_tool._run(query=query) + result = self.retrieval_tool.run(query=query) yield self.create_text_message(text=result) def validate_credentials( diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index 0851a54338..ca2aa39861 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -12,7 +12,7 @@ from core.file import File, FileTransferMethod, FileType from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool_file_manager import ToolFileManager from libs.login import current_user -from models.account import Account +from models import Account logger = logging.getLogger(__name__) diff --git a/api/core/tools/utils/model_invocation_utils.py b/api/core/tools/utils/model_invocation_utils.py index 526f5c8b9a..b4bae08a9b 100644 --- a/api/core/tools/utils/model_invocation_utils.py +++ b/api/core/tools/utils/model_invocation_utils.py @@ -5,6 +5,7 @@ Therefore, a model manager is needed to list/invoke/validate models. 
""" import json +from decimal import Decimal from typing import cast from core.model_manager import ModelManager @@ -118,10 +119,10 @@ class ModelInvocationUtils: model_response="", prompt_tokens=prompt_tokens, answer_tokens=0, - answer_unit_price=0, - answer_price_unit=0, + answer_unit_price=Decimal(), + answer_price_unit=Decimal(), provider_response_latency=0, - total_price=0, + total_price=Decimal(), currency="USD", ) @@ -152,7 +153,7 @@ class ModelInvocationUtils: raise InvokeModelError(f"Invoke error: {e}") # update tool model invoke - tool_model_invoke.model_response = response.message.content + tool_model_invoke.model_response = str(response.message.content) if response.usage: tool_model_invoke.answer_tokens = response.usage.completion_tokens tool_model_invoke.answer_unit_price = response.usage.completion_unit_price diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index 2e306db6c7..6eabde3991 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -2,9 +2,10 @@ import re from json import dumps as json_dumps from json import loads as json_loads from json.decoder import JSONDecodeError +from typing import Any +import httpx from flask import request -from requests import get from yaml import YAMLError, safe_load from core.tools.entities.common_entities import I18nObject @@ -61,6 +62,11 @@ class ApiBasedToolSchemaParser: root = root[ref] interface["operation"]["parameters"][i] = root for parameter in interface["operation"]["parameters"]: + # Handle complex type defaults that are not supported by PluginParameter + default_value = None + if "schema" in parameter and "default" in parameter["schema"]: + default_value = ApiBasedToolSchemaParser._sanitize_default_value(parameter["schema"]["default"]) + tool_parameter = ToolParameter( name=parameter["name"], label=I18nObject(en_US=parameter["name"], zh_Hans=parameter["name"]), @@ -71,9 +77,7 @@ class ApiBasedToolSchemaParser: required=parameter.get("required", False), form=ToolParameter.ToolParameterForm.LLM, llm_description=parameter.get("description"), - default=parameter["schema"]["default"] - if "schema" in parameter and "default" in parameter["schema"] - else None, + default=default_value, placeholder=I18nObject( en_US=parameter.get("description", ""), zh_Hans=parameter.get("description", "") ), @@ -127,34 +131,38 @@ class ApiBasedToolSchemaParser: if "allOf" in prop_dict: del prop_dict["allOf"] - # parse body parameters - if "schema" in interface["operation"]["requestBody"]["content"][content_type]: - body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"] - required = body_schema.get("required", []) - properties = body_schema.get("properties", {}) - for name, property in properties.items(): - tool = ToolParameter( - name=name, - label=I18nObject(en_US=name, zh_Hans=name), - human_description=I18nObject( - en_US=property.get("description", ""), zh_Hans=property.get("description", "") - ), - type=ToolParameter.ToolParameterType.STRING, - required=name in required, - form=ToolParameter.ToolParameterForm.LLM, - llm_description=property.get("description", ""), - default=property.get("default", None), - placeholder=I18nObject( - en_US=property.get("description", ""), zh_Hans=property.get("description", "") - ), - ) + # parse body parameters + if "schema" in interface["operation"]["requestBody"]["content"][content_type]: + body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"] + required = body_schema.get("required", []) + 
properties = body_schema.get("properties", {}) + for name, property in properties.items(): + # Handle complex type defaults that are not supported by PluginParameter + default_value = ApiBasedToolSchemaParser._sanitize_default_value( + property.get("default", None) + ) - # check if there is a type - typ = ApiBasedToolSchemaParser._get_tool_parameter_type(property) - if typ: - tool.type = typ + tool = ToolParameter( + name=name, + label=I18nObject(en_US=name, zh_Hans=name), + human_description=I18nObject( + en_US=property.get("description", ""), zh_Hans=property.get("description", "") + ), + type=ToolParameter.ToolParameterType.STRING, + required=name in required, + form=ToolParameter.ToolParameterForm.LLM, + llm_description=property.get("description", ""), + default=default_value, + placeholder=I18nObject( + en_US=property.get("description", ""), zh_Hans=property.get("description", "") + ), + ) + # check if there is a type + typ = ApiBasedToolSchemaParser._get_tool_parameter_type(property) + if typ: + tool.type = typ - parameters.append(tool) + parameters.append(tool) # check if parameters is duplicated parameters_count = {} @@ -196,6 +204,22 @@ class ApiBasedToolSchemaParser: return bundles + @staticmethod + def _sanitize_default_value(value): + """ + Sanitize default values for PluginParameter compatibility. + Complex types (list, dict) are converted to None to avoid validation errors. + + Args: + value: The default value from OpenAPI schema + + Returns: + None for complex types (list, dict), otherwise the original value + """ + if isinstance(value, (list, dict)): + return None + return value + @staticmethod def _get_tool_parameter_type(parameter: dict) -> ToolParameter.ToolParameterType | None: parameter = parameter or {} @@ -216,7 +240,11 @@ class ApiBasedToolSchemaParser: return ToolParameter.ToolParameterType.STRING elif typ == "array": items = parameter.get("items") or parameter.get("schema", {}).get("items") - return ToolParameter.ToolParameterType.FILES if items and items.get("format") == "binary" else None + if items and items.get("format") == "binary": + return ToolParameter.ToolParameterType.FILES + else: + # For regular arrays, return ARRAY type instead of None + return ToolParameter.ToolParameterType.ARRAY else: return None @@ -241,7 +269,9 @@ class ApiBasedToolSchemaParser: return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi, extra_info=extra_info, warning=warning) @staticmethod - def parse_swagger_to_openapi(swagger: dict, extra_info: dict | None = None, warning: dict | None = None): + def parse_swagger_to_openapi( + swagger: dict, extra_info: dict | None = None, warning: dict | None = None + ) -> dict[str, Any]: warning = warning or {} """ parse swagger to openapi @@ -257,7 +287,7 @@ class ApiBasedToolSchemaParser: if len(servers) == 0: raise ToolApiSchemaError("No server found in the swagger yaml.") - openapi = { + converted_openapi: dict[str, Any] = { "openapi": "3.0.0", "info": { "title": info.get("title", "Swagger"), @@ -275,7 +305,7 @@ class ApiBasedToolSchemaParser: # convert paths for path, path_item in swagger["paths"].items(): - openapi["paths"][path] = {} + converted_openapi["paths"][path] = {} for method, operation in path_item.items(): if "operationId" not in operation: raise ToolApiSchemaError(f"No operationId found in operation {method} {path}.") @@ -286,7 +316,7 @@ class ApiBasedToolSchemaParser: if warning is not None: warning["missing_summary"] = f"No summary or description found in operation {method} {path}." 
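The parser changes above introduce two behaviors worth calling out: list- or dict-valued OpenAPI `default`s are now dropped before a `ToolParameter` is built, and non-binary `array` schemas map to `ToolParameter.ToolParameterType.ARRAY` instead of falling back to `None`. A minimal standalone sketch of the sanitizing rule (the function mirrors the patch's `_sanitize_default_value`; the assertion checks are illustrative, not from the patch):

```python
def sanitize_default_value(value):
    """Drop complex defaults, as in the patch's _sanitize_default_value.

    PluginParameter cannot validate list/dict defaults, so they become None;
    scalar defaults pass through unchanged.
    """
    if isinstance(value, (list, dict)):
        return None
    return value


# Illustrative checks of the intended behavior:
assert sanitize_default_value(["a", "b"]) is None      # array default dropped
assert sanitize_default_value({"k": "v"}) is None      # object default dropped
assert sanitize_default_value("text") == "text"        # scalars survive
assert sanitize_default_value(0) == 0                  # falsy scalars survive too
```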
- openapi["paths"][path][method] = { + converted_openapi["paths"][path][method] = { "operationId": operation["operationId"], "summary": operation.get("summary", ""), "description": operation.get("description", ""), @@ -295,13 +325,14 @@ class ApiBasedToolSchemaParser: } if "requestBody" in operation: - openapi["paths"][path][method]["requestBody"] = operation["requestBody"] + converted_openapi["paths"][path][method]["requestBody"] = operation["requestBody"] # convert definitions - for name, definition in swagger["definitions"].items(): - openapi["components"]["schemas"][name] = definition + if "definitions" in swagger: + for name, definition in swagger["definitions"].items(): + converted_openapi["components"]["schemas"][name] = definition - return openapi + return converted_openapi @staticmethod def parse_openai_plugin_json_to_tool_bundle( @@ -330,15 +361,20 @@ class ApiBasedToolSchemaParser: raise ToolNotSupportedError("Only openapi is supported now.") # get openapi yaml - response = get(api_url, headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "}, timeout=5) - - if response.status_code != 200: - raise ToolProviderNotFoundError("cannot get openapi yaml from url.") - - return ApiBasedToolSchemaParser.parse_openapi_yaml_to_tool_bundle( - response.text, extra_info=extra_info, warning=warning + response = httpx.get( + api_url, headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "}, timeout=5 ) + try: + if response.status_code != 200: + raise ToolProviderNotFoundError("cannot get openapi yaml from url.") + + return ApiBasedToolSchemaParser.parse_openapi_yaml_to_tool_bundle( + response.text, extra_info=extra_info, warning=warning + ) + finally: + response.close() + @staticmethod def auto_parse_to_tool_bundle( content: str, extra_info: dict | None = None, warning: dict | None = None @@ -384,7 +420,7 @@ class ApiBasedToolSchemaParser: openapi = ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle( loaded_content, extra_info=extra_info, warning=warning ) - schema_type = ApiProviderSchemaType.OPENAPI.value + schema_type = ApiProviderSchemaType.OPENAPI return openapi, schema_type except ToolApiSchemaError as e: openapi_error = e @@ -394,7 +430,7 @@ class ApiBasedToolSchemaParser: converted_swagger = ApiBasedToolSchemaParser.parse_swagger_to_openapi( loaded_content, extra_info=extra_info, warning=warning ) - schema_type = ApiProviderSchemaType.SWAGGER.value + schema_type = ApiProviderSchemaType.SWAGGER return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle( converted_swagger, extra_info=extra_info, warning=warning ), schema_type @@ -406,7 +442,7 @@ class ApiBasedToolSchemaParser: openapi_plugin = ApiBasedToolSchemaParser.parse_openai_plugin_json_to_tool_bundle( json_dumps(loaded_content), extra_info=extra_info, warning=warning ) - return openapi_plugin, ApiProviderSchemaType.OPENAI_PLUGIN.value + return openapi_plugin, ApiProviderSchemaType.OPENAI_PLUGIN except ToolNotSupportedError as e: # maybe it's not plugin at all openapi_plugin_error = e diff --git a/api/core/tools/utils/web_reader_tool.py b/api/core/tools/utils/web_reader_tool.py index 52c16c34a0..ef6913d0bd 100644 --- a/api/core/tools/utils/web_reader_tool.py +++ b/api/core/tools/utils/web_reader_tool.py @@ -6,8 +6,8 @@ from typing import Any, cast from urllib.parse import unquote import chardet -import cloudscraper # type: ignore -from readabilipy import simple_json_from_html_string # type: ignore +import cloudscraper +from readabilipy import simple_json_from_html_string from core.helper 
import ssrf_proxy from core.rag.extractor import extract_processor @@ -63,8 +63,8 @@ def get_url(url: str, user_agent: str | None = None) -> str: response = ssrf_proxy.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) elif response.status_code == 403: scraper = cloudscraper.create_scraper() - scraper.perform_request = ssrf_proxy.make_request # type: ignore - response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) # type: ignore + scraper.perform_request = ssrf_proxy.make_request + response = scraper.get(url, headers=headers, timeout=(120, 300)) if response.status_code != 200: return f"URL returned status code {response.status_code}." diff --git a/api/core/tools/utils/yaml_utils.py b/api/core/tools/utils/yaml_utils.py index e9b5dab7d3..071154ee71 100644 --- a/api/core/tools/utils/yaml_utils.py +++ b/api/core/tools/utils/yaml_utils.py @@ -3,7 +3,7 @@ from functools import lru_cache from pathlib import Path from typing import Any -import yaml # type: ignore +import yaml from yaml import YAMLError logger = logging.getLogger(__name__) diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index 4d9c8895fc..c8e91413cd 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -1,6 +1,7 @@ from collections.abc import Mapping from pydantic import Field +from sqlalchemy.orm import Session from core.app.app_config.entities import VariableEntity, VariableEntityType from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager @@ -20,6 +21,7 @@ from core.tools.entities.tool_entities import ( from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurationUtils from core.tools.workflow_as_tool.tool import WorkflowTool from extensions.ext_database import db +from models.account import Account from models.model import App, AppMode from models.tools import WorkflowToolProvider from models.workflow import Workflow @@ -29,6 +31,7 @@ VARIABLE_TO_PARAMETER_TYPE_MAPPING = { VariableEntityType.PARAGRAPH: ToolParameter.ToolParameterType.STRING, VariableEntityType.SELECT: ToolParameter.ToolParameterType.SELECT, VariableEntityType.NUMBER: ToolParameter.ToolParameterType.NUMBER, + VariableEntityType.CHECKBOX: ToolParameter.ToolParameterType.BOOLEAN, VariableEntityType.FILE: ToolParameter.ToolParameterType.FILE, VariableEntityType.FILE_LIST: ToolParameter.ToolParameterType.FILES, } @@ -44,29 +47,34 @@ class WorkflowToolProviderController(ToolProviderController): @classmethod def from_db(cls, db_provider: WorkflowToolProvider) -> "WorkflowToolProviderController": - app = db_provider.app + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + provider = session.get(WorkflowToolProvider, db_provider.id) if db_provider.id else None + if not provider: + raise ValueError("workflow provider not found") + app = session.get(App, provider.app_id) + if not app: + raise ValueError("app not found") - if not app: - raise ValueError("app not found") + user = session.get(Account, provider.user_id) if provider.user_id else None - controller = WorkflowToolProviderController( - entity=ToolProviderEntity( - identity=ToolProviderIdentity( - author=db_provider.user.name if db_provider.user_id and db_provider.user else "", - name=db_provider.label, - label=I18nObject(en_US=db_provider.label, zh_Hans=db_provider.label), - description=I18nObject(en_US=db_provider.description, zh_Hans=db_provider.description), - icon=db_provider.icon, 
+ controller = WorkflowToolProviderController( + entity=ToolProviderEntity( + identity=ToolProviderIdentity( + author=user.name if user else "", + name=provider.label, + label=I18nObject(en_US=provider.label, zh_Hans=provider.label), + description=I18nObject(en_US=provider.description, zh_Hans=provider.description), + icon=provider.icon, + ), + credentials_schema=[], + plugin_id=None, ), - credentials_schema=[], - plugin_id=None, - ), - provider_id=db_provider.id or "", - ) + provider_id=provider.id or "", + ) - # init tools - - controller.tools = [controller._get_db_provider_tool(db_provider, app)] + controller.tools = [ + controller._get_db_provider_tool(provider, app, session=session, user=user), + ] return controller @@ -74,7 +82,14 @@ class WorkflowToolProviderController(ToolProviderController): def provider_type(self) -> ToolProviderType: return ToolProviderType.WORKFLOW - def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> WorkflowTool: + def _get_db_provider_tool( + self, + db_provider: WorkflowToolProvider, + app: App, + *, + session: Session, + user: Account | None = None, + ) -> WorkflowTool: """ get db provider tool :param db_provider: the db provider @@ -82,7 +97,7 @@ class WorkflowToolProviderController(ToolProviderController): :return: the tool """ workflow: Workflow | None = ( - db.session.query(Workflow) + session.query(Workflow) .where(Workflow.app_id == db_provider.app_id, Workflow.version == db_provider.version) .first() ) @@ -99,9 +114,7 @@ class WorkflowToolProviderController(ToolProviderController): variables = WorkflowToolConfigurationUtils.get_workflow_graph_variables(graph) def fetch_workflow_variable(variable_name: str) -> VariableEntity | None: - return next(filter(lambda x: x.variable == variable_name, variables), None) # type: ignore - - user = db_provider.user + return next(filter(lambda x: x.variable == variable_name, variables), None) workflow_tool_parameters = [] for parameter in parameters: @@ -187,22 +200,25 @@ class WorkflowToolProviderController(ToolProviderController): if self.tools is not None: return self.tools - db_providers: WorkflowToolProvider | None = ( - db.session.query(WorkflowToolProvider) - .where( - WorkflowToolProvider.tenant_id == tenant_id, - WorkflowToolProvider.app_id == self.provider_id, + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + db_provider: WorkflowToolProvider | None = ( + session.query(WorkflowToolProvider) + .where( + WorkflowToolProvider.tenant_id == tenant_id, + WorkflowToolProvider.app_id == self.provider_id, + ) + .first() ) - .first() - ) - if not db_providers: - return [] - if not db_providers.app: - raise ValueError("app not found") + if not db_provider: + return [] - app = db_providers.app - self.tools = [self._get_db_provider_tool(db_providers, app)] + app = session.get(App, db_provider.app_id) + if not app: + raise ValueError("app not found") + + user = session.get(Account, db_provider.user_id) if db_provider.user_id else None + self.tools = [self._get_db_provider_tool(db_provider, app, session=session, user=user)] return self.tools diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 5adf04611d..2cd46647a0 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -1,11 +1,14 @@ import json import logging -from collections.abc import Generator -from typing import Any +from collections.abc import Generator, Mapping, Sequence +from typing import Any, cast +from 
flask import has_request_context from sqlalchemy import select +from sqlalchemy.orm import Session from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod +from core.model_runtime.entities.llm_entities import LLMUsage, LLMUsageMetadata from core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.tool_entities import ( @@ -18,7 +21,8 @@ from core.tools.errors import ToolInvokeError from extensions.ext_database import db from factories.file_factory import build_from_mapping from libs.login import current_user -from models.model import App +from models import Account, Tenant +from models.model import App, EndUser from models.workflow import Workflow logger = logging.getLogger(__name__) @@ -46,6 +50,7 @@ class WorkflowTool(Tool): self.workflow_entities = workflow_entities self.workflow_call_depth = workflow_call_depth self.label = label + self._latest_usage = LLMUsage.empty_usage() super().__init__(entity=entity, runtime=runtime) @@ -79,11 +84,17 @@ class WorkflowTool(Tool): generator = WorkflowAppGenerator() assert self.runtime is not None assert self.runtime.invoke_from is not None - assert current_user is not None + + user = self._resolve_user(user_id=user_id) + if user is None: + raise ToolInvokeError("User not found") + + self._latest_usage = LLMUsage.empty_usage() + result = generator.generate( app_model=app, workflow=workflow, - user=current_user, + user=user, args={"inputs": tool_parameters, "files": files}, invoke_from=self.runtime.invoke_from, streaming=False, @@ -103,9 +114,68 @@ class WorkflowTool(Tool): for file in files: yield self.create_file_message(file) # type: ignore + self._latest_usage = self._derive_usage_from_result(data) + yield self.create_text_message(json.dumps(outputs, ensure_ascii=False)) yield self.create_json_message(outputs) + @property + def latest_usage(self) -> LLMUsage: + return self._latest_usage + + @classmethod + def _derive_usage_from_result(cls, data: Mapping[str, Any]) -> LLMUsage: + usage_dict = cls._extract_usage_dict(data) + if usage_dict is not None: + return LLMUsage.from_metadata(cast(LLMUsageMetadata, dict(usage_dict))) + + total_tokens = data.get("total_tokens") + total_price = data.get("total_price") + if total_tokens is None and total_price is None: + return LLMUsage.empty_usage() + + usage_metadata: dict[str, Any] = {} + if total_tokens is not None: + try: + usage_metadata["total_tokens"] = int(str(total_tokens)) + except (TypeError, ValueError): + pass + if total_price is not None: + usage_metadata["total_price"] = str(total_price) + currency = data.get("currency") + if currency is not None: + usage_metadata["currency"] = currency + + if not usage_metadata: + return LLMUsage.empty_usage() + + return LLMUsage.from_metadata(cast(LLMUsageMetadata, usage_metadata)) + + @classmethod + def _extract_usage_dict(cls, payload: Mapping[str, Any]) -> Mapping[str, Any] | None: + usage_candidate = payload.get("usage") + if isinstance(usage_candidate, Mapping): + return usage_candidate + + metadata_candidate = payload.get("metadata") + if isinstance(metadata_candidate, Mapping): + usage_candidate = metadata_candidate.get("usage") + if isinstance(usage_candidate, Mapping): + return usage_candidate + + for value in payload.values(): + if isinstance(value, Mapping): + found = cls._extract_usage_dict(value) + if found is not None: + return found + elif isinstance(value, Sequence) and not isinstance(value, (str, bytes, bytearray)): + for item in value: + if isinstance(item, Mapping): + 
found = cls._extract_usage_dict(item) + if found is not None: + return found + return None + def fork_tool_runtime(self, runtime: ToolRuntime) -> "WorkflowTool": """ fork a new tool with metadata @@ -123,20 +193,66 @@ class WorkflowTool(Tool): label=self.label, ) + def _resolve_user(self, user_id: str) -> Account | EndUser | None: + """ + Resolve user object in both HTTP and worker contexts. + + In HTTP context: dereference the current_user LocalProxy (can return Account or EndUser). + In worker context: load Account from database by user_id (only returns Account, never EndUser). + + Returns: + Account | EndUser | None: The resolved user object, or None if resolution fails. + """ + if has_request_context(): + return self._resolve_user_from_request() + else: + return self._resolve_user_from_database(user_id=user_id) + + def _resolve_user_from_request(self) -> Account | EndUser | None: + """ + Resolve user from Flask request context. + """ + try: + # Note: `current_user` is a LocalProxy. Never compare it with None directly. + return getattr(current_user, "_get_current_object", lambda: current_user)() + except Exception as e: + logger.warning("Failed to resolve user from request context: %s", e) + return None + + def _resolve_user_from_database(self, user_id: str) -> Account | None: + """ + Resolve user from database (worker/Celery context). + """ + + user_stmt = select(Account).where(Account.id == user_id) + user = db.session.scalar(user_stmt) + if not user: + return None + + tenant_stmt = select(Tenant).where(Tenant.id == self.runtime.tenant_id) + tenant = db.session.scalar(tenant_stmt) + if not tenant: + return None + + user.current_tenant = tenant + + return user + def _get_workflow(self, app_id: str, version: str) -> Workflow: """ get the workflow by app id and version """ - if not version: - workflow = ( - db.session.query(Workflow) - .where(Workflow.app_id == app_id, Workflow.version != Workflow.VERSION_DRAFT) - .order_by(Workflow.created_at.desc()) - .first() - ) - else: - stmt = select(Workflow).where(Workflow.app_id == app_id, Workflow.version == version) - workflow = db.session.scalar(stmt) + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + if not version: + stmt = ( + select(Workflow) + .where(Workflow.app_id == app_id, Workflow.version != Workflow.VERSION_DRAFT) + .order_by(Workflow.created_at.desc()) + ) + workflow = session.scalars(stmt).first() + else: + stmt = select(Workflow).where(Workflow.app_id == app_id, Workflow.version == version) + workflow = session.scalar(stmt) if not workflow: raise ValueError("workflow not found or not published") @@ -148,7 +264,8 @@ class WorkflowTool(Tool): get the app by app id """ stmt = select(App).where(App.id == app_id) - app = db.session.scalar(stmt) + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + app = session.scalar(stmt) if not app: raise ValueError("app not found") diff --git a/api/core/variables/segment_group.py b/api/core/variables/segment_group.py index 0a41b64228..b363255b2c 100644 --- a/api/core/variables/segment_group.py +++ b/api/core/variables/segment_group.py @@ -4,7 +4,7 @@ from .types import SegmentType class SegmentGroup(Segment): value_type: SegmentType = SegmentType.GROUP - value: list[Segment] = None # type: ignore + value: list[Segment] @property def text(self): diff --git a/api/core/variables/segments.py b/api/core/variables/segments.py index 6c9e6d726e..406b4e6f93 100644 --- a/api/core/variables/segments.py +++ b/api/core/variables/segments.py @@ -19,7 
+19,7 @@ class Segment(BaseModel): model_config = ConfigDict(frozen=True) value_type: SegmentType - value: Any = None + value: Any @field_validator("value_type") @classmethod @@ -74,12 +74,12 @@ class NoneSegment(Segment): class StringSegment(Segment): value_type: SegmentType = SegmentType.STRING - value: str = None # type: ignore + value: str class FloatSegment(Segment): value_type: SegmentType = SegmentType.FLOAT - value: float = None # type: ignore + value: float # NOTE(QuantumGhost): seems that the equality for FloatSegment with `NaN` value has some problems. # The following tests cannot pass. # @@ -98,12 +98,12 @@ class FloatSegment(Segment): class IntegerSegment(Segment): value_type: SegmentType = SegmentType.INTEGER - value: int = None # type: ignore + value: int class ObjectSegment(Segment): value_type: SegmentType = SegmentType.OBJECT - value: Mapping[str, Any] = None # type: ignore + value: Mapping[str, Any] @property def text(self) -> str: @@ -136,7 +136,7 @@ class ArraySegment(Segment): class FileSegment(Segment): value_type: SegmentType = SegmentType.FILE - value: File = None # type: ignore + value: File @property def markdown(self) -> str: @@ -153,17 +153,17 @@ class FileSegment(Segment): class BooleanSegment(Segment): value_type: SegmentType = SegmentType.BOOLEAN - value: bool = None # type: ignore + value: bool class ArrayAnySegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_ANY - value: Sequence[Any] = None # type: ignore + value: Sequence[Any] class ArrayStringSegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_STRING - value: Sequence[str] = None # type: ignore + value: Sequence[str] @property def text(self) -> str: @@ -175,17 +175,17 @@ class ArrayStringSegment(ArraySegment): class ArrayNumberSegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_NUMBER - value: Sequence[float | int] = None # type: ignore + value: Sequence[float | int] class ArrayObjectSegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_OBJECT - value: Sequence[Mapping[str, Any]] = None # type: ignore + value: Sequence[Mapping[str, Any]] class ArrayFileSegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_FILE - value: Sequence[File] = None # type: ignore + value: Sequence[File] @property def markdown(self) -> str: @@ -205,7 +205,7 @@ class ArrayFileSegment(ArraySegment): class ArrayBooleanSegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_BOOLEAN - value: Sequence[bool] = None # type: ignore + value: Sequence[bool] def get_segment_discriminator(v: Any) -> SegmentType | None: diff --git a/api/core/workflow/entities/__init__.py b/api/core/workflow/entities/__init__.py index 007bf42aa6..185f0ad620 100644 --- a/api/core/workflow/entities/__init__.py +++ b/api/core/workflow/entities/__init__.py @@ -1,8 +1,7 @@ +from ..runtime.graph_runtime_state import GraphRuntimeState +from ..runtime.variable_pool import VariablePool from .agent import AgentNodeStrategyInit from .graph_init_params import GraphInitParams -from .graph_runtime_state import GraphRuntimeState -from .run_condition import RunCondition -from .variable_pool import VariablePool, VariableValue from .workflow_execution import WorkflowExecution from .workflow_node_execution import WorkflowNodeExecution @@ -10,9 +9,7 @@ __all__ = [ "AgentNodeStrategyInit", "GraphInitParams", "GraphRuntimeState", - "RunCondition", "VariablePool", - "VariableValue", "WorkflowExecution", "WorkflowNodeExecution", ] diff --git a/api/core/workflow/entities/graph_runtime_state.py 
b/api/core/workflow/entities/graph_runtime_state.py deleted file mode 100644 index 6362f291ea..0000000000 --- a/api/core/workflow/entities/graph_runtime_state.py +++ /dev/null @@ -1,160 +0,0 @@ -from copy import deepcopy - -from pydantic import BaseModel, PrivateAttr - -from core.model_runtime.entities.llm_entities import LLMUsage - -from .variable_pool import VariablePool - - -class GraphRuntimeState(BaseModel): - # Private attributes to prevent direct modification - _variable_pool: VariablePool = PrivateAttr() - _start_at: float = PrivateAttr() - _total_tokens: int = PrivateAttr(default=0) - _llm_usage: LLMUsage = PrivateAttr(default_factory=LLMUsage.empty_usage) - _outputs: dict[str, object] = PrivateAttr(default_factory=dict[str, object]) - _node_run_steps: int = PrivateAttr(default=0) - _ready_queue_json: str = PrivateAttr() - _graph_execution_json: str = PrivateAttr() - _response_coordinator_json: str = PrivateAttr() - - def __init__( - self, - *, - variable_pool: VariablePool, - start_at: float, - total_tokens: int = 0, - llm_usage: LLMUsage | None = None, - outputs: dict[str, object] | None = None, - node_run_steps: int = 0, - ready_queue_json: str = "", - graph_execution_json: str = "", - response_coordinator_json: str = "", - **kwargs: object, - ): - """Initialize the GraphRuntimeState with validation.""" - super().__init__(**kwargs) - - # Initialize private attributes with validation - self._variable_pool = variable_pool - - self._start_at = start_at - - if total_tokens < 0: - raise ValueError("total_tokens must be non-negative") - self._total_tokens = total_tokens - - if llm_usage is None: - llm_usage = LLMUsage.empty_usage() - self._llm_usage = llm_usage - - if outputs is None: - outputs = {} - self._outputs = deepcopy(outputs) - - if node_run_steps < 0: - raise ValueError("node_run_steps must be non-negative") - self._node_run_steps = node_run_steps - - self._ready_queue_json = ready_queue_json - self._graph_execution_json = graph_execution_json - self._response_coordinator_json = response_coordinator_json - - @property - def variable_pool(self) -> VariablePool: - """Get the variable pool.""" - return self._variable_pool - - @property - def start_at(self) -> float: - """Get the start time.""" - return self._start_at - - @start_at.setter - def start_at(self, value: float) -> None: - """Set the start time.""" - self._start_at = value - - @property - def total_tokens(self) -> int: - """Get the total tokens count.""" - return self._total_tokens - - @total_tokens.setter - def total_tokens(self, value: int): - """Set the total tokens count.""" - if value < 0: - raise ValueError("total_tokens must be non-negative") - self._total_tokens = value - - @property - def llm_usage(self) -> LLMUsage: - """Get the LLM usage info.""" - # Return a copy to prevent external modification - return self._llm_usage.model_copy() - - @llm_usage.setter - def llm_usage(self, value: LLMUsage): - """Set the LLM usage info.""" - self._llm_usage = value.model_copy() - - @property - def outputs(self) -> dict[str, object]: - """Get a copy of the outputs dictionary.""" - return deepcopy(self._outputs) - - @outputs.setter - def outputs(self, value: dict[str, object]) -> None: - """Set the outputs dictionary.""" - self._outputs = deepcopy(value) - - def set_output(self, key: str, value: object) -> None: - """Set a single output value.""" - self._outputs[key] = deepcopy(value) - - def get_output(self, key: str, default: object = None) -> object: - """Get a single output value.""" - return 
deepcopy(self._outputs.get(key, default)) - - def update_outputs(self, updates: dict[str, object]) -> None: - """Update multiple output values.""" - for key, value in updates.items(): - self._outputs[key] = deepcopy(value) - - @property - def node_run_steps(self) -> int: - """Get the node run steps count.""" - return self._node_run_steps - - @node_run_steps.setter - def node_run_steps(self, value: int) -> None: - """Set the node run steps count.""" - if value < 0: - raise ValueError("node_run_steps must be non-negative") - self._node_run_steps = value - - def increment_node_run_steps(self) -> None: - """Increment the node run steps by 1.""" - self._node_run_steps += 1 - - def add_tokens(self, tokens: int) -> None: - """Add tokens to the total count.""" - if tokens < 0: - raise ValueError("tokens must be non-negative") - self._total_tokens += tokens - - @property - def ready_queue_json(self) -> str: - """Get a copy of the ready queue state.""" - return self._ready_queue_json - - @property - def graph_execution_json(self) -> str: - """Get a copy of the serialized graph execution state.""" - return self._graph_execution_json - - @property - def response_coordinator_json(self) -> str: - """Get a copy of the serialized response coordinator state.""" - return self._response_coordinator_json diff --git a/api/core/workflow/entities/run_condition.py b/api/core/workflow/entities/run_condition.py deleted file mode 100644 index 7b9a379215..0000000000 --- a/api/core/workflow/entities/run_condition.py +++ /dev/null @@ -1,21 +0,0 @@ -import hashlib -from typing import Literal - -from pydantic import BaseModel - -from core.workflow.utils.condition.entities import Condition - - -class RunCondition(BaseModel): - type: Literal["branch_identify", "condition"] - """condition type""" - - branch_identify: str | None = None - """branch identify like: sourceHandle, required when type is branch_identify""" - - conditions: list[Condition] | None = None - """conditions to run the node, required when type is condition""" - - @property - def hash(self) -> str: - return hashlib.sha256(self.model_dump_json().encode()).hexdigest() diff --git a/api/core/workflow/enums.py b/api/core/workflow/enums.py index 00a125660a..83b9281e51 100644 --- a/api/core/workflow/enums.py +++ b/api/core/workflow/enums.py @@ -1,7 +1,7 @@ -from enum import Enum, StrEnum +from enum import StrEnum -class NodeState(Enum): +class NodeState(StrEnum): """State of a node or edge during workflow execution.""" UNKNOWN = "unknown" @@ -58,6 +58,7 @@ class NodeType(StrEnum): DOCUMENT_EXTRACTOR = "document-extractor" LIST_OPERATOR = "list-operator" AGENT = "agent" + HUMAN_INPUT = "human-input" class NodeExecutionType(StrEnum): @@ -96,6 +97,7 @@ class WorkflowExecutionStatus(StrEnum): FAILED = "failed" STOPPED = "stopped" PARTIAL_SUCCEEDED = "partial-succeeded" + PAUSED = "paused" class WorkflowNodeExecutionMetadataKey(StrEnum): diff --git a/api/core/workflow/graph/__init__.py b/api/core/workflow/graph/__init__.py index 31a81d494e..4830ea83d3 100644 --- a/api/core/workflow/graph/__init__.py +++ b/api/core/workflow/graph/__init__.py @@ -1,16 +1,11 @@ from .edge import Edge -from .graph import Graph, NodeFactory -from .graph_runtime_state_protocol import ReadOnlyGraphRuntimeState, ReadOnlyVariablePool +from .graph import Graph, GraphBuilder, NodeFactory from .graph_template import GraphTemplate -from .read_only_state_wrapper import ReadOnlyGraphRuntimeStateWrapper, ReadOnlyVariablePoolWrapper __all__ = [ "Edge", "Graph", + "GraphBuilder", "GraphTemplate", 
"NodeFactory", - "ReadOnlyGraphRuntimeState", - "ReadOnlyGraphRuntimeStateWrapper", - "ReadOnlyVariablePool", - "ReadOnlyVariablePoolWrapper", ] diff --git a/api/core/workflow/graph/graph.py b/api/core/workflow/graph/graph.py index 330e14de81..d04724425c 100644 --- a/api/core/workflow/graph/graph.py +++ b/api/core/workflow/graph/graph.py @@ -3,11 +3,12 @@ from collections import defaultdict from collections.abc import Mapping, Sequence from typing import Protocol, cast, final -from core.workflow.enums import NodeExecutionType, NodeState, NodeType +from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeState, NodeType from core.workflow.nodes.base.node import Node from libs.typing import is_str, is_str_dict from .edge import Edge +from .validation import get_graph_validator logger = logging.getLogger(__name__) @@ -195,6 +196,23 @@ class Graph: return nodes + @classmethod + def new(cls) -> "GraphBuilder": + """Create a fluent builder for assembling a graph programmatically.""" + + return GraphBuilder(graph_cls=cls) + + @classmethod + def _promote_fail_branch_nodes(cls, nodes: dict[str, Node]) -> None: + """ + Promote nodes configured with FAIL_BRANCH error strategy to branch execution type. + + :param nodes: mapping of node ID to node instance + """ + for node in nodes.values(): + if node.error_strategy == ErrorStrategy.FAIL_BRANCH: + node.execution_type = NodeExecutionType.BRANCH + @classmethod def _mark_inactive_root_branches( cls, @@ -301,6 +319,9 @@ class Graph: # Create node instances nodes = cls._create_node_instances(node_configs_map, node_factory) + # Promote fail-branch nodes to branch execution type at graph level + cls._promote_fail_branch_nodes(nodes) + # Get root node instance root_node = nodes[root_node_id] @@ -308,7 +329,7 @@ class Graph: cls._mark_inactive_root_branches(nodes, edges, in_edges, out_edges, root_node_id) # Create and return the graph - return cls( + graph = cls( nodes=nodes, edges=edges, in_edges=in_edges, @@ -316,6 +337,11 @@ class Graph: root_node=root_node, ) + # Validate the graph structure using built-in validators + get_graph_validator().validate(graph) + + return graph + @property def node_ids(self) -> list[str]: """ @@ -344,3 +370,96 @@ class Graph: """ edge_ids = self.in_edges.get(node_id, []) return [self.edges[eid] for eid in edge_ids if eid in self.edges] + + +@final +class GraphBuilder: + """Fluent helper for constructing simple graphs, primarily for tests.""" + + def __init__(self, *, graph_cls: type[Graph]): + self._graph_cls = graph_cls + self._nodes: list[Node] = [] + self._nodes_by_id: dict[str, Node] = {} + self._edges: list[Edge] = [] + self._edge_counter = 0 + + def add_root(self, node: Node) -> "GraphBuilder": + """Register the root node. 
Must be called exactly once.""" + + if self._nodes: + raise ValueError("Root node has already been added") + self._register_node(node) + self._nodes.append(node) + return self + + def add_node( + self, + node: Node, + *, + from_node_id: str | None = None, + source_handle: str = "source", + ) -> "GraphBuilder": + """Append a node and connect it from the specified predecessor.""" + + if not self._nodes: + raise ValueError("Root node must be added before adding other nodes") + + predecessor_id = from_node_id or self._nodes[-1].id + if predecessor_id not in self._nodes_by_id: + raise ValueError(f"Predecessor node '{predecessor_id}' not found") + + predecessor = self._nodes_by_id[predecessor_id] + self._register_node(node) + self._nodes.append(node) + + edge_id = f"edge_{self._edge_counter}" + self._edge_counter += 1 + edge = Edge(id=edge_id, tail=predecessor.id, head=node.id, source_handle=source_handle) + self._edges.append(edge) + + return self + + def connect(self, *, tail: str, head: str, source_handle: str = "source") -> "GraphBuilder": + """Connect two existing nodes without adding a new node.""" + + if tail not in self._nodes_by_id: + raise ValueError(f"Tail node '{tail}' not found") + if head not in self._nodes_by_id: + raise ValueError(f"Head node '{head}' not found") + + edge_id = f"edge_{self._edge_counter}" + self._edge_counter += 1 + edge = Edge(id=edge_id, tail=tail, head=head, source_handle=source_handle) + self._edges.append(edge) + + return self + + def build(self) -> Graph: + """Materialize the graph instance from the accumulated nodes and edges.""" + + if not self._nodes: + raise ValueError("Cannot build an empty graph") + + nodes = {node.id: node for node in self._nodes} + edges = {edge.id: edge for edge in self._edges} + in_edges: dict[str, list[str]] = defaultdict(list) + out_edges: dict[str, list[str]] = defaultdict(list) + + for edge in self._edges: + out_edges[edge.tail].append(edge.id) + in_edges[edge.head].append(edge.id) + + return self._graph_cls( + nodes=nodes, + edges=edges, + in_edges=dict(in_edges), + out_edges=dict(out_edges), + root_node=self._nodes[0], + ) + + def _register_node(self, node: Node) -> None: + if not node.id: + raise ValueError("Node must have a non-empty id") + if node.id in self._nodes_by_id: + raise ValueError(f"Duplicate node id detected: {node.id}") + self._nodes_by_id[node.id] = node diff --git a/api/core/workflow/graph/validation.py b/api/core/workflow/graph/validation.py new file mode 100644 index 0000000000..87aa7db2e4 --- /dev/null +++ b/api/core/workflow/graph/validation.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +from collections.abc import Sequence +from dataclasses import dataclass +from typing import TYPE_CHECKING, Protocol + +from core.workflow.enums import NodeExecutionType, NodeType + +if TYPE_CHECKING: + from .graph import Graph + + +@dataclass(frozen=True, slots=True) +class GraphValidationIssue: + """Immutable value object describing a single validation issue.""" + + code: str + message: str + node_id: str | None = None + + +class GraphValidationError(ValueError): + """Raised when graph validation fails.""" + + def __init__(self, issues: Sequence[GraphValidationIssue]) -> None: + if not issues: + raise ValueError("GraphValidationError requires at least one issue.") + self.issues: tuple[GraphValidationIssue, ...] 
= tuple(issues) + message = "; ".join(f"[{issue.code}] {issue.message}" for issue in self.issues) + super().__init__(message) + + +class GraphValidationRule(Protocol): + """Protocol that individual validation rules must satisfy.""" + + def validate(self, graph: Graph) -> Sequence[GraphValidationIssue]: + """Validate the provided graph and return any discovered issues.""" + ... + + +@dataclass(frozen=True, slots=True) +class _EdgeEndpointValidator: + """Ensures all edges reference existing nodes.""" + + missing_node_code: str = "MISSING_NODE" + + def validate(self, graph: Graph) -> Sequence[GraphValidationIssue]: + issues: list[GraphValidationIssue] = [] + for edge in graph.edges.values(): + if edge.tail not in graph.nodes: + issues.append( + GraphValidationIssue( + code=self.missing_node_code, + message=f"Edge {edge.id} references unknown source node '{edge.tail}'.", + node_id=edge.tail, + ) + ) + if edge.head not in graph.nodes: + issues.append( + GraphValidationIssue( + code=self.missing_node_code, + message=f"Edge {edge.id} references unknown target node '{edge.head}'.", + node_id=edge.head, + ) + ) + return issues + + +@dataclass(frozen=True, slots=True) +class _RootNodeValidator: + """Validates root node invariants.""" + + invalid_root_code: str = "INVALID_ROOT" + container_entry_types: tuple[NodeType, ...] = (NodeType.ITERATION_START, NodeType.LOOP_START) + + def validate(self, graph: Graph) -> Sequence[GraphValidationIssue]: + root_node = graph.root_node + issues: list[GraphValidationIssue] = [] + if root_node.id not in graph.nodes: + issues.append( + GraphValidationIssue( + code=self.invalid_root_code, + message=f"Root node '{root_node.id}' is missing from the node registry.", + node_id=root_node.id, + ) + ) + return issues + + node_type = getattr(root_node, "node_type", None) + if root_node.execution_type != NodeExecutionType.ROOT and node_type not in self.container_entry_types: + issues.append( + GraphValidationIssue( + code=self.invalid_root_code, + message=f"Root node '{root_node.id}' must declare execution type 'root'.", + node_id=root_node.id, + ) + ) + return issues + + +@dataclass(frozen=True, slots=True) +class GraphValidator: + """Coordinates execution of graph validation rules.""" + + rules: tuple[GraphValidationRule, ...] + + def validate(self, graph: Graph) -> None: + """Validate the graph against all configured rules.""" + issues: list[GraphValidationIssue] = [] + for rule in self.rules: + issues.extend(rule.validate(graph)) + + if issues: + raise GraphValidationError(issues) + + +_DEFAULT_RULES: tuple[GraphValidationRule, ...] = ( + _EdgeEndpointValidator(), + _RootNodeValidator(), +) + + +def get_graph_validator() -> GraphValidator: + """Construct the validator composed of default rules.""" + return GraphValidator(_DEFAULT_RULES) diff --git a/api/core/workflow/graph_engine/command_channels/redis_channel.py b/api/core/workflow/graph_engine/command_channels/redis_channel.py index 056e17bf5d..4be3adb8f8 100644 --- a/api/core/workflow/graph_engine/command_channels/redis_channel.py +++ b/api/core/workflow/graph_engine/command_channels/redis_channel.py @@ -9,7 +9,7 @@ Each instance uses a unique key for its command queue. 
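The new `validation.py` is deliberately open-ended: any object exposing `validate(graph) -> Sequence[GraphValidationIssue]` satisfies the `GraphValidationRule` protocol, and `GraphValidator` simply concatenates the issues from every rule and raises `GraphValidationError` when any are found. A hypothetical extra rule, not part of this patch, showing how the protocol composes (it assumes only the `Graph` attributes visible in this diff: `nodes`, `root_node`, `in_edges`, `out_edges`):

```python
from dataclasses import dataclass

from core.workflow.graph.validation import (
    _DEFAULT_RULES,
    GraphValidationIssue,
    GraphValidator,
)


@dataclass(frozen=True, slots=True)
class IsolatedNodeValidator:
    """Hypothetical rule: flag non-root nodes with no inbound or outbound edges."""

    isolated_code: str = "ISOLATED_NODE"

    def validate(self, graph):
        issues = []
        for node_id in graph.nodes:
            if node_id == graph.root_node.id:
                continue  # the root legitimately has no inbound edges
            if not graph.in_edges.get(node_id) and not graph.out_edges.get(node_id):
                issues.append(
                    GraphValidationIssue(
                        code=self.isolated_code,
                        message=f"Node '{node_id}' is not connected to the graph.",
                        node_id=node_id,
                    )
                )
        return issues


# Composing with the defaults; validate() raises GraphValidationError on any issue.
validator = GraphValidator((*_DEFAULT_RULES, IsolatedNodeValidator()))
# validator.validate(graph)
```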
 import json
 from typing import TYPE_CHECKING, Any, final
 
-from ..entities.commands import AbortCommand, CommandType, GraphEngineCommand
+from ..entities.commands import AbortCommand, CommandType, GraphEngineCommand, PauseCommand
 
 if TYPE_CHECKING:
     from extensions.ext_redis import RedisClientWrapper
@@ -41,6 +41,7 @@ class RedisChannel:
         self._redis = redis_client
         self._key = channel_key
         self._command_ttl = command_ttl
+        self._pending_key = f"{channel_key}:pending"
 
     def fetch_commands(self) -> list[GraphEngineCommand]:
         """
@@ -49,6 +50,9 @@ class RedisChannel:
         Returns:
             List of pending commands (drains the Redis list)
         """
+        if not self._has_pending_commands():
+            return []
+
         commands: list[GraphEngineCommand] = []
 
         # Use pipeline for atomic operations
@@ -85,6 +89,7 @@ class RedisChannel:
         with self._redis.pipeline() as pipe:
             pipe.rpush(self._key, command_json)
             pipe.expire(self._key, self._command_ttl)
+            pipe.set(self._pending_key, "1", ex=self._command_ttl)
             pipe.execute()
 
     def _deserialize_command(self, data: dict[str, Any]) -> GraphEngineCommand | None:
@@ -105,10 +110,26 @@ class RedisChannel:
             command_type = CommandType(command_type_value)
 
             if command_type == CommandType.ABORT:
-                return AbortCommand(**data)
-            else:
-                # For other command types, use base class
-                return GraphEngineCommand(**data)
+                return AbortCommand.model_validate(data)
+            if command_type == CommandType.PAUSE:
+                return PauseCommand.model_validate(data)
+
+            # For other command types, use base class
+            return GraphEngineCommand.model_validate(data)
 
         except (ValueError, TypeError):
             return None
+
+    def _has_pending_commands(self) -> bool:
+        """
+        Check and consume the pending marker to avoid unnecessary list reads.
+
+        Returns:
+            True if commands should be fetched from Redis.
+        """
+        with self._redis.pipeline() as pipe:
+            pipe.get(self._pending_key)
+            pipe.delete(self._pending_key)
+            pending_value, _ = pipe.execute()
+
+        return pending_value is not None
diff --git a/api/core/workflow/graph_engine/command_processing/__init__.py b/api/core/workflow/graph_engine/command_processing/__init__.py
index 3460b52226..837f5e55fd 100644
--- a/api/core/workflow/graph_engine/command_processing/__init__.py
+++ b/api/core/workflow/graph_engine/command_processing/__init__.py
@@ -5,10 +5,11 @@
 This package handles external commands sent to the engine during execution.
 """
 
-from .command_handlers import AbortCommandHandler
+from .command_handlers import AbortCommandHandler, PauseCommandHandler
 from .command_processor import CommandProcessor
 
 __all__ = [
     "AbortCommandHandler",
     "CommandProcessor",
+    "PauseCommandHandler",
 ]
diff --git a/api/core/workflow/graph_engine/command_processing/command_handlers.py b/api/core/workflow/graph_engine/command_processing/command_handlers.py
index 3c51de99f3..c26c98c496 100644
--- a/api/core/workflow/graph_engine/command_processing/command_handlers.py
+++ b/api/core/workflow/graph_engine/command_processing/command_handlers.py
@@ -1,14 +1,10 @@
-"""
-Command handler implementations.
-"""
-
 import logging
 from typing import final
 
 from typing_extensions import override
 
 from ..domain.graph_execution import GraphExecution
-from ..entities.commands import AbortCommand, GraphEngineCommand
+from ..entities.commands import AbortCommand, GraphEngineCommand, PauseCommand
 from .command_processor import CommandHandler
 
 logger = logging.getLogger(__name__)
@@ -16,17 +12,17 @@ logger = logging.getLogger(__name__)
 
 @final
 class AbortCommandHandler(CommandHandler):
-    """Handles abort commands."""
-
     @override
     def handle(self, command: GraphEngineCommand, execution: GraphExecution) -> None:
-        """
-        Handle an abort command.
-
-        Args:
-            command: The abort command
-            execution: Graph execution to abort
-        """
         assert isinstance(command, AbortCommand)
         logger.debug("Aborting workflow %s: %s", execution.workflow_id, command.reason)
         execution.abort(command.reason or "User requested abort")
+
+
+@final
+class PauseCommandHandler(CommandHandler):
+    @override
+    def handle(self, command: GraphEngineCommand, execution: GraphExecution) -> None:
+        assert isinstance(command, PauseCommand)
+        logger.debug("Pausing workflow %s: %s", execution.workflow_id, command.reason)
+        execution.pause(command.reason)
diff --git a/api/core/workflow/graph_engine/domain/graph_execution.py b/api/core/workflow/graph_engine/domain/graph_execution.py
index b273ee9969..6482c927d6 100644
--- a/api/core/workflow/graph_engine/domain/graph_execution.py
+++ b/api/core/workflow/graph_engine/domain/graph_execution.py
@@ -40,6 +40,8 @@ class GraphExecutionState(BaseModel):
     started: bool = Field(default=False)
     completed: bool = Field(default=False)
     aborted: bool = Field(default=False)
+    paused: bool = Field(default=False)
+    pause_reason: str | None = Field(default=None)
     error: GraphExecutionErrorState | None = Field(default=None)
     exceptions_count: int = Field(default=0)
     node_executions: list[NodeExecutionState] = Field(default_factory=list[NodeExecutionState])
@@ -103,6 +105,8 @@ class GraphExecution:
     started: bool = False
     completed: bool = False
     aborted: bool = False
+    paused: bool = False
+    pause_reason: str | None = None
     error: Exception | None = None
     node_executions: dict[str, NodeExecution] = field(default_factory=dict[str, NodeExecution])
     exceptions_count: int = 0
@@ -126,6 +130,17 @@ class GraphExecution:
         self.aborted = True
         self.error = RuntimeError(f"Aborted: {reason}")
 
+    def pause(self, reason: str | None = None) -> None:
+        """Pause the graph execution without marking it complete."""
+        if self.completed:
+            raise RuntimeError("Cannot pause execution that has completed")
+        if self.aborted:
+            raise RuntimeError("Cannot pause execution that has been aborted")
+        if self.paused:
+            return
+        self.paused = True
+        self.pause_reason = reason
+
     def fail(self, error: Exception) -> None:
         """Mark the graph execution as failed."""
         self.error = error
@@ -140,7 +155,12 @@ class GraphExecution:
     @property
     def is_running(self) -> bool:
         """Check if the execution is currently running."""
-        return self.started and not self.completed and not self.aborted
+        return self.started and not self.completed and not self.aborted and not self.paused
+
+    @property
+    def is_paused(self) -> bool:
+        """Check if the execution is currently paused."""
+        return self.paused
 
     @property
     def has_error(self) -> bool:
@@ -173,6 +193,8 @@ class GraphExecution:
             started=self.started,
             completed=self.completed,
             aborted=self.aborted,
+            paused=self.paused,
+            pause_reason=self.pause_reason,
             error=_serialize_error(self.error),
             exceptions_count=self.exceptions_count,
             node_executions=node_states,
@@ -197,6 +219,8 @@ class GraphExecution:
         self.started = state.started
         self.completed = state.completed
         self.aborted = state.aborted
+        self.paused = state.paused
+        self.pause_reason = state.pause_reason
         self.error = _deserialize_error(state.error)
         self.exceptions_count = state.exceptions_count
         self.node_executions = {
diff --git a/api/core/workflow/graph_engine/entities/commands.py b/api/core/workflow/graph_engine/entities/commands.py
index 123ef3d449..6070ed8812 100644
--- a/api/core/workflow/graph_engine/entities/commands.py
+++ b/api/core/workflow/graph_engine/entities/commands.py
@@ -16,7 +16,6 @@ class CommandType(StrEnum):
 
     ABORT = "abort"
     PAUSE = "pause"
-    RESUME = "resume"
 
 
 class GraphEngineCommand(BaseModel):
@@ -31,3 +30,10 @@ class AbortCommand(GraphEngineCommand):
 
     command_type: CommandType = Field(default=CommandType.ABORT, description="Type of command")
     reason: str | None = Field(default=None, description="Optional reason for abort")
+
+
+class PauseCommand(GraphEngineCommand):
+    """Command to pause a running workflow execution."""
+
+    command_type: CommandType = Field(default=CommandType.PAUSE, description="Type of command")
+    reason: str | None = Field(default=None, description="Optional reason for pause")
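For reference, a minimal sketch of how a pause command round-trips through the serialization path above: `send_command` JSON-encodes the Pydantic model onto the Redis list, and `_deserialize_command` rebuilds it with `model_validate`. The literal payload shown here is illustrative.

import json

from core.workflow.graph_engine.entities.commands import CommandType, PauseCommand

# Illustrative payload; mirrors what RedisChannel would store on the list.
raw = json.dumps({"command_type": "pause", "reason": "User requested pause"})

data = json.loads(raw)
if CommandType(data["command_type"]) == CommandType.PAUSE:
    command = PauseCommand.model_validate(data)
    assert command.reason == "User requested pause"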
diff --git a/api/core/workflow/graph_engine/event_management/event_handlers.py b/api/core/workflow/graph_engine/event_management/event_handlers.py
index 7247b17967..b054ebd7ad 100644
--- a/api/core/workflow/graph_engine/event_management/event_handlers.py
+++ b/api/core/workflow/graph_engine/event_management/event_handlers.py
@@ -7,8 +7,8 @@ from collections.abc import Mapping
 from functools import singledispatchmethod
 from typing import TYPE_CHECKING, final
 
-from core.workflow.entities import GraphRuntimeState
-from core.workflow.enums import ErrorStrategy, NodeExecutionType
+from core.model_runtime.entities.llm_entities import LLMUsage
+from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeState
 from core.workflow.graph import Graph
 from core.workflow.graph_events import (
     GraphNodeEventBase,
@@ -23,11 +23,14 @@ from core.workflow.graph_events import (
     NodeRunLoopNextEvent,
     NodeRunLoopStartedEvent,
     NodeRunLoopSucceededEvent,
+    NodeRunPauseRequestedEvent,
+    NodeRunRetrieverResourceEvent,
     NodeRunRetryEvent,
     NodeRunStartedEvent,
     NodeRunStreamChunkEvent,
     NodeRunSucceededEvent,
 )
+from core.workflow.runtime import GraphRuntimeState
 
 from ..domain.graph_execution import GraphExecution
 from ..response_coordinator import ResponseStreamCoordinator
@@ -110,6 +113,7 @@ class EventHandler:
     @_dispatch.register(NodeRunLoopSucceededEvent)
     @_dispatch.register(NodeRunLoopFailedEvent)
     @_dispatch.register(NodeRunAgentLogEvent)
+    @_dispatch.register(NodeRunRetrieverResourceEvent)
     def _(self, event: GraphNodeEventBase) -> None:
         self._event_collector.collect(event)
 
@@ -125,6 +129,7 @@ class EventHandler:
         node_execution = self._graph_execution.get_or_create_node_execution(event.node_id)
         is_initial_attempt = node_execution.retry_count == 0
         node_execution.mark_started(event.id)
+        self._graph_runtime_state.increment_node_run_steps()
 
         # Track in response coordinator for stream ordering
         self._response_coordinator.track_node_execution(event.node_id, event.id)
@@ -163,6 +168,8 @@ class EventHandler:
         node_execution = self._graph_execution.get_or_create_node_execution(event.node_id)
         node_execution.mark_taken()
 
+        self._accumulate_node_usage(event.node_run_result.llm_usage)
+
         # Store outputs in variable pool
         self._store_node_outputs(event.node_id, event.node_run_result.outputs)
@@ -199,6 +206,18 @@
         # Collect the event
         self._event_collector.collect(event)
 
+    @_dispatch.register
+    def _(self, event: NodeRunPauseRequestedEvent) -> None:
+        """Handle pause requests emitted by nodes."""
+
+        pause_reason = event.reason or "Awaiting human input"
+        self._graph_execution.pause(pause_reason)
+        self._state_manager.finish_execution(event.node_id)
+        if event.node_id in self._graph.nodes:
+            self._graph.nodes[event.node_id].state = NodeState.UNKNOWN
+        self._graph_runtime_state.register_paused_node(event.node_id)
+        self._event_collector.collect(event)
+
     @_dispatch.register
     def _(self, event: NodeRunFailedEvent) -> None:
         """
@@ -212,6 +231,8 @@ class EventHandler:
         node_execution.mark_failed(event.error)
         self._graph_execution.record_node_failure()
 
+        self._accumulate_node_usage(event.node_run_result.llm_usage)
+
         result = self._error_handler.handle_node_failure(event)
 
         if result:
@@ -235,6 +256,8 @@ class EventHandler:
         node_execution = self._graph_execution.get_or_create_node_execution(event.node_id)
         node_execution.mark_taken()
 
+        self._accumulate_node_usage(event.node_run_result.llm_usage)
+
         # Persist outputs produced by the exception strategy (e.g. default values)
         self._store_node_outputs(event.node_id, event.node_run_result.outputs)
 
@@ -286,6 +309,19 @@ class EventHandler:
         self._state_manager.enqueue_node(event.node_id)
         self._state_manager.start_execution(event.node_id)
 
+    def _accumulate_node_usage(self, usage: LLMUsage) -> None:
+        """Accumulate token usage into the shared runtime state."""
+        if usage.total_tokens <= 0:
+            return
+
+        self._graph_runtime_state.add_tokens(usage.total_tokens)
+
+        current_usage = self._graph_runtime_state.llm_usage
+        if current_usage.total_tokens == 0:
+            self._graph_runtime_state.llm_usage = usage
+        else:
+            self._graph_runtime_state.llm_usage = current_usage.plus(usage)
+
     def _store_node_outputs(self, node_id: str, outputs: Mapping[str, object]) -> None:
         """
         Store node outputs in the variable pool.
diff --git a/api/core/workflow/graph_engine/event_management/event_manager.py b/api/core/workflow/graph_engine/event_management/event_manager.py
index 751a2a4352..689cf53cf0 100644
--- a/api/core/workflow/graph_engine/event_management/event_manager.py
+++ b/api/core/workflow/graph_engine/event_management/event_manager.py
@@ -97,6 +97,10 @@ class EventManager:
         """
         self._layers = layers
 
+    def notify_layers(self, event: GraphEngineEvent) -> None:
+        """Notify registered layers about an event without buffering it."""
+        self._notify_layers(event)
+
     def collect(self, event: GraphEngineEvent) -> None:
         """
         Thread-safe method to collect an event.
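The new `notify_layers` hook gives terminal events a direct path to layers, bypassing the buffered queue that `collect` feeds. A minimal observer sketch, assuming only the layer hooks exercised elsewhere in this diff (`on_graph_start`, `on_event`, `on_graph_end`); the layer itself is hypothetical:

from typing import final

from typing_extensions import override

from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_events import GraphEngineEvent, GraphRunPausedEvent


@final
class PauseLoggingLayer(GraphEngineLayer):
    """Hypothetical layer: remembers why a run was paused."""

    def __init__(self) -> None:
        super().__init__()
        self.pause_reason: str | None = None

    @override
    def on_graph_start(self) -> None:
        self.pause_reason = None

    @override
    def on_event(self, event: GraphEngineEvent) -> None:
        # Terminal events reach layers through EventManager.notify_layers().
        if isinstance(event, GraphRunPausedEvent):
            self.pause_reason = event.reason

    @override
    def on_graph_end(self, error: Exception | None) -> None:
        pass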
diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py
index a21fb7c022..dd2ca3f93b 100644
--- a/api/core/workflow/graph_engine/graph_engine.py
+++ b/api/core/workflow/graph_engine/graph_engine.py
@@ -9,28 +9,29 @@ import contextvars
 import logging
 import queue
 from collections.abc import Generator
-from typing import final
+from typing import TYPE_CHECKING, cast, final
 
 from flask import Flask, current_app
 
-from core.workflow.entities import GraphRuntimeState
 from core.workflow.enums import NodeExecutionType
 from core.workflow.graph import Graph
-from core.workflow.graph.read_only_state_wrapper import ReadOnlyGraphRuntimeStateWrapper
-from core.workflow.graph_engine.ready_queue import InMemoryReadyQueue
 from core.workflow.graph_events import (
     GraphEngineEvent,
     GraphNodeEventBase,
     GraphRunAbortedEvent,
     GraphRunFailedEvent,
     GraphRunPartialSucceededEvent,
+    GraphRunPausedEvent,
     GraphRunStartedEvent,
     GraphRunSucceededEvent,
 )
+from core.workflow.runtime import GraphRuntimeState, ReadOnlyGraphRuntimeStateWrapper
 
-from .command_processing import AbortCommandHandler, CommandProcessor
-from .domain import GraphExecution
-from .entities.commands import AbortCommand
+if TYPE_CHECKING:  # pragma: no cover - used only for static analysis
+    from core.workflow.runtime.graph_runtime_state import GraphProtocol
+
+from .command_processing import AbortCommandHandler, CommandProcessor, PauseCommandHandler
+from .entities.commands import AbortCommand, PauseCommand
 from .error_handler import ErrorHandler
 from .event_management import EventHandler, EventManager
 from .graph_state_manager import GraphStateManager
@@ -38,10 +39,13 @@ from .graph_traversal import EdgeProcessor, SkipPropagator
 from .layers.base import GraphEngineLayer
 from .orchestration import Dispatcher, ExecutionCoordinator
 from .protocols.command_channel import CommandChannel
-from .ready_queue import ReadyQueue, ReadyQueueState, create_ready_queue_from_state
-from .response_coordinator import ResponseStreamCoordinator
+from .ready_queue import ReadyQueue
 from .worker_management import WorkerPool
 
+if TYPE_CHECKING:
+    from core.workflow.graph_engine.domain.graph_execution import GraphExecution
+    from core.workflow.graph_engine.response_coordinator import ResponseStreamCoordinator
+
 logger = logging.getLogger(__name__)
@@ -67,17 +71,16 @@
     ) -> None:
         """Initialize the graph engine with all subsystems and dependencies."""
 
-        # Graph execution tracks the overall execution state
-        self._graph_execution = GraphExecution(workflow_id=workflow_id)
-        if graph_runtime_state.graph_execution_json != "":
-            self._graph_execution.loads(graph_runtime_state.graph_execution_json)
-
-        # === Core Dependencies ===
-        # Graph structure and configuration
+        # Bind runtime state to current workflow context
         self._graph = graph
         self._graph_runtime_state = graph_runtime_state
+        self._graph_runtime_state.configure(graph=cast("GraphProtocol", graph))
         self._command_channel = command_channel
 
+        # Graph execution tracks the overall execution state
+        self._graph_execution = cast("GraphExecution", self._graph_runtime_state.graph_execution)
+        self._graph_execution.workflow_id = workflow_id
+
         # === Worker Management Parameters ===
         # Parameters for dynamic worker pool scaling
         self._min_workers = min_workers
@@ -86,13 +89,7 @@
         self._scale_down_idle_time = scale_down_idle_time
 
         # === Execution Queues ===
-        # Create ready queue from saved state or initialize new one
-        self._ready_queue: ReadyQueue
-        if self._graph_runtime_state.ready_queue_json == "":
-            self._ready_queue = InMemoryReadyQueue()
-        else:
-            ready_queue_state = ReadyQueueState.model_validate_json(self._graph_runtime_state.ready_queue_json)
-            self._ready_queue = create_ready_queue_from_state(ready_queue_state)
+        self._ready_queue = cast(ReadyQueue, self._graph_runtime_state.ready_queue)
 
         # Queue for events generated during execution
         self._event_queue: queue.Queue[GraphNodeEventBase] = queue.Queue()
@@ -103,11 +100,7 @@
 
         # === Response Coordination ===
         # Coordinates response streaming from response nodes
-        self._response_coordinator = ResponseStreamCoordinator(
-            variable_pool=self._graph_runtime_state.variable_pool, graph=self._graph
-        )
-        if graph_runtime_state.response_coordinator_json != "":
-            self._response_coordinator.loads(graph_runtime_state.response_coordinator_json)
+        self._response_coordinator = cast("ResponseStreamCoordinator", self._graph_runtime_state.response_coordinator)
 
         # === Event Management ===
         # Event manager handles both collection and emission of events
@@ -133,19 +126,6 @@
             skip_propagator=self._skip_propagator,
         )
 
-        # === Event Handler Registry ===
-        # Central registry for handling all node execution events
-        self._event_handler_registry = EventHandler(
-            graph=self._graph,
-            graph_runtime_state=self._graph_runtime_state,
-            graph_execution=self._graph_execution,
-            response_coordinator=self._response_coordinator,
-            event_collector=self._event_manager,
-            edge_processor=self._edge_processor,
-            state_manager=self._state_manager,
-            error_handler=self._error_handler,
-        )
-
         # === Command Processing ===
         # Processes external commands (e.g., abort requests)
         self._command_processor = CommandProcessor(
@@ -153,12 +133,12 @@
             graph_execution=self._graph_execution,
         )
 
-        # Register abort command handler
+        # Register command handlers
         abort_handler = AbortCommandHandler()
-        self._command_processor.register_handler(
-            AbortCommand,
-            abort_handler,
-        )
+        self._command_processor.register_handler(AbortCommand, abort_handler)
+
+        pause_handler = PauseCommandHandler()
+        self._command_processor.register_handler(PauseCommand, pause_handler)
 
         # === Worker Pool Setup ===
         # Capture Flask app context for worker threads
@@ -191,12 +171,23 @@
         self._execution_coordinator = ExecutionCoordinator(
             graph_execution=self._graph_execution,
             state_manager=self._state_manager,
-            event_handler=self._event_handler_registry,
-            event_collector=self._event_manager,
             command_processor=self._command_processor,
             worker_pool=self._worker_pool,
         )
 
+        # === Event Handler Registry ===
+        # Central registry for handling all node execution events
+        self._event_handler_registry = EventHandler(
+            graph=self._graph,
+            graph_runtime_state=self._graph_runtime_state,
+            graph_execution=self._graph_execution,
+            response_coordinator=self._response_coordinator,
+            event_collector=self._event_manager,
+            edge_processor=self._edge_processor,
+            state_manager=self._state_manager,
+            error_handler=self._error_handler,
+        )
+
         # Dispatches events and manages execution flow
         self._dispatcher = Dispatcher(
             event_queue=self._event_queue,
@@ -237,26 +228,41 @@
             # Initialize layers
             self._initialize_layers()
 
-            # Start execution
-            self._graph_execution.start()
+            is_resume = self._graph_execution.started
+            if not is_resume:
+                self._graph_execution.start()
+            else:
+                self._graph_execution.paused = False
+                self._graph_execution.pause_reason = None
+
             start_event = GraphRunStartedEvent()
+            self._event_manager.notify_layers(start_event)
             yield start_event
 
             # Start subsystems
-            self._start_execution()
+            self._start_execution(resume=is_resume)
 
             # Yield events as they occur
            yield from self._event_manager.emit_events()
 
             # Handle completion
-            if self._graph_execution.aborted:
+            if self._graph_execution.is_paused:
+                paused_event = GraphRunPausedEvent(
+                    reason=self._graph_execution.pause_reason,
+                    outputs=self._graph_runtime_state.outputs,
+                )
+                self._event_manager.notify_layers(paused_event)
+                yield paused_event
+            elif self._graph_execution.aborted:
                 abort_reason = "Workflow execution aborted by user command"
                 if self._graph_execution.error:
                     abort_reason = str(self._graph_execution.error)
-                yield GraphRunAbortedEvent(
+                aborted_event = GraphRunAbortedEvent(
                     reason=abort_reason,
                     outputs=self._graph_runtime_state.outputs,
                 )
+                self._event_manager.notify_layers(aborted_event)
+                yield aborted_event
             elif self._graph_execution.has_error:
                 if self._graph_execution.error:
                     raise self._graph_execution.error
@@ -264,20 +270,26 @@
                 outputs = self._graph_runtime_state.outputs
                 exceptions_count = self._graph_execution.exceptions_count
                 if exceptions_count > 0:
-                    yield GraphRunPartialSucceededEvent(
+                    partial_event = GraphRunPartialSucceededEvent(
                         exceptions_count=exceptions_count,
                         outputs=outputs,
                     )
+                    self._event_manager.notify_layers(partial_event)
+                    yield partial_event
                 else:
-                    yield GraphRunSucceededEvent(
+                    succeeded_event = GraphRunSucceededEvent(
                         outputs=outputs,
                     )
+                    self._event_manager.notify_layers(succeeded_event)
+                    yield succeeded_event
 
         except Exception as e:
-            yield GraphRunFailedEvent(
+            failed_event = GraphRunFailedEvent(
                 error=str(e),
                 exceptions_count=self._graph_execution.exceptions_count,
            )
+            self._event_manager.notify_layers(failed_event)
+            yield failed_event
            raise
 
        finally:
@@ -299,8 +311,12 @@
            except Exception as e:
                logger.warning("Layer %s failed on_graph_start: %s", layer.__class__.__name__, e)
 
-    def _start_execution(self) -> None:
+    def _start_execution(self, *, resume: bool = False) -> None:
        """Start execution subsystems."""
+        paused_nodes: list[str] = []
+        if resume:
+            paused_nodes = self._graph_runtime_state.consume_paused_nodes()
+
        # Start worker pool (it calculates initial workers internally)
        self._worker_pool.start()
 
@@ -309,10 +325,15 @@
            if node.execution_type == NodeExecutionType.RESPONSE:
                self._response_coordinator.register(node.id)
 
-        # Enqueue root node
-        root_node = self._graph.root_node
-        self._state_manager.enqueue_node(root_node.id)
-        self._state_manager.start_execution(root_node.id)
+        if not resume:
+            # Enqueue root node
+            root_node = self._graph.root_node
+            self._state_manager.enqueue_node(root_node.id)
+            self._state_manager.start_execution(root_node.id)
+        else:
+            for node_id in paused_nodes:
+                self._state_manager.enqueue_node(node_id)
+                self._state_manager.start_execution(node_id)
 
        # Start dispatcher
        self._dispatcher.start()
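The resume path above keys off `GraphExecution.started`: a restored state that has already started but is paused re-enqueues only the nodes recorded via `register_paused_node`, instead of the root node. A schematic sketch of the intended call pattern; `make_engine` stands in for the full `GraphEngine(...)` constructor and is not a real helper:

from core.workflow.graph_events import GraphRunPausedEvent

engine = make_engine(graph, state, channel)  # hypothetical factory over GraphEngine(...)
for event in engine.run():                   # fresh state: starts from the root node
    if isinstance(event, GraphRunPausedEvent):
        break                                # paused nodes are now recorded in `state`

# Later, with the same (persisted and restored) runtime state:
resumed = make_engine(graph, state, channel)
for event in resumed.run():                  # started=True, so only paused nodes re-enqueue
    ...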
diff --git a/api/core/workflow/graph_engine/layers/base.py b/api/core/workflow/graph_engine/layers/base.py
index dfac49e11a..24c12c2934 100644
--- a/api/core/workflow/graph_engine/layers/base.py
+++ b/api/core/workflow/graph_engine/layers/base.py
@@ -7,9 +7,9 @@ intercept and respond to GraphEngine events.
 
 from abc import ABC, abstractmethod
 
-from core.workflow.graph.graph_runtime_state_protocol import ReadOnlyGraphRuntimeState
 from core.workflow.graph_engine.protocols.command_channel import CommandChannel
 from core.workflow.graph_events import GraphEngineEvent
+from core.workflow.runtime import ReadOnlyGraphRuntimeState
 
 
 class GraphEngineLayer(ABC):
diff --git a/api/core/workflow/graph_engine/layers/execution_limits.py b/api/core/workflow/graph_engine/layers/execution_limits.py
index e39af89837..a2d36d142d 100644
--- a/api/core/workflow/graph_engine/layers/execution_limits.py
+++ b/api/core/workflow/graph_engine/layers/execution_limits.py
@@ -10,7 +10,7 @@ When limits are exceeded, the layer automatically aborts execution.
 
 import logging
 import time
-from enum import Enum
+from enum import StrEnum
 from typing import final
 
 from typing_extensions import override
@@ -24,7 +24,7 @@ from core.workflow.graph_events import (
 )
 from core.workflow.graph_events.node import NodeRunFailedEvent, NodeRunSucceededEvent
 
 
-class LimitType(Enum):
+class LimitType(StrEnum):
     """Types of execution limits that can be exceeded."""
 
     STEP_LIMIT = "step_limit"
diff --git a/api/core/workflow/graph_engine/layers/persistence.py b/api/core/workflow/graph_engine/layers/persistence.py
new file mode 100644
index 0000000000..ecd8e12ca5
--- /dev/null
+++ b/api/core/workflow/graph_engine/layers/persistence.py
@@ -0,0 +1,410 @@
+"""Workflow persistence layer for GraphEngine.
+
+This layer mirrors the former ``WorkflowCycleManager`` responsibilities by
+listening to ``GraphEngineEvent`` instances directly and persisting workflow
+and node execution state via the injected repositories.
+
+The design keeps domain persistence concerns inside the engine thread, while
+allowing presentation layers to remain read-only observers of repository
+state.
+"""
+
+from collections.abc import Mapping
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Any, Union
+
+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity
+from core.ops.entities.trace_entity import TraceTaskName
+from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
+from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID
+from core.workflow.entities import WorkflowExecution, WorkflowNodeExecution
+from core.workflow.enums import (
+    SystemVariableKey,
+    WorkflowExecutionStatus,
+    WorkflowNodeExecutionMetadataKey,
+    WorkflowNodeExecutionStatus,
+    WorkflowType,
+)
+from core.workflow.graph_engine.layers.base import GraphEngineLayer
+from core.workflow.graph_events import (
+    GraphEngineEvent,
+    GraphRunAbortedEvent,
+    GraphRunFailedEvent,
+    GraphRunPartialSucceededEvent,
+    GraphRunPausedEvent,
+    GraphRunStartedEvent,
+    GraphRunSucceededEvent,
+    NodeRunExceptionEvent,
+    NodeRunFailedEvent,
+    NodeRunPauseRequestedEvent,
+    NodeRunRetryEvent,
+    NodeRunStartedEvent,
+    NodeRunSucceededEvent,
+)
+from core.workflow.node_events import NodeRunResult
+from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
+from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_entry import WorkflowEntry
+from libs.datetime_utils import naive_utc_now
+
+
+@dataclass(slots=True)
+class PersistenceWorkflowInfo:
+    """Static workflow metadata required for persistence."""
+
+    workflow_id: str
+    workflow_type: WorkflowType
+    version: str
+    graph_data: Mapping[str, Any]
+
+
+@dataclass(slots=True)
+class _NodeRuntimeSnapshot:
+    """Lightweight cache to keep node metadata across event phases."""
+
+    node_id: str
+    title: str
+    predecessor_node_id: str | None
+    iteration_id: str | None
+    loop_id: str | None
+    created_at: datetime
+
+
+class WorkflowPersistenceLayer(GraphEngineLayer):
+    """GraphEngine layer that persists workflow and node execution state."""
+
+    def __init__(
+        self,
+        *,
+        application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity],
+        workflow_info: PersistenceWorkflowInfo,
+        workflow_execution_repository: WorkflowExecutionRepository,
+        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
+        trace_manager: TraceQueueManager | None = None,
+    ) -> None:
+        super().__init__()
+        self._application_generate_entity = application_generate_entity
+        self._workflow_info = workflow_info
+        self._workflow_execution_repository = workflow_execution_repository
+        self._workflow_node_execution_repository = workflow_node_execution_repository
+        self._trace_manager = trace_manager
+
+        self._workflow_execution: WorkflowExecution | None = None
+        self._node_execution_cache: dict[str, WorkflowNodeExecution] = {}
+        self._node_snapshots: dict[str, _NodeRuntimeSnapshot] = {}
+        self._node_sequence: int = 0
+
+    # ------------------------------------------------------------------
+    # GraphEngineLayer lifecycle
+    # ------------------------------------------------------------------
+    def on_graph_start(self) -> None:
+        self._workflow_execution = None
+        self._node_execution_cache.clear()
+        self._node_snapshots.clear()
+        self._node_sequence = 0
+
+    def on_event(self, event: GraphEngineEvent) -> None:
+        if isinstance(event, GraphRunStartedEvent):
+            self._handle_graph_run_started()
+            return
+
+        if isinstance(event, GraphRunSucceededEvent):
+            self._handle_graph_run_succeeded(event)
+            return
+
+        if isinstance(event, GraphRunPartialSucceededEvent):
+            self._handle_graph_run_partial_succeeded(event)
+            return
+
+        if isinstance(event, GraphRunFailedEvent):
+            self._handle_graph_run_failed(event)
+            return
+
+        if isinstance(event, GraphRunAbortedEvent):
+            self._handle_graph_run_aborted(event)
+            return
+
+        if isinstance(event, GraphRunPausedEvent):
+            self._handle_graph_run_paused(event)
+            return
+
+        if isinstance(event, NodeRunStartedEvent):
+            self._handle_node_started(event)
+            return
+
+        if isinstance(event, NodeRunRetryEvent):
+            self._handle_node_retry(event)
+            return
+
+        if isinstance(event, NodeRunSucceededEvent):
+            self._handle_node_succeeded(event)
+            return
+
+        if isinstance(event, NodeRunFailedEvent):
+            self._handle_node_failed(event)
+            return
+
+        if isinstance(event, NodeRunExceptionEvent):
+            self._handle_node_exception(event)
+            return
+
+        if isinstance(event, NodeRunPauseRequestedEvent):
+            self._handle_node_pause_requested(event)
+
+    def on_graph_end(self, error: Exception | None) -> None:
+        return
+
+    # ------------------------------------------------------------------
+    # Graph-level handlers
+    # ------------------------------------------------------------------
+    def _handle_graph_run_started(self) -> None:
+        execution_id = self._get_execution_id()
+        workflow_execution = WorkflowExecution.new(
+            id_=execution_id,
+            workflow_id=self._workflow_info.workflow_id,
+            workflow_type=self._workflow_info.workflow_type,
+            workflow_version=self._workflow_info.version,
+            graph=self._workflow_info.graph_data,
+            inputs=self._prepare_workflow_inputs(),
+            started_at=naive_utc_now(),
+        )
+
+        self._workflow_execution_repository.save(workflow_execution)
+        self._workflow_execution = workflow_execution
+
+    def _handle_graph_run_succeeded(self, event: GraphRunSucceededEvent) -> None:
+        execution = self._get_workflow_execution()
+        execution.outputs = event.outputs
+        execution.status = WorkflowExecutionStatus.SUCCEEDED
+        self._populate_completion_statistics(execution)
+
+        self._workflow_execution_repository.save(execution)
+        self._enqueue_trace_task(execution)
+
+    def _handle_graph_run_partial_succeeded(self, event: GraphRunPartialSucceededEvent) -> None:
+        execution = self._get_workflow_execution()
+        execution.outputs = event.outputs
+        execution.status = WorkflowExecutionStatus.PARTIAL_SUCCEEDED
+        execution.exceptions_count = event.exceptions_count
+        self._populate_completion_statistics(execution)
+
+        self._workflow_execution_repository.save(execution)
+        self._enqueue_trace_task(execution)
+
+    def _handle_graph_run_failed(self, event: GraphRunFailedEvent) -> None:
+        execution = self._get_workflow_execution()
+        execution.status = WorkflowExecutionStatus.FAILED
+        execution.error_message = event.error
+        execution.exceptions_count = event.exceptions_count
+        self._populate_completion_statistics(execution)
+
+        self._fail_running_node_executions(error_message=event.error)
+        self._workflow_execution_repository.save(execution)
+        self._enqueue_trace_task(execution)
+
+    def _handle_graph_run_aborted(self, event: GraphRunAbortedEvent) -> None:
+        execution = self._get_workflow_execution()
+        execution.status = WorkflowExecutionStatus.STOPPED
+        execution.error_message = event.reason or "Workflow execution aborted"
+        self._populate_completion_statistics(execution)
+
+        self._fail_running_node_executions(error_message=execution.error_message or "")
+        self._workflow_execution_repository.save(execution)
+        self._enqueue_trace_task(execution)
+
+    def _handle_graph_run_paused(self, event: GraphRunPausedEvent) -> None:
+        execution = self._get_workflow_execution()
+        execution.status = WorkflowExecutionStatus.PAUSED
+        execution.error_message = event.reason or "Workflow execution paused"
+        execution.outputs = event.outputs
+        self._populate_completion_statistics(execution, update_finished=False)
+
+        self._workflow_execution_repository.save(execution)
+
+    # ------------------------------------------------------------------
+    # Node-level handlers
+    # ------------------------------------------------------------------
+    def _handle_node_started(self, event: NodeRunStartedEvent) -> None:
+        execution = self._get_workflow_execution()
+
+        metadata = {
+            WorkflowNodeExecutionMetadataKey.ITERATION_ID: event.in_iteration_id,
+            WorkflowNodeExecutionMetadataKey.LOOP_ID: event.in_loop_id,
+        }
+
+        domain_execution = WorkflowNodeExecution(
+            id=event.id,
+            node_execution_id=event.id,
+            workflow_id=execution.workflow_id,
+            workflow_execution_id=execution.id_,
+            predecessor_node_id=event.predecessor_node_id,
+            index=self._next_node_sequence(),
+            node_id=event.node_id,
+            node_type=event.node_type,
+            title=event.node_title,
+            status=WorkflowNodeExecutionStatus.RUNNING,
+            metadata=metadata,
+            created_at=event.start_at,
+        )
+
+        self._node_execution_cache[event.id] = domain_execution
+        self._workflow_node_execution_repository.save(domain_execution)
+
+        snapshot = _NodeRuntimeSnapshot(
+            node_id=event.node_id,
+            title=event.node_title,
+            predecessor_node_id=event.predecessor_node_id,
+            iteration_id=event.in_iteration_id,
+            loop_id=event.in_loop_id,
+            created_at=event.start_at,
+        )
+        self._node_snapshots[event.id] = snapshot
+
+    def _handle_node_retry(self, event: NodeRunRetryEvent) -> None:
+        domain_execution = self._get_node_execution(event.id)
+        domain_execution.status = WorkflowNodeExecutionStatus.RETRY
+        domain_execution.error = event.error
+        self._workflow_node_execution_repository.save(domain_execution)
+        self._workflow_node_execution_repository.save_execution_data(domain_execution)
+
+    def _handle_node_succeeded(self, event: NodeRunSucceededEvent) -> None:
+        domain_execution = self._get_node_execution(event.id)
+        self._update_node_execution(domain_execution, event.node_run_result, WorkflowNodeExecutionStatus.SUCCEEDED)
+
+    def _handle_node_failed(self, event: NodeRunFailedEvent) -> None:
+        domain_execution = self._get_node_execution(event.id)
+        self._update_node_execution(
+            domain_execution,
+            event.node_run_result,
+            WorkflowNodeExecutionStatus.FAILED,
+            error=event.error,
+        )
+
+    def _handle_node_exception(self, event: NodeRunExceptionEvent) -> None:
+        domain_execution = self._get_node_execution(event.id)
+        self._update_node_execution(
+            domain_execution,
+            event.node_run_result,
+            WorkflowNodeExecutionStatus.EXCEPTION,
+            error=event.error,
+        )
+
+    def _handle_node_pause_requested(self, event: NodeRunPauseRequestedEvent) -> None:
+        domain_execution = self._get_node_execution(event.id)
+        self._update_node_execution(
+            domain_execution,
+            event.node_run_result,
+            WorkflowNodeExecutionStatus.PAUSED,
+            error=event.reason,
+            update_outputs=False,
+        )
+
+    # ------------------------------------------------------------------
+    # Helpers
+    # ------------------------------------------------------------------
+    def _get_execution_id(self) -> str:
+        workflow_execution_id = self._system_variables().get(SystemVariableKey.WORKFLOW_EXECUTION_ID)
+        if not workflow_execution_id:
+            raise ValueError("workflow_execution_id must be provided in system variables for pause/resume flows")
+        return str(workflow_execution_id)
+
+    def _prepare_workflow_inputs(self) -> Mapping[str, Any]:
+        inputs = {**self._application_generate_entity.inputs}
+        for field_name, value in self._system_variables().items():
+            if field_name == SystemVariableKey.CONVERSATION_ID.value:
+                # Conversation IDs are tied to the current session; omit them so persisted
+                # workflow inputs stay reusable without binding future runs to this conversation.
+                continue
+            inputs[f"sys.{field_name}"] = value
+        handled = WorkflowEntry.handle_special_values(inputs)
+        return handled or {}
+
+    def _get_workflow_execution(self) -> WorkflowExecution:
+        if self._workflow_execution is None:
+            raise ValueError("workflow execution not initialized")
+        return self._workflow_execution
+
+    def _get_node_execution(self, node_execution_id: str) -> WorkflowNodeExecution:
+        if node_execution_id not in self._node_execution_cache:
+            raise ValueError(f"Node execution not found for id={node_execution_id}")
+        return self._node_execution_cache[node_execution_id]
+
+    def _next_node_sequence(self) -> int:
+        self._node_sequence += 1
+        return self._node_sequence
+
+    def _populate_completion_statistics(self, execution: WorkflowExecution, *, update_finished: bool = True) -> None:
+        if update_finished:
+            execution.finished_at = naive_utc_now()
+        runtime_state = self.graph_runtime_state
+        if runtime_state is None:
+            return
+        execution.total_tokens = runtime_state.total_tokens
+        execution.total_steps = runtime_state.node_run_steps
+        execution.outputs = execution.outputs or runtime_state.outputs
+        execution.exceptions_count = runtime_state.exceptions_count
+
+    def _update_node_execution(
+        self,
+        domain_execution: WorkflowNodeExecution,
+        node_result: NodeRunResult,
+        status: WorkflowNodeExecutionStatus,
+        *,
+        error: str | None = None,
+        update_outputs: bool = True,
+    ) -> None:
+        finished_at = naive_utc_now()
+        snapshot = self._node_snapshots.get(domain_execution.id)
+        start_at = snapshot.created_at if snapshot else domain_execution.created_at
+        domain_execution.status = status
+        domain_execution.finished_at = finished_at
+        domain_execution.elapsed_time = max((finished_at - start_at).total_seconds(), 0.0)
+
+        if error:
+            domain_execution.error = error
+
+        if update_outputs:
+            domain_execution.update_from_mapping(
+                inputs=node_result.inputs,
+                process_data=node_result.process_data,
+                outputs=node_result.outputs,
+                metadata=node_result.metadata,
+            )
+
+        self._workflow_node_execution_repository.save(domain_execution)
+        self._workflow_node_execution_repository.save_execution_data(domain_execution)
+
+    def _fail_running_node_executions(self, *, error_message: str) -> None:
+        now = naive_utc_now()
+        for execution in self._node_execution_cache.values():
+            if execution.status == WorkflowNodeExecutionStatus.RUNNING:
+                execution.status = WorkflowNodeExecutionStatus.FAILED
+                execution.error = error_message
+                execution.finished_at = now
+                execution.elapsed_time = max((now - execution.created_at).total_seconds(), 0.0)
+                self._workflow_node_execution_repository.save(execution)
+
+    def _enqueue_trace_task(self, execution: WorkflowExecution) -> None:
+        if not self._trace_manager:
+            return
+
+        conversation_id = self._system_variables().get(SystemVariableKey.CONVERSATION_ID.value)
+        external_trace_id = None
+        if isinstance(self._application_generate_entity, (WorkflowAppGenerateEntity, AdvancedChatAppGenerateEntity)):
+            external_trace_id = self._application_generate_entity.extras.get("external_trace_id")
+
+        trace_task = TraceTask(
+            TraceTaskName.WORKFLOW_TRACE,
+            workflow_execution=execution,
+            conversation_id=conversation_id,
+            user_id=self._trace_manager.user_id,
+            external_trace_id=external_trace_id,
+        )
+        self._trace_manager.add_trace_task(trace_task)
+
+    def _system_variables(self) -> Mapping[str, Any]:
+        runtime_state = self.graph_runtime_state
+        if runtime_state is None:
+            return {}
+        return runtime_state.variable_pool.get_by_prefix(SYSTEM_VARIABLE_NODE_ID)
diff --git a/api/core/workflow/graph_engine/manager.py b/api/core/workflow/graph_engine/manager.py
index ed62209acb..f05d43d8ad 100644
--- a/api/core/workflow/graph_engine/manager.py
+++ b/api/core/workflow/graph_engine/manager.py
@@ -9,7 +9,7 @@ Supports stop, pause, and resume operations.
 from typing import final
 
 from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
-from core.workflow.graph_engine.entities.commands import AbortCommand
+from core.workflow.graph_engine.entities.commands import AbortCommand, GraphEngineCommand, PauseCommand
 from extensions.ext_redis import redis_client
 
 
@@ -20,7 +20,7 @@ class GraphEngineManager:
 
     This class provides a simple interface for controlling workflow executions
     by sending commands through Redis channels, without user validation.
-    Supports stop, pause, and resume operations.
+    Supports stop and pause operations.
     """
 
     @staticmethod
@@ -32,19 +32,29 @@ class GraphEngineManager:
             task_id: The task ID of the workflow to stop
             reason: Optional reason for stopping (defaults to "User requested stop")
         """
+        abort_command = AbortCommand(reason=reason or "User requested stop")
+        GraphEngineManager._send_command(task_id, abort_command)
+
+    @staticmethod
+    def send_pause_command(task_id: str, reason: str | None = None) -> None:
+        """Send a pause command to a running workflow."""
+
+        pause_command = PauseCommand(reason=reason or "User requested pause")
+        GraphEngineManager._send_command(task_id, pause_command)
+
+    @staticmethod
+    def _send_command(task_id: str, command: GraphEngineCommand) -> None:
+        """Send a command to the workflow-specific Redis channel."""
+
         if not task_id:
             return
 
-        # Create Redis channel for this task
         channel_key = f"workflow:{task_id}:commands"
         channel = RedisChannel(redis_client, channel_key)
 
-        # Create and send abort command
-        abort_command = AbortCommand(reason=reason or "User requested stop")
-
         try:
-            channel.send_command(abort_command)
+            channel.send_command(command)
         except Exception:
             # Silently fail if Redis is unavailable
-            # The legacy stop flag mechanism will still work
+            # The legacy control mechanisms will still work
             pass
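Given the manager above, pausing or stopping a run is a one-liner from any process that shares the Redis instance; the task id shown here is illustrative:

from core.workflow.graph_engine.manager import GraphEngineManager

# Publishes a PauseCommand on workflow:<task_id>:commands; the engine's
# dispatcher consumes it on its next command check.
GraphEngineManager.send_pause_command("task-123", reason="Awaiting human input")

# Stopping reuses the same channel with an AbortCommand.
GraphEngineManager.send_stop_command("task-123")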
diff --git a/api/core/workflow/graph_engine/orchestration/dispatcher.py b/api/core/workflow/graph_engine/orchestration/dispatcher.py
index a7229ce4e8..4097cead9c 100644
--- a/api/core/workflow/graph_engine/orchestration/dispatcher.py
+++ b/api/core/workflow/graph_engine/orchestration/dispatcher.py
@@ -8,7 +8,12 @@ import threading
 import time
 from typing import TYPE_CHECKING, final
 
-from core.workflow.graph_events.base import GraphNodeEventBase
+from core.workflow.graph_events import (
+    GraphNodeEventBase,
+    NodeRunExceptionEvent,
+    NodeRunFailedEvent,
+    NodeRunSucceededEvent,
+)
 
 from ..event_management import EventManager
 from .execution_coordinator import ExecutionCoordinator
@@ -28,6 +33,12 @@ class Dispatcher:
     with timeout and completion detection.
     """
 
+    _COMMAND_TRIGGER_EVENTS = (
+        NodeRunSucceededEvent,
+        NodeRunFailedEvent,
+        NodeRunExceptionEvent,
+    )
+
     def __init__(
         self,
         event_queue: queue.Queue[GraphNodeEventBase],
@@ -76,22 +87,37 @@
         """Main dispatcher loop."""
         try:
             while not self._stop_event.is_set():
-                # Check for commands
-                self._execution_coordinator.check_commands()
+                commands_checked = False
+                should_check_commands = False
+                should_break = False
 
-                # Check for scaling
-                self._execution_coordinator.check_scaling()
+                if self._execution_coordinator.is_execution_complete():
+                    should_check_commands = True
+                    should_break = True
+                else:
+                    # Check for scaling
+                    self._execution_coordinator.check_scaling()
 
-                # Process events
-                try:
-                    event = self._event_queue.get(timeout=0.1)
-                    # Route to the event handler
-                    self._event_handler.dispatch(event)
-                    self._event_queue.task_done()
-                except queue.Empty:
-                    # Check if execution is complete
-                    if self._execution_coordinator.is_execution_complete():
-                        break
+                    # Process events
+                    try:
+                        event = self._event_queue.get(timeout=0.1)
+                        # Route to the event handler
+                        self._event_handler.dispatch(event)
+                        should_check_commands = self._should_check_commands(event)
+                        self._event_queue.task_done()
+                    except queue.Empty:
+                        # Process commands even when no new events arrive so abort requests are not missed
+                        should_check_commands = True
+                        time.sleep(0.1)
+
+                if should_check_commands and not commands_checked:
+                    self._execution_coordinator.check_commands()
+                    commands_checked = True
+
+                if should_break:
+                    if not commands_checked:
+                        self._execution_coordinator.check_commands()
+                    break
 
         except Exception as e:
             logger.exception("Dispatcher error")
@@ -102,3 +128,7 @@
         # Signal the event emitter that execution is complete
         if self._event_emitter:
             self._event_emitter.mark_complete()
+
+    def _should_check_commands(self, event: GraphNodeEventBase) -> bool:
+        """Return True if the event represents a node completion."""
+        return isinstance(event, self._COMMAND_TRIGGER_EVENTS)
diff --git a/api/core/workflow/graph_engine/orchestration/execution_coordinator.py b/api/core/workflow/graph_engine/orchestration/execution_coordinator.py
index b35e8bb6d8..a3162de244 100644
--- a/api/core/workflow/graph_engine/orchestration/execution_coordinator.py
+++ b/api/core/workflow/graph_engine/orchestration/execution_coordinator.py
@@ -2,17 +2,13 @@
 Execution coordinator for managing overall workflow execution.
 """
 
-from typing import TYPE_CHECKING, final
+from typing import final
 
 from ..command_processing import CommandProcessor
 from ..domain import GraphExecution
-from ..event_management import EventManager
 from ..graph_state_manager import GraphStateManager
 from ..worker_management import WorkerPool
 
-if TYPE_CHECKING:
-    from ..event_management import EventHandler
-
 
 @final
 class ExecutionCoordinator:
@@ -27,8 +23,6 @@
         self,
         graph_execution: GraphExecution,
         state_manager: GraphStateManager,
-        event_handler: "EventHandler",
-        event_collector: EventManager,
         command_processor: CommandProcessor,
         worker_pool: WorkerPool,
     ) -> None:
@@ -38,15 +32,11 @@
 
         Args:
             graph_execution: Graph execution aggregate
             state_manager: Unified state manager
-            event_handler: Event handler registry for processing events
-            event_collector: Event manager for collecting events
             command_processor: Processor for commands
             worker_pool: Pool of workers
         """
         self._graph_execution = graph_execution
         self._state_manager = state_manager
-        self._event_handler = event_handler
-        self._event_collector = event_collector
         self._command_processor = command_processor
         self._worker_pool = worker_pool
@@ -65,15 +55,24 @@
         Returns:
             True if execution is complete
         """
-        # Check if aborted or failed
+        # Treat paused, aborted, or failed executions as terminal states
+        if self._graph_execution.is_paused:
+            return True
+
         if self._graph_execution.aborted or self._graph_execution.has_error:
             return True
 
-        # Complete if no work remains
         return self._state_manager.is_execution_complete()
 
+    @property
+    def is_paused(self) -> bool:
+        """Expose whether the underlying graph execution is paused."""
+        return self._graph_execution.is_paused
+
     def mark_complete(self) -> None:
         """Mark execution as complete."""
+        if self._graph_execution.is_paused:
+            return
         if not self._graph_execution.completed:
             self._graph_execution.complete()
 
@@ -85,3 +84,21 @@
             error: The error that caused failure
         """
         self._graph_execution.fail(error)
+
+    def handle_pause_if_needed(self) -> None:
+        """If the execution has been paused, stop workers immediately."""
+
+        if not self._graph_execution.is_paused:
+            return
+
+        self._worker_pool.stop()
+        self._state_manager.clear_executing()
+
+    def handle_abort_if_needed(self) -> None:
+        """If the execution has been aborted, stop workers immediately."""
+
+        if not self._graph_execution.aborted:
+            return
+
+        self._worker_pool.stop()
+        self._state_manager.clear_executing()
diff --git a/api/core/workflow/graph_engine/response_coordinator/coordinator.py b/api/core/workflow/graph_engine/response_coordinator/coordinator.py
index 3db40c545e..98e0ea91ef 100644
--- a/api/core/workflow/graph_engine/response_coordinator/coordinator.py
+++ b/api/core/workflow/graph_engine/response_coordinator/coordinator.py
@@ -14,11 +14,11 @@ from uuid import uuid4
 
 from pydantic import BaseModel, Field
 
-from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.enums import NodeExecutionType, NodeState
 from core.workflow.graph import Graph
 from core.workflow.graph_events import NodeRunStreamChunkEvent, NodeRunSucceededEvent
 from core.workflow.nodes.base.template import TextSegment, VariableSegment
+from core.workflow.runtime import VariablePool
 
 from .path import Path
 from .session import ResponseSession
diff --git a/api/core/workflow/graph_events/__init__.py b/api/core/workflow/graph_events/__init__.py
index 42a376d4ad..7a5edbb331 100644
--- a/api/core/workflow/graph_events/__init__.py
+++ b/api/core/workflow/graph_events/__init__.py
@@ -13,6 +13,7 @@ from .graph import (
     GraphRunAbortedEvent,
     GraphRunFailedEvent,
     GraphRunPartialSucceededEvent,
+    GraphRunPausedEvent,
     GraphRunStartedEvent,
     GraphRunSucceededEvent,
 )
@@ -37,6 +38,7 @@ from .loop import (
 from .node import (
     NodeRunExceptionEvent,
     NodeRunFailedEvent,
+    NodeRunPauseRequestedEvent,
     NodeRunRetrieverResourceEvent,
     NodeRunRetryEvent,
     NodeRunStartedEvent,
@@ -51,6 +53,7 @@ __all__ = [
     "GraphRunAbortedEvent",
     "GraphRunFailedEvent",
     "GraphRunPartialSucceededEvent",
+    "GraphRunPausedEvent",
     "GraphRunStartedEvent",
     "GraphRunSucceededEvent",
     "NodeRunAgentLogEvent",
@@ -64,6 +67,7 @@ __all__ = [
     "NodeRunLoopNextEvent",
     "NodeRunLoopStartedEvent",
     "NodeRunLoopSucceededEvent",
+    "NodeRunPauseRequestedEvent",
     "NodeRunRetrieverResourceEvent",
     "NodeRunRetryEvent",
     "NodeRunStartedEvent",
diff --git a/api/core/workflow/graph_events/graph.py b/api/core/workflow/graph_events/graph.py
index 5d13833faa..0da962aa1c 100644
--- a/api/core/workflow/graph_events/graph.py
+++ b/api/core/workflow/graph_events/graph.py
@@ -8,7 +8,12 @@ class GraphRunStartedEvent(BaseGraphEvent):
 
 
 class GraphRunSucceededEvent(BaseGraphEvent):
-    outputs: dict[str, object] = Field(default_factory=dict)
+    """Event emitted when a run completes successfully with final outputs."""
+
+    outputs: dict[str, object] = Field(
+        default_factory=dict,
+        description="Final workflow outputs keyed by output selector.",
+    )
 
 
 class GraphRunFailedEvent(BaseGraphEvent):
@@ -17,12 +22,30 @@
 
 
 class GraphRunPartialSucceededEvent(BaseGraphEvent):
+    """Event emitted when a run finishes with partial success and failures."""
+
     exceptions_count: int = Field(..., description="exception count")
-    outputs: dict[str, object] = Field(default_factory=dict)
+    outputs: dict[str, object] = Field(
+        default_factory=dict,
+        description="Outputs that were materialised before failures occurred.",
+    )
 
 
 class GraphRunAbortedEvent(BaseGraphEvent):
     """Event emitted when a graph run is aborted by user command."""
 
     reason: str | None = Field(default=None, description="reason for abort")
-    outputs: dict[str, object] = Field(default_factory=dict, description="partial outputs if any")
+    outputs: dict[str, object] = Field(
+        default_factory=dict,
+        description="Outputs produced before the abort was requested.",
+    )
+
+
+class GraphRunPausedEvent(BaseGraphEvent):
+    """Event emitted when a graph run is paused by user command."""
+
+    reason: str | None = Field(default=None, description="reason for pause")
+    outputs: dict[str, object] = Field(
+        default_factory=dict,
+        description="Outputs available to the client while the run is paused.",
+    )
diff --git a/api/core/workflow/graph_events/node.py b/api/core/workflow/graph_events/node.py
index 1d35a69c4a..b880df60d1 100644
--- a/api/core/workflow/graph_events/node.py
+++ b/api/core/workflow/graph_events/node.py
@@ -51,3 +51,7 @@ class NodeRunExceptionEvent(GraphNodeEventBase):
 class NodeRunRetryEvent(NodeRunStartedEvent):
     error: str = Field(..., description="error")
     retry_index: int = Field(..., description="which retry attempt is about to be performed")
+
+
+class NodeRunPauseRequestedEvent(GraphNodeEventBase):
+    reason: str | None = Field(default=None, description="Optional pause reason")
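With `GraphRunPausedEvent` added alongside the other terminal graph events above, consumers can branch on the full set; a small sketch (the `summarize` helper is illustrative):

from core.workflow.graph_events import (
    GraphRunAbortedEvent,
    GraphRunFailedEvent,
    GraphRunPartialSucceededEvent,
    GraphRunPausedEvent,
    GraphRunSucceededEvent,
)


def summarize(event: object) -> str:
    # Keyword class patterns read the events' Pydantic fields.
    match event:
        case GraphRunSucceededEvent():
            return "succeeded"
        case GraphRunPartialSucceededEvent(exceptions_count=count):
            return f"partial success ({count} exceptions)"
        case GraphRunPausedEvent(reason=reason):
            return f"paused: {reason or 'no reason given'}"
        case GraphRunAbortedEvent(reason=reason):
            return f"aborted: {reason or 'user command'}"
        case GraphRunFailedEvent(error=error):
            return f"failed: {error}"
        case _:
            return "running"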
diff --git a/api/core/workflow/node_events/__init__.py b/api/core/workflow/node_events/__init__.py
index c3bcda0483..f14a594c85 100644
--- a/api/core/workflow/node_events/__init__.py
+++ b/api/core/workflow/node_events/__init__.py
@@ -14,6 +14,7 @@ from .loop import (
 )
 from .node import (
     ModelInvokeCompletedEvent,
+    PauseRequestedEvent,
     RunRetrieverResourceEvent,
     RunRetryEvent,
     StreamChunkEvent,
@@ -33,6 +34,7 @@ __all__ = [
     "ModelInvokeCompletedEvent",
     "NodeEventBase",
     "NodeRunResult",
+    "PauseRequestedEvent",
     "RunRetrieverResourceEvent",
     "RunRetryEvent",
     "StreamChunkEvent",
diff --git a/api/core/workflow/node_events/node.py b/api/core/workflow/node_events/node.py
index c1aeb9fe27..4fd5684436 100644
--- a/api/core/workflow/node_events/node.py
+++ b/api/core/workflow/node_events/node.py
@@ -20,6 +20,7 @@ class ModelInvokeCompletedEvent(NodeEventBase):
     usage: LLMUsage
     finish_reason: str | None = None
     reasoning_content: str | None = None
+    structured_output: dict | None = None
 
 
 class RunRetryEvent(NodeEventBase):
@@ -39,3 +40,7 @@ class StreamChunkEvent(NodeEventBase):
 
 class StreamCompletedEvent(NodeEventBase):
     node_run_result: NodeRunResult = Field(..., description="run result")
+
+
+class PauseRequestedEvent(NodeEventBase):
+    reason: str | None = Field(default=None, description="Optional pause reason")
diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py
index a01686a4b8..626ef1df7b 100644
--- a/api/core/workflow/nodes/agent/agent_node.py
+++ b/api/core/workflow/nodes/agent/agent_node.py
@@ -25,7 +25,6 @@ from core.tools.entities.tool_entities import (
 from core.tools.tool_manager import ToolManager
 from core.tools.utils.message_transformer import ToolFileMessageTransformer
 from core.variables.segments import ArrayFileSegment, StringSegment
-from core.workflow.entities import VariablePool
 from core.workflow.enums import (
     ErrorStrategy,
     NodeType,
@@ -44,6 +43,7 @@ from core.workflow.nodes.agent.entities import AgentNodeData, AgentOldVersionModelFeatures
 from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig
 from core.workflow.nodes.base.node import Node
 from core.workflow.nodes.base.variable_template_parser import VariableTemplateParser
+from core.workflow.runtime import VariablePool
 from extensions.ext_database import db
 from factories import file_factory
 from factories.agent_factory import get_plugin_agent_strategy
@@ -252,7 +252,10 @@ class AgentNode(Node):
         if all(isinstance(v, dict) for _, v in parameters.items()):
             params = {}
             for key, param in parameters.items():
-                if param.get("auto", ParamsAutoGenerated.OPEN.value) == ParamsAutoGenerated.CLOSE.value:
+                if param.get("auto", ParamsAutoGenerated.OPEN) in (
+                    ParamsAutoGenerated.CLOSE,
+                    0,
+                ):
                     value_param = param.get("value", {})
                     params[key] = value_param.get("value", "") if value_param is not None else None
                 else:
@@ -266,7 +269,7 @@ class AgentNode(Node):
             value = cast(list[dict[str, Any]], value)
             tool_value = []
             for tool in value:
-                provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN.value))
+                provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN))
                 setting_params = tool.get("settings", {})
                 parameters = tool.get("parameters", {})
                 manual_input_params = [key for key, value in parameters.items() if value is not None]
@@ -417,7 +420,7 @@ class AgentNode(Node):
     def _fetch_memory(self, model_instance: ModelInstance) -> TokenBufferMemory | None:
         # get conversation id
         conversation_id_variable = self.graph_runtime_state.variable_pool.get(
-            ["sys", SystemVariableKey.CONVERSATION_ID.value]
+            ["sys", SystemVariableKey.CONVERSATION_ID]
         )
         if not isinstance(conversation_id_variable, StringSegment):
             return None
@@ -476,7 +479,7 @@ class AgentNode(Node):
         if meta_version and Version(meta_version) > Version("0.0.1"):
             return tools
         else:
-            return [tool for tool in tools if tool.get("type") != ToolProviderType.MCP.value]
+            return [tool for tool in tools if tool.get("type") != ToolProviderType.MCP]
 
     def _transform_message(
         self,
diff --git a/api/core/workflow/nodes/agent/entities.py b/api/core/workflow/nodes/agent/entities.py
index ce6eb33ecc..985ee5eef2 100644
--- a/api/core/workflow/nodes/agent/entities.py
+++ b/api/core/workflow/nodes/agent/entities.py
@@ -26,8 +26,8 @@ class AgentNodeData(BaseNodeData):
 
 
 class ParamsAutoGenerated(IntEnum):
-    CLOSE = auto()
-    OPEN = auto()
+    CLOSE = 0
+    OPEN = 1
 
 
 class AgentOldVersionModelFeatures(StrEnum):
diff --git a/api/core/workflow/nodes/base/__init__.py b/api/core/workflow/nodes/base/__init__.py
index 8cf31dc342..f83df0e323 100644
--- a/api/core/workflow/nodes/base/__init__.py
+++ b/api/core/workflow/nodes/base/__init__.py
@@ -1,4 +1,5 @@
 from .entities import BaseIterationNodeData, BaseIterationState, BaseLoopNodeData, BaseLoopState, BaseNodeData
+from .usage_tracking_mixin import LLMUsageTrackingMixin
 
 __all__ = [
     "BaseIterationNodeData",
@@ -6,4 +7,5 @@ __all__ = [
     "BaseLoopNodeData",
     "BaseLoopState",
     "BaseNodeData",
+    "LLMUsageTrackingMixin",
 ]
diff --git a/api/core/workflow/nodes/base/entities.py b/api/core/workflow/nodes/base/entities.py
index 5aef9d79cf..94b0d1d8bc 100644
--- a/api/core/workflow/nodes/base/entities.py
+++ b/api/core/workflow/nodes/base/entities.py
@@ -1,5 +1,6 @@
 import json
 from abc import ABC
+from builtins import type as type_
 from collections.abc import Sequence
 from enum import StrEnum
 from typing import Any, Union
@@ -58,10 +59,9 @@ class DefaultValue(BaseModel):
             raise DefaultValueTypeError(f"Invalid JSON format for value: {value}")
 
     @staticmethod
-    def _validate_array(value: Any, element_type: DefaultValueType) -> bool:
+    def _validate_array(value: Any, element_type: type_ | tuple[type_, ...]) -> bool:
         """Unified array type validation"""
-        # FIXME, type ignore here for do not find the reason mypy complain, if find the root cause, please fix it
-        return isinstance(value, list) and all(isinstance(x, element_type) for x in value)  # type: ignore
+        return isinstance(value, list) and all(isinstance(x, element_type) for x in value)
 
     @staticmethod
     def _convert_number(value: str) -> float:
diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py
index 41212abb0e..7f8c1eddff 100644
--- a/api/core/workflow/nodes/base/node.py
+++ b/api/core/workflow/nodes/base/node.py
@@ -6,7 +6,7 @@ from typing import Any, ClassVar
 from uuid import uuid4
 
 from core.app.entities.app_invoke_entities import InvokeFrom
-from core.workflow.entities import AgentNodeStrategyInit, GraphInitParams, GraphRuntimeState
+from core.workflow.entities import AgentNodeStrategyInit, GraphInitParams
 from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeState, NodeType, WorkflowNodeExecutionStatus
 from core.workflow.graph_events import (
     GraphNodeEventBase,
@@ -20,6 +20,7 @@ from core.workflow.graph_events import (
     NodeRunLoopNextEvent,
     NodeRunLoopStartedEvent,
     NodeRunLoopSucceededEvent,
+    NodeRunPauseRequestedEvent,
     NodeRunRetrieverResourceEvent,
     NodeRunStartedEvent,
     NodeRunStreamChunkEvent,
@@ -37,10 +38,12 @@ from core.workflow.node_events import (
     LoopSucceededEvent,
     NodeEventBase,
     NodeRunResult,
+    PauseRequestedEvent,
     RunRetrieverResourceEvent,
     StreamChunkEvent,
     StreamCompletedEvent,
 )
+from core.workflow.runtime import GraphRuntimeState
 from libs.datetime_utils import naive_utc_now
 from models.enums import UserFrom
@@ -385,6 +388,16 @@ class Node:
                     f"Node {self._node_id} does not support status {event.node_run_result.status}"
                 )
 
+    @_dispatch.register
+    def _(self, event: PauseRequestedEvent) -> NodeRunPauseRequestedEvent:
+        return NodeRunPauseRequestedEvent(
+            id=self._node_execution_id,
+            node_id=self._node_id,
+            node_type=self.node_type,
+            node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.PAUSED),
+            reason=event.reason,
+        )
+
     @_dispatch.register
     def _(self, event: AgentLogEvent) -> NodeRunAgentLogEvent:
         return NodeRunAgentLogEvent(
diff --git a/api/core/workflow/nodes/base/usage_tracking_mixin.py b/api/core/workflow/nodes/base/usage_tracking_mixin.py
new file mode 100644
index 0000000000..d9a0ef8972
--- /dev/null
+++ b/api/core/workflow/nodes/base/usage_tracking_mixin.py
@@ -0,0 +1,28 @@
+from core.model_runtime.entities.llm_entities import LLMUsage
+from core.workflow.runtime import GraphRuntimeState
+
+
+class LLMUsageTrackingMixin:
+    """Provides shared helpers for merging and recording LLM usage within workflow nodes."""
+
+    graph_runtime_state: GraphRuntimeState
+
+    @staticmethod
+    def _merge_usage(current: LLMUsage, new_usage: LLMUsage | None) -> LLMUsage:
+        """Return a combined usage snapshot, preserving zero-value inputs."""
+        if new_usage is None or new_usage.total_tokens <= 0:
+            return current
+        if current.total_tokens == 0:
+            return new_usage
+        return current.plus(new_usage)
+
+    def _accumulate_usage(self, usage: LLMUsage) -> None:
+        """Push usage into the graph runtime accumulator for downstream reporting."""
+        if usage.total_tokens <= 0:
+            return
+
+        current_usage = self.graph_runtime_state.llm_usage
+        if current_usage.total_tokens == 0:
+            self.graph_runtime_state.llm_usage = usage.model_copy()
+        else:
+            self.graph_runtime_state.llm_usage = current_usage.plus(usage)
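A sketch of how a node class might adopt the mixin above; `ExampleNode` is illustrative and elides the rest of the `Node` contract:

from core.model_runtime.entities.llm_entities import LLMUsage
from core.workflow.nodes.base import LLMUsageTrackingMixin
from core.workflow.runtime import GraphRuntimeState


class ExampleNode(LLMUsageTrackingMixin):  # illustrative only
    def __init__(self, graph_runtime_state: GraphRuntimeState) -> None:
        self.graph_runtime_state = graph_runtime_state

    def on_model_call(self, previous: LLMUsage, latest: LLMUsage) -> LLMUsage:
        merged = self._merge_usage(previous, latest)  # keeps zero-token snapshots intact
        self._accumulate_usage(latest)                # feeds the shared runtime accumulator
        return merged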
DatasourceNodeError("Datasource type is not set") datasource_type = str(datasource_type_segement.value) if datasource_type_segement.value else None - datasource_info_segement = variable_pool.get(["sys", SystemVariableKey.DATASOURCE_INFO.value]) + datasource_info_segement = variable_pool.get(["sys", SystemVariableKey.DATASOURCE_INFO]) if not datasource_info_segement: raise DatasourceNodeError("Datasource info is not set") datasource_info_value = datasource_info_segement.value @@ -267,7 +267,7 @@ class DatasourceNode(Node): return result def _fetch_files(self, variable_pool: VariablePool) -> list[File]: - variable = variable_pool.get(["sys", SystemVariableKey.FILES.value]) + variable = variable_pool.get(["sys", SystemVariableKey.FILES]) assert isinstance(variable, ArrayAnyVariable | ArrayAnySegment) return list(variable.value) if variable else [] diff --git a/api/core/workflow/nodes/document_extractor/node.py b/api/core/workflow/nodes/document_extractor/node.py index ae1061d72c..12cd7e2bd9 100644 --- a/api/core/workflow/nodes/document_extractor/node.py +++ b/api/core/workflow/nodes/document_extractor/node.py @@ -10,10 +10,10 @@ from typing import Any import chardet import docx import pandas as pd -import pypandoc # type: ignore -import pypdfium2 # type: ignore -import webvtt # type: ignore -import yaml # type: ignore +import pypandoc +import pypdfium2 +import webvtt +import yaml from docx.document import Document from docx.oxml.table import CT_Tbl from docx.oxml.text.paragraph import CT_P @@ -171,6 +171,7 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str) ".txt" | ".markdown" | ".md" + | ".mdx" | ".html" | ".htm" | ".xml" diff --git a/api/core/workflow/nodes/end/end_node.py b/api/core/workflow/nodes/end/end_node.py index 2bdfe4efce..7ec74084d0 100644 --- a/api/core/workflow/nodes/end/end_node.py +++ b/api/core/workflow/nodes/end/end_node.py @@ -16,7 +16,7 @@ class EndNode(Node): _node_data: EndNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = EndNodeData(**data) + self._node_data = EndNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index c47ffb5ab0..7b5b9c9e86 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -15,7 +15,7 @@ from core.file import file_manager from core.file.enums import FileTransferMethod from core.helper import ssrf_proxy from core.variables.segments import ArrayFileSegment, FileSegment -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from .entities import ( HttpRequestNodeAuthorization, @@ -87,7 +87,7 @@ class Executor: node_data.authorization.config.api_key ).text - self.url: str = node_data.url + self.url = node_data.url self.method = node_data.method self.auth = node_data.authorization self.timeout = timeout @@ -349,11 +349,10 @@ class Executor: "timeout": (self.timeout.connect, self.timeout.read, self.timeout.write), "ssl_verify": self.ssl_verify, "follow_redirects": True, - "max_retries": self.max_retries, } # request_args = {k: v for k, v in request_args.items() if v is not None} try: - response: httpx.Response = _METHOD_MAP[method_lc](**request_args) + response: httpx.Response = _METHOD_MAP[method_lc](**request_args, max_retries=self.max_retries) except (ssrf_proxy.MaxRetriesExceededError, 
httpx.RequestError) as e: raise HttpRequestNodeError(str(e)) from e # FIXME: fix type ignore, this may be an httpx type issue diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 826820a8e3..55dec3fb08 100644 --- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -165,6 +165,8 @@ class HttpRequestNode(Node): body_type = typed_node_data.body.type data = typed_node_data.body.data match body_type: + case "none": + pass case "binary": if len(data) != 1: raise RequestBodyError("invalid body data, should have only one item") @@ -232,7 +234,7 @@ class HttpRequestNode(Node): mapping = { "tool_file_id": tool_file.id, - "transfer_method": FileTransferMethod.TOOL_FILE.value, + "transfer_method": FileTransferMethod.TOOL_FILE, } file = file_factory.build_from_mapping( mapping=mapping, diff --git a/api/core/workflow/nodes/human_input/__init__.py b/api/core/workflow/nodes/human_input/__init__.py new file mode 100644 index 0000000000..379440557c --- /dev/null +++ b/api/core/workflow/nodes/human_input/__init__.py @@ -0,0 +1,3 @@ +from .human_input_node import HumanInputNode + +__all__ = ["HumanInputNode"] diff --git a/api/core/workflow/nodes/human_input/entities.py b/api/core/workflow/nodes/human_input/entities.py new file mode 100644 index 0000000000..02913d93c3 --- /dev/null +++ b/api/core/workflow/nodes/human_input/entities.py @@ -0,0 +1,10 @@ +from pydantic import Field + +from core.workflow.nodes.base import BaseNodeData + + +class HumanInputNodeData(BaseNodeData): + """Configuration schema for the HumanInput node.""" + + required_variables: list[str] = Field(default_factory=list) + pause_reason: str | None = Field(default=None) diff --git a/api/core/workflow/nodes/human_input/human_input_node.py b/api/core/workflow/nodes/human_input/human_input_node.py new file mode 100644 index 0000000000..e49f9a8c81 --- /dev/null +++ b/api/core/workflow/nodes/human_input/human_input_node.py @@ -0,0 +1,132 @@ +from collections.abc import Mapping +from typing import Any + +from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType, WorkflowNodeExecutionStatus +from core.workflow.node_events import NodeRunResult, PauseRequestedEvent +from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig +from core.workflow.nodes.base.node import Node + +from .entities import HumanInputNodeData + + +class HumanInputNode(Node): + node_type = NodeType.HUMAN_INPUT + execution_type = NodeExecutionType.BRANCH + + _BRANCH_SELECTION_KEYS: tuple[str, ...]
= ( + "edge_source_handle", + "edgeSourceHandle", + "source_handle", + "selected_branch", + "selectedBranch", + "branch", + "branch_id", + "branchId", + "handle", + ) + + _node_data: HumanInputNodeData + + def init_node_data(self, data: Mapping[str, Any]) -> None: + self._node_data = HumanInputNodeData(**data) + + def get_base_node_data(self) -> BaseNodeData: + return self._node_data + + @classmethod + def version(cls) -> str: + return "1" + + def _get_error_strategy(self) -> ErrorStrategy | None: + return self._node_data.error_strategy + + def _get_retry_config(self) -> RetryConfig: + return self._node_data.retry_config + + def _get_title(self) -> str: + return self._node_data.title + + def _get_description(self) -> str | None: + return self._node_data.desc + + def _get_default_value_dict(self) -> dict[str, Any]: + return self._node_data.default_value_dict + + def _run(self): # type: ignore[override] + if self._is_completion_ready(): + branch_handle = self._resolve_branch_selection() + return NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + outputs={}, + edge_source_handle=branch_handle or "source", + ) + + return self._pause_generator() + + def _pause_generator(self): + yield PauseRequestedEvent(reason=self._node_data.pause_reason) + + def _is_completion_ready(self) -> bool: + """Determine whether all required inputs are satisfied.""" + + if not self._node_data.required_variables: + return False + + variable_pool = self.graph_runtime_state.variable_pool + + for selector_str in self._node_data.required_variables: + parts = selector_str.split(".") + if len(parts) != 2: + return False + segment = variable_pool.get(parts) + if segment is None: + return False + + return True + + def _resolve_branch_selection(self) -> str | None: + """Determine the branch handle selected by human input if available.""" + + variable_pool = self.graph_runtime_state.variable_pool + + for key in self._BRANCH_SELECTION_KEYS: + handle = self._extract_branch_handle(variable_pool.get((self.id, key))) + if handle: + return handle + + default_values = self._node_data.default_value_dict + for key in self._BRANCH_SELECTION_KEYS: + handle = self._normalize_branch_value(default_values.get(key)) + if handle: + return handle + + return None + + @staticmethod + def _extract_branch_handle(segment: Any) -> str | None: + if segment is None: + return None + + candidate = getattr(segment, "to_object", None) + raw_value = candidate() if callable(candidate) else getattr(segment, "value", None) + if raw_value is None: + return None + + return HumanInputNode._normalize_branch_value(raw_value) + + @staticmethod + def _normalize_branch_value(value: Any) -> str | None: + if value is None: + return None + + if isinstance(value, str): + stripped = value.strip() + return stripped or None + + if isinstance(value, Mapping): + for key in ("handle", "edge_source_handle", "edgeSourceHandle", "branch", "id", "value"): + candidate = value.get(key) + if isinstance(candidate, str) and candidate: + return candidate + + return None diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 075f6f8444..165e529714 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -3,12 +3,12 @@ from typing import Any, Literal from typing_extensions import deprecated -from core.workflow.entities import VariablePool from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType, WorkflowNodeExecutionStatus from 
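A self-contained rerun of the branch-selection normalization above: _normalize_branch_value accepts either a raw string or a mapping produced by the UI, so a resumed run can hand back the chosen branch in several shapes. The logic is copied from the node; the sample inputs are illustrative:

from collections.abc import Mapping
from typing import Any

def normalize_branch_value(value: Any) -> str | None:
    # Mirrors HumanInputNode._normalize_branch_value above.
    if value is None:
        return None
    if isinstance(value, str):
        stripped = value.strip()
        return stripped or None
    if isinstance(value, Mapping):
        for key in ("handle", "edge_source_handle", "edgeSourceHandle", "branch", "id", "value"):
            candidate = value.get(key)
            if isinstance(candidate, str) and candidate:
                return candidate
    return None

assert normalize_branch_value("  approve ") == "approve"
assert normalize_branch_value({"edgeSourceHandle": "reject"}) == "reject"
assert normalize_branch_value({"handle": ""}) is None  # empty strings never select a branch
assert normalize_branch_value(42) is None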
core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.if_else.entities import IfElseNodeData +from core.workflow.runtime import VariablePool from core.workflow.utils.condition.entities import Condition from core.workflow.utils.condition.processor import ConditionProcessor @@ -83,7 +83,7 @@ class IfElseNode(Node): else: # TODO: Update database then remove this # Fallback to old structure if cases are not defined - input_conditions, group_result, final_result = _should_not_use_old_function( # ty: ignore [deprecated] + input_conditions, group_result, final_result = _should_not_use_old_function( # pyright: ignore [reportDeprecated] condition_processor=condition_processor, variable_pool=self.graph_runtime_state.variable_pool, conditions=self._node_data.conditions or [], diff --git a/api/core/workflow/nodes/iteration/entities.py b/api/core/workflow/nodes/iteration/entities.py index ed4ab2c11c..63a41ec755 100644 --- a/api/core/workflow/nodes/iteration/entities.py +++ b/api/core/workflow/nodes/iteration/entities.py @@ -23,6 +23,7 @@ class IterationNodeData(BaseIterationNodeData): is_parallel: bool = False # open the parallel mode or not parallel_nums: int = 10 # the numbers of parallel error_handle_mode: ErrorHandleMode = ErrorHandleMode.TERMINATED # how to handle the error + flatten_output: bool = True # whether to flatten the output array if all elements are lists class IterationStartNodeData(BaseNodeData): diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 1a417b5739..ce83352dcb 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -8,9 +8,11 @@ from typing import TYPE_CHECKING, Any, NewType, cast from flask import Flask, current_app from typing_extensions import TypeIs +from core.model_runtime.entities.llm_entities import LLMUsage from core.variables import IntegerVariable, NoneSegment from core.variables.segments import ArrayAnySegment, ArraySegment -from core.workflow.entities import VariablePool +from core.variables.variables import VariableUnion +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID from core.workflow.enums import ( ErrorStrategy, NodeExecutionType, @@ -33,9 +35,11 @@ from core.workflow.node_events import ( NodeRunResult, StreamCompletedEvent, ) +from core.workflow.nodes.base import LLMUsageTrackingMixin from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData +from core.workflow.runtime import VariablePool from libs.datetime_utils import naive_utc_now from libs.flask_utils import preserve_flask_contexts @@ -56,7 +60,7 @@ logger = logging.getLogger(__name__) EmptyArraySegment = NewType("EmptyArraySegment", ArraySegment) -class IterationNode(Node): +class IterationNode(LLMUsageTrackingMixin, Node): """ Iteration Node. 
""" @@ -93,7 +97,8 @@ class IterationNode(Node): "config": { "is_parallel": False, "parallel_nums": 10, - "error_handle_mode": ErrorHandleMode.TERMINATED.value, + "error_handle_mode": ErrorHandleMode.TERMINATED, + "flatten_output": True, }, } @@ -116,6 +121,7 @@ class IterationNode(Node): started_at = naive_utc_now() iter_run_map: dict[str, float] = {} outputs: list[object] = [] + usage_accumulator = [LLMUsage.empty_usage()] yield IterationStartedEvent( start_at=started_at, @@ -128,22 +134,27 @@ class IterationNode(Node): iterator_list_value=iterator_list_value, outputs=outputs, iter_run_map=iter_run_map, + usage_accumulator=usage_accumulator, ) + self._accumulate_usage(usage_accumulator[0]) yield from self._handle_iteration_success( started_at=started_at, inputs=inputs, outputs=outputs, iterator_list_value=iterator_list_value, iter_run_map=iter_run_map, + usage=usage_accumulator[0], ) except IterationNodeError as e: + self._accumulate_usage(usage_accumulator[0]) yield from self._handle_iteration_failure( started_at=started_at, inputs=inputs, outputs=outputs, iterator_list_value=iterator_list_value, iter_run_map=iter_run_map, + usage=usage_accumulator[0], error=e, ) @@ -194,6 +205,7 @@ class IterationNode(Node): iterator_list_value: Sequence[object], outputs: list[object], iter_run_map: dict[str, float], + usage_accumulator: list[LLMUsage], ) -> Generator[GraphNodeEventBase | NodeEventBase, None, None]: if self._node_data.is_parallel: # Parallel mode execution @@ -201,6 +213,7 @@ class IterationNode(Node): iterator_list_value=iterator_list_value, outputs=outputs, iter_run_map=iter_run_map, + usage_accumulator=usage_accumulator, ) else: # Sequential mode execution @@ -217,8 +230,18 @@ class IterationNode(Node): graph_engine=graph_engine, ) + # Sync conversation variables after each iteration completes + self._sync_conversation_variables_from_snapshot( + self._extract_conversation_variable_snapshot( + variable_pool=graph_engine.graph_runtime_state.variable_pool + ) + ) + # Update the total tokens from this iteration self.graph_runtime_state.total_tokens += graph_engine.graph_runtime_state.total_tokens + usage_accumulator[0] = self._merge_usage( + usage_accumulator[0], graph_engine.graph_runtime_state.llm_usage + ) iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds() def _execute_parallel_iterations( @@ -226,6 +249,7 @@ class IterationNode(Node): iterator_list_value: Sequence[object], outputs: list[object], iter_run_map: dict[str, float], + usage_accumulator: list[LLMUsage], ) -> Generator[GraphNodeEventBase | NodeEventBase, None, None]: # Initialize outputs list with None values to maintain order outputs.extend([None] * len(iterator_list_value)) @@ -235,7 +259,19 @@ class IterationNode(Node): with ThreadPoolExecutor(max_workers=max_workers) as executor: # Submit all iteration tasks - future_to_index: dict[Future[tuple[datetime, list[GraphNodeEventBase], object | None, int]], int] = {} + future_to_index: dict[ + Future[ + tuple[ + datetime, + list[GraphNodeEventBase], + object | None, + int, + dict[str, VariableUnion], + LLMUsage, + ] + ], + int, + ] = {} for index, item in enumerate(iterator_list_value): yield IterationNextEvent(index=index) future = executor.submit( @@ -252,7 +288,14 @@ class IterationNode(Node): index = future_to_index[future] try: result = future.result() - iter_start_at, events, output_value, tokens_used = result + ( + iter_start_at, + events, + output_value, + tokens_used, + conversation_snapshot, + iteration_usage, + ) = 
result # Update outputs at the correct index outputs[index] = output_value @@ -264,6 +307,11 @@ class IterationNode(Node): self.graph_runtime_state.total_tokens += tokens_used iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds() + usage_accumulator[0] = self._merge_usage(usage_accumulator[0], iteration_usage) + + # Sync conversation variables after iteration completion + self._sync_conversation_variables_from_snapshot(conversation_snapshot) + except Exception as e: # Handle errors based on error_handle_mode match self._node_data.error_handle_mode: @@ -288,7 +336,7 @@ class IterationNode(Node): item: object, flask_app: Flask, context_vars: contextvars.Context, - ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int]: + ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int, dict[str, VariableUnion], LLMUsage]: """Execute a single iteration in parallel mode and return results.""" with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars): iter_start_at = datetime.now(UTC).replace(tzinfo=None) @@ -307,8 +355,18 @@ class IterationNode(Node): # Get the output value from the temporary outputs list output_value = outputs_temp[0] if outputs_temp else None + conversation_snapshot = self._extract_conversation_variable_snapshot( + variable_pool=graph_engine.graph_runtime_state.variable_pool + ) - return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens + return ( + iter_start_at, + events, + output_value, + graph_engine.graph_runtime_state.total_tokens, + conversation_snapshot, + graph_engine.graph_runtime_state.llm_usage, + ) def _handle_iteration_success( self, @@ -317,14 +375,21 @@ class IterationNode(Node): outputs: list[object], iterator_list_value: Sequence[object], iter_run_map: dict[str, float], + *, + usage: LLMUsage, ) -> Generator[NodeEventBase, None, None]: + # Flatten the list of lists if all outputs are lists + flattened_outputs = self._flatten_outputs_if_needed(outputs) + yield IterationSucceededEvent( start_at=started_at, inputs=inputs, - outputs={"output": outputs}, + outputs={"output": flattened_outputs}, steps=len(iterator_list_value), metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: usage.currency, WorkflowNodeExecutionMetadataKey.ITERATION_DURATION_MAP: iter_run_map, }, ) @@ -333,13 +398,49 @@ class IterationNode(Node): yield StreamCompletedEvent( node_run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"output": outputs}, + outputs={"output": flattened_outputs}, metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: usage.currency, }, + llm_usage=usage, ) ) + def _flatten_outputs_if_needed(self, outputs: list[object]) -> list[object]: + """ + Flatten the outputs list if all elements are lists. + This maintains backward compatibility with version 1.8.1 behavior. + + If flatten_output is False, returns outputs as-is (nested structure). + If flatten_output is True (default), flattens the list if all elements are lists. 
+ """ + # If flatten_output is disabled, return outputs as-is + if not self._node_data.flatten_output: + return outputs + + if not outputs: + return outputs + + # Check if all non-None outputs are lists + non_none_outputs = [output for output in outputs if output is not None] + if not non_none_outputs: + return outputs + + if all(isinstance(output, list) for output in non_none_outputs): + # Flatten the list of lists + flattened: list[Any] = [] + for output in outputs: + if isinstance(output, list): + flattened.extend(output) + elif output is not None: + # This shouldn't happen based on our check, but handle it gracefully + flattened.append(output) + return flattened + + return outputs + def _handle_iteration_failure( self, started_at: datetime, @@ -347,15 +448,22 @@ class IterationNode(Node): outputs: list[object], iterator_list_value: Sequence[object], iter_run_map: dict[str, float], + *, + usage: LLMUsage, error: IterationNodeError, ) -> Generator[NodeEventBase, None, None]: + # Flatten the list of lists if all outputs are lists (even in failure case) + flattened_outputs = self._flatten_outputs_if_needed(outputs) + yield IterationFailedEvent( start_at=started_at, inputs=inputs, - outputs={"output": outputs}, + outputs={"output": flattened_outputs}, steps=len(iterator_list_value), metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: usage.currency, WorkflowNodeExecutionMetadataKey.ITERATION_DURATION_MAP: iter_run_map, }, error=str(error), @@ -364,6 +472,12 @@ class IterationNode(Node): node_run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, error=str(error), + metadata={ + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: usage.currency, + }, + llm_usage=usage, ) ) @@ -430,6 +544,23 @@ class IterationNode(Node): return variable_mapping + def _extract_conversation_variable_snapshot(self, *, variable_pool: VariablePool) -> dict[str, VariableUnion]: + conversation_variables = variable_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {}) + return {name: variable.model_copy(deep=True) for name, variable in conversation_variables.items()} + + def _sync_conversation_variables_from_snapshot(self, snapshot: dict[str, VariableUnion]) -> None: + parent_pool = self.graph_runtime_state.variable_pool + parent_conversations = parent_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {}) + + current_keys = set(parent_conversations.keys()) + snapshot_keys = set(snapshot.keys()) + + for removed_key in current_keys - snapshot_keys: + parent_pool.remove((CONVERSATION_VARIABLE_NODE_ID, removed_key)) + + for name, variable in snapshot.items(): + parent_pool.add((CONVERSATION_VARIABLE_NODE_ID, name), variable) + def _append_iteration_info_to_event( self, event: GraphNodeEventBase, @@ -484,11 +615,12 @@ class IterationNode(Node): def _create_graph_engine(self, index: int, item: object): # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel from core.workflow.nodes.node_factory 
import DifyNodeFactory + from core.workflow.runtime import GraphRuntimeState # Create GraphInitParams from node attributes graph_init_params = GraphInitParams( diff --git a/api/core/workflow/nodes/iteration/iteration_start_node.py b/api/core/workflow/nodes/iteration/iteration_start_node.py index 80f39ccebc..90b7f4539b 100644 --- a/api/core/workflow/nodes/iteration/iteration_start_node.py +++ b/api/core/workflow/nodes/iteration/iteration_start_node.py @@ -18,7 +18,7 @@ class IterationStartNode(Node): _node_data: IterationStartNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = IterationStartNodeData(**data) + self._node_data = IterationStartNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/knowledge_index/entities.py b/api/core/workflow/nodes/knowledge_index/entities.py index c79373afd5..3daca90b9b 100644 --- a/api/core/workflow/nodes/knowledge_index/entities.py +++ b/api/core/workflow/nodes/knowledge_index/entities.py @@ -2,6 +2,7 @@ from typing import Literal, Union from pydantic import BaseModel +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.workflow.nodes.base import BaseNodeData @@ -63,7 +64,7 @@ class RetrievalSetting(BaseModel): Retrieval Setting. """ - search_method: Literal["semantic_search", "keyword_search", "full_text_search", "hybrid_search"] + search_method: RetrievalMethod top_k: int score_threshold: float | None = 0.5 score_threshold_enabled: bool = False diff --git a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py index 4b6bad1aa3..2ba1e5e1c5 100644 --- a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py +++ b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py @@ -2,20 +2,20 @@ import datetime import logging import time from collections.abc import Mapping -from typing import Any, cast +from typing import Any from sqlalchemy import func, select from core.app.entities.app_invoke_entities import InvokeFrom from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from core.rag.retrieval.retrieval_methods import RetrievalMethod -from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType, SystemVariableKey from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.base.template import Template +from core.workflow.runtime import VariablePool from extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment @@ -27,7 +27,7 @@ from .exc import ( logger = logging.getLogger(__name__) default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, @@ -62,7 +62,7 @@ class KnowledgeIndexNode(Node): return self._node_data def _run(self) -> NodeRunResult: # type: ignore - node_data = cast(KnowledgeIndexNodeData, self._node_data) + node_data = self._node_data variable_pool = self.graph_runtime_state.variable_pool dataset_id = variable_pool.get(["sys", 
SystemVariableKey.DATASET_ID]) if not dataset_id: @@ -77,7 +77,7 @@ class KnowledgeIndexNode(Node): raise KnowledgeIndexNodeError("Index chunk variable is required.") invoke_from = variable_pool.get(["sys", SystemVariableKey.INVOKE_FROM]) if invoke_from: - is_preview = invoke_from.value == InvokeFrom.DEBUGGER.value + is_preview = invoke_from.value == InvokeFrom.DEBUGGER else: is_preview = False chunks = variable.value @@ -136,6 +136,11 @@ class KnowledgeIndexNode(Node): document = db.session.query(Document).filter_by(id=document_id.value).first() if not document: raise KnowledgeIndexNodeError(f"Document {document_id.value} not found.") + doc_id_value = document.id + ds_id_value = dataset.id + dataset_name_value = dataset.name + document_name_value = document.name + created_at_value = document.created_at # chunk nodes by chunk size indexing_start_at = time.perf_counter() index_processor = IndexProcessorFactory(dataset.chunk_structure).init_index_processor() @@ -161,16 +166,16 @@ class KnowledgeIndexNode(Node): document.word_count = ( db.session.query(func.sum(DocumentSegment.word_count)) .where( - DocumentSegment.document_id == document.id, - DocumentSegment.dataset_id == dataset.id, + DocumentSegment.document_id == doc_id_value, + DocumentSegment.dataset_id == ds_id_value, ) .scalar() ) db.session.add(document) # update document segment status db.session.query(DocumentSegment).where( - DocumentSegment.document_id == document.id, - DocumentSegment.dataset_id == dataset.id, + DocumentSegment.document_id == doc_id_value, + DocumentSegment.dataset_id == ds_id_value, ).update( { DocumentSegment.status: "completed", @@ -182,13 +187,13 @@ class KnowledgeIndexNode(Node): db.session.commit() return { - "dataset_id": dataset.id, - "dataset_name": dataset.name, + "dataset_id": ds_id_value, + "dataset_name": dataset_name_value, "batch": batch.value, - "document_id": document.id, - "document_name": document.name, - "created_at": document.created_at.timestamp(), - "display_status": document.indexing_status, + "document_id": doc_id_value, + "document_name": document_name_value, + "created_at": created_at_value.timestamp(), + "display_status": "completed", } def _get_preview_output(self, chunk_structure: str, chunks: Any) -> Mapping[str, Any]: diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 1afb2e05b9..4a63900527 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -15,14 +15,11 @@ from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEnti from core.entities.agent_entities import PlanningStrategy from core.entities.model_entities import ModelStatus from core.model_manager import ModelInstance, ModelManager -from core.model_runtime.entities.message_entities import ( - PromptMessageRole, -) -from core.model_runtime.entities.model_entities import ( - ModelFeature, - ModelType, -) +from core.model_runtime.entities.llm_entities import LLMUsage +from core.model_runtime.entities.message_entities import PromptMessageRole +from core.model_runtime.entities.model_entities import ModelFeature, ModelType from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel +from core.model_runtime.utils.encoders import jsonable_encoder from core.prompt.simple_prompt_transform import ModelMode from core.rag.datasource.retrieval_service import 
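The locals captured above (doc_id_value, ds_id_value, and friends) look redundant, but they are read again after db.session.commit(); presumably the point is that SQLAlchemy expires instance attributes on commit by default, so a later attribute access triggers a refresh query or, on a closed session, raises DetachedInstanceError. A minimal illustration of that failure mode, assuming SQLAlchemy 2.x (Doc is a made-up model, not one from the codebase):

from sqlalchemy import Integer, String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Doc(Base):
    __tablename__ = "doc"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    name: Mapped[str] = mapped_column(String)

engine = create_engine("sqlite://")  # in-memory database for the demo
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Doc(name="spec"))
    session.commit()

    doc = session.scalars(select(Doc)).one()
    doc_id, doc_name = doc.id, doc.name  # snapshot while attributes are loaded
    session.commit()                     # expires doc's attributes

# Outside the session, doc.id would raise DetachedInstanceError,
# but the snapshotted locals remain usable:
print(doc_id, doc_name)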
RetrievalService from core.rag.entities.metadata_entities import Condition, MetadataCondition @@ -33,8 +30,14 @@ from core.variables import ( ) from core.variables.segments import ArrayObjectSegment from core.workflow.entities import GraphInitParams -from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionStatus +from core.workflow.enums import ( + ErrorStrategy, + NodeType, + WorkflowNodeExecutionMetadataKey, + WorkflowNodeExecutionStatus, +) from core.workflow.node_events import ModelInvokeCompletedEvent, NodeRunResult +from core.workflow.nodes.base import LLMUsageTrackingMixin from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.knowledge_retrieval.template_prompts import ( @@ -67,12 +70,12 @@ from .exc import ( if TYPE_CHECKING: from core.file.models import File - from core.workflow.entities import GraphRuntimeState + from core.workflow.runtime import GraphRuntimeState logger = logging.getLogger(__name__) default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 4, @@ -80,7 +83,7 @@ default_retrieval_model = { } -class KnowledgeRetrievalNode(Node): +class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node): node_type = NodeType.KNOWLEDGE_RETRIEVAL _node_data: KnowledgeRetrievalNodeData @@ -107,7 +110,7 @@ class KnowledgeRetrievalNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. - self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( @@ -141,7 +144,7 @@ class KnowledgeRetrievalNode(Node): def version(cls): return "1" - def _run(self) -> NodeRunResult: # type: ignore + def _run(self) -> NodeRunResult: # extract variables variable = self.graph_runtime_state.variable_pool.get(self._node_data.query_variable_selector) if not isinstance(variable, StringSegment): @@ -182,14 +185,21 @@ class KnowledgeRetrievalNode(Node): ) # retrieve knowledge + usage = LLMUsage.empty_usage() try: - results = self._fetch_dataset_retriever(node_data=self._node_data, query=query) + results, usage = self._fetch_dataset_retriever(node_data=self._node_data, query=query) outputs = {"result": ArrayObjectSegment(value=results)} return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, - process_data={}, + process_data={"usage": jsonable_encoder(usage)}, outputs=outputs, # type: ignore + metadata={ + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: usage.currency, + }, + llm_usage=usage, ) except KnowledgeRetrievalNodeError as e: @@ -199,6 +209,7 @@ class KnowledgeRetrievalNode(Node): inputs=variables, error=str(e), error_type=type(e).__name__, + llm_usage=usage, ) # Temporary handle all exceptions from DatasetRetrieval class here. 
except Exception as e: @@ -207,11 +218,15 @@ class KnowledgeRetrievalNode(Node): inputs=variables, error=str(e), error_type=type(e).__name__, + llm_usage=usage, ) finally: db.session.close() - def _fetch_dataset_retriever(self, node_data: KnowledgeRetrievalNodeData, query: str) -> list[dict[str, Any]]: + def _fetch_dataset_retriever( + self, node_data: KnowledgeRetrievalNodeData, query: str + ) -> tuple[list[dict[str, Any]], LLMUsage]: + usage = LLMUsage.empty_usage() available_datasets = [] dataset_ids = node_data.dataset_ids @@ -245,9 +260,10 @@ class KnowledgeRetrievalNode(Node): if not dataset: continue available_datasets.append(dataset) - metadata_filter_document_ids, metadata_condition = self._get_metadata_filter_condition( + metadata_filter_document_ids, metadata_condition, metadata_usage = self._get_metadata_filter_condition( [dataset.id for dataset in available_datasets], query, node_data ) + usage = self._merge_usage(usage, metadata_usage) all_documents = [] dataset_retrieval = DatasetRetrieval() if node_data.retrieval_mode == DatasetRetrieveConfigEntity.RetrieveStrategy.SINGLE: @@ -330,6 +346,8 @@ class KnowledgeRetrievalNode(Node): metadata_filter_document_ids=metadata_filter_document_ids, metadata_condition=metadata_condition, ) + usage = self._merge_usage(usage, dataset_retrieval.llm_usage) + dify_documents = [item for item in all_documents if item.provider == "dify"] external_documents = [item for item in all_documents if item.provider == "external"] retrieval_resource_list = [] @@ -406,11 +424,12 @@ class KnowledgeRetrievalNode(Node): ) for position, item in enumerate(retrieval_resource_list, start=1): item["metadata"]["position"] = position - return retrieval_resource_list + return retrieval_resource_list, usage def _get_metadata_filter_condition( self, dataset_ids: list, query: str, node_data: KnowledgeRetrievalNodeData - ) -> tuple[dict[str, list[str]] | None, MetadataCondition | None]: + ) -> tuple[dict[str, list[str]] | None, MetadataCondition | None, LLMUsage]: + usage = LLMUsage.empty_usage() document_query = db.session.query(Document).where( Document.dataset_id.in_(dataset_ids), Document.indexing_status == "completed", @@ -420,9 +439,12 @@ class KnowledgeRetrievalNode(Node): filters: list[Any] = [] metadata_condition = None if node_data.metadata_filtering_mode == "disabled": - return None, None + return None, None, usage elif node_data.metadata_filtering_mode == "automatic": - automatic_metadata_filters = self._automatic_metadata_filter_func(dataset_ids, query, node_data) + automatic_metadata_filters, automatic_usage = self._automatic_metadata_filter_func( + dataset_ids, query, node_data + ) + usage = self._merge_usage(usage, automatic_usage) if automatic_metadata_filters: conditions = [] for sequence, filter in enumerate(automatic_metadata_filters): @@ -443,7 +465,7 @@ class KnowledgeRetrievalNode(Node): metadata_condition = MetadataCondition( logical_operator=node_data.metadata_filtering_conditions.logical_operator if node_data.metadata_filtering_conditions - else "or", # type: ignore + else "or", conditions=conditions, ) elif node_data.metadata_filtering_mode == "manual": @@ -457,10 +479,10 @@ class KnowledgeRetrievalNode(Node): expected_value = self.graph_runtime_state.variable_pool.convert_template( expected_value ).value[0] - if expected_value.value_type in {"number", "integer", "float"}: # type: ignore - expected_value = expected_value.value # type: ignore - elif expected_value.value_type == "string": # type: ignore - expected_value = re.sub(r"[\r\n\t]+", " 
", expected_value.text).strip() # type: ignore + if expected_value.value_type in {"number", "integer", "float"}: + expected_value = expected_value.value + elif expected_value.value_type == "string": + expected_value = re.sub(r"[\r\n\t]+", " ", expected_value.text).strip() else: raise ValueError("Invalid expected metadata value type") conditions.append( @@ -487,7 +509,7 @@ class KnowledgeRetrievalNode(Node): if ( node_data.metadata_filtering_conditions and node_data.metadata_filtering_conditions.logical_operator == "and" - ): # type: ignore + ): document_query = document_query.where(and_(*filters)) else: document_query = document_query.where(or_(*filters)) @@ -496,11 +518,12 @@ class KnowledgeRetrievalNode(Node): metadata_filter_document_ids = defaultdict(list) if documents else None # type: ignore for document in documents: metadata_filter_document_ids[document.dataset_id].append(document.id) # type: ignore - return metadata_filter_document_ids, metadata_condition + return metadata_filter_document_ids, metadata_condition, usage def _automatic_metadata_filter_func( self, dataset_ids: list, query: str, node_data: KnowledgeRetrievalNodeData - ) -> list[dict[str, Any]]: + ) -> tuple[list[dict[str, Any]], LLMUsage]: + usage = LLMUsage.empty_usage() # get all metadata field stmt = select(DatasetMetadata).where(DatasetMetadata.dataset_id.in_(dataset_ids)) metadata_fields = db.session.scalars(stmt).all() @@ -548,6 +571,7 @@ class KnowledgeRetrievalNode(Node): for event in generator: if isinstance(event, ModelInvokeCompletedEvent): result_text = event.text + usage = self._merge_usage(usage, event.usage) break result_text_json = parse_and_check_json_markdown(result_text, []) @@ -564,8 +588,8 @@ class KnowledgeRetrievalNode(Node): } ) except Exception: - return [] - return automatic_metadata_filters + return [], usage + return automatic_metadata_filters, usage def _process_metadata_filter_func( self, sequence: int, condition: str, metadata_name: str, value: Any, filters: list[Any] diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index 7a31d69221..180eb2ad90 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -41,7 +41,7 @@ class ListOperatorNode(Node): _node_data: ListOperatorNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = ListOperatorNodeData(**data) + self._node_data = ListOperatorNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy @@ -161,6 +161,8 @@ class ListOperatorNode(Node): elif isinstance(variable, ArrayFileSegment): if isinstance(condition.value, str): value = self.graph_runtime_state.variable_pool.convert_template(condition.value).text + elif isinstance(condition.value, bool): + raise ValueError(f"File filter expects a string value, got {type(condition.value)}") else: value = condition.value filter_func = _get_file_filter_func( diff --git a/api/core/workflow/nodes/llm/file_saver.py b/api/core/workflow/nodes/llm/file_saver.py index 81f2df0891..3f32fa894a 100644 --- a/api/core/workflow/nodes/llm/file_saver.py +++ b/api/core/workflow/nodes/llm/file_saver.py @@ -46,7 +46,7 @@ class LLMFileSaver(tp.Protocol): dot (`.`). For example, `.py` and `.tar.gz` are both valid values, while `py` and `tar.gz` are not. 
""" - pass + raise NotImplementedError() def save_remote_url(self, url: str, file_type: FileType) -> File: """save_remote_url saves the file from a remote url returned by LLM. @@ -56,7 +56,7 @@ class LLMFileSaver(tp.Protocol): :param url: the url of the file. :param file_type: the file type of the file, check `FileType` enum for reference. """ - pass + raise NotImplementedError() EngineFactory: tp.TypeAlias = tp.Callable[[], Engine] diff --git a/api/core/workflow/nodes/llm/llm_utils.py b/api/core/workflow/nodes/llm/llm_utils.py index ad969cdad1..0c545469bc 100644 --- a/api/core/workflow/nodes/llm/llm_utils.py +++ b/api/core/workflow/nodes/llm/llm_utils.py @@ -15,9 +15,9 @@ from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.prompt.entities.advanced_prompt_entities import MemoryConfig from core.variables.segments import ArrayAnySegment, ArrayFileSegment, FileSegment, NoneSegment, StringSegment -from core.workflow.entities import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.nodes.llm.entities import ModelConfig +from core.workflow.runtime import VariablePool from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from models.model import Conversation @@ -92,7 +92,7 @@ def fetch_memory( return None # get conversation id - conversation_id_variable = variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID.value]) + conversation_id_variable = variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID]) if not isinstance(conversation_id_variable, StringSegment): return None conversation_id = conversation_id_variable.value @@ -143,7 +143,7 @@ def deduct_llm_quota(tenant_id: str, model_instance: ModelInstance, usage: LLMUs Provider.tenant_id == tenant_id, # TODO: Use provider name with prefix after the data migration. Provider.provider_name == ModelProviderID(model_instance.provider).provider_name, - Provider.provider_type == ProviderType.SYSTEM.value, + Provider.provider_type == ProviderType.SYSTEM, Provider.quota_type == system_configuration.current_quota_type.value, Provider.quota_limit > Provider.quota_used, ) diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 7767440be6..1644f683bf 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -23,6 +23,7 @@ from core.model_runtime.entities.llm_entities import ( LLMResult, LLMResultChunk, LLMResultChunkWithStructuredOutput, + LLMResultWithStructuredOutput, LLMStructuredOutput, LLMUsage, ) @@ -51,7 +52,7 @@ from core.variables import ( StringSegment, ) from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID -from core.workflow.entities import GraphInitParams, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import ( ErrorStrategy, NodeType, @@ -70,6 +71,7 @@ from core.workflow.node_events import ( from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig, VariableSelector from core.workflow.nodes.base.node import Node from core.workflow.nodes.base.variable_template_parser import VariableTemplateParser +from core.workflow.runtime import VariablePool from . 
import llm_utils from .entities import ( @@ -92,7 +94,7 @@ from .file_saver import FileSaverImpl, LLMFileSaver if TYPE_CHECKING: from core.file.models import File - from core.workflow.entities import GraphRuntimeState + from core.workflow.runtime import GraphRuntimeState logger = logging.getLogger(__name__) @@ -127,7 +129,7 @@ class LLMNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. - self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( @@ -165,6 +167,7 @@ class LLMNode(Node): node_inputs: dict[str, Any] = {} process_data: dict[str, Any] = {} result_text = "" + clean_text = "" usage = LLMUsage.empty_usage() finish_reason = None reasoning_content = None @@ -278,6 +281,13 @@ class LLMNode(Node): # Extract clean text from tags clean_text, _ = LLMNode._split_reasoning(result_text, self._node_data.reasoning_format) + # Process structured output if available from the event. + structured_output = ( + LLMStructuredOutput(structured_output=event.structured_output) + if event.structured_output + else None + ) + # deduct quota llm_utils.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage) break @@ -431,10 +441,14 @@ class LLMNode(Node): usage = LLMUsage.empty_usage() finish_reason = None full_text_buffer = io.StringIO() + collected_structured_output = None # Collect structured_output from streaming chunks # Consume the invoke result and handle generator exception try: for result in invoke_result: if isinstance(result, LLMResultChunkWithStructuredOutput): + # Collect structured_output from the chunk + if result.structured_output is not None: + collected_structured_output = dict(result.structured_output) yield result if isinstance(result, LLMResultChunk): contents = result.delta.message.content @@ -482,6 +496,8 @@ class LLMNode(Node): finish_reason=finish_reason, # Reasoning content for workflow variables and downstream nodes reasoning_content=reasoning_content, + # Pass structured output if collected from streaming chunks + structured_output=collected_structured_output, ) @staticmethod @@ -936,7 +952,7 @@ class LLMNode(Node): variable_mapping["#files#"] = typed_node_data.vision.configs.variable_selector if typed_node_data.memory: - variable_mapping["#sys.query#"] = ["sys", SystemVariableKey.QUERY.value] + variable_mapping["#sys.query#"] = ["sys", SystemVariableKey.QUERY] if typed_node_data.prompt_config: enable_jinja = False @@ -1048,7 +1064,7 @@ class LLMNode(Node): @staticmethod def handle_blocking_result( *, - invoke_result: LLMResult, + invoke_result: LLMResult | LLMResultWithStructuredOutput, saver: LLMFileSaver, file_outputs: list["File"], reasoning_format: Literal["separated", "tagged"] = "tagged", @@ -1079,6 +1095,8 @@ class LLMNode(Node): finish_reason=None, # Reasoning content for workflow variables and downstream nodes reasoning_content=reasoning_content, + # Pass structured output if enabled + structured_output=getattr(invoke_result, "structured_output", None), ) @staticmethod diff --git a/api/core/workflow/nodes/loop/loop_end_node.py b/api/core/workflow/nodes/loop/loop_end_node.py index 38aef06d24..e5bce1230c 100644 --- a/api/core/workflow/nodes/loop/loop_end_node.py +++ b/api/core/workflow/nodes/loop/loop_end_node.py @@ -18,7 +18,7 @@ class LoopEndNode(Node): _node_data: LoopEndNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = LoopEndNodeData(**data) + self._node_data = 
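The streaming path above keeps only the most recent structured_output seen on any chunk, then attaches it to the completion event. A compact sketch of that collection step, where Chunk stands in for LLMResultChunkWithStructuredOutput:

from collections.abc import Iterable
from dataclasses import dataclass

@dataclass
class Chunk:  # stand-in for LLMResultChunkWithStructuredOutput
    text: str
    structured_output: dict | None = None

def consume(chunks: Iterable[Chunk]) -> tuple[str, dict | None]:
    collected: dict | None = None
    parts: list[str] = []
    for chunk in chunks:
        if chunk.structured_output is not None:
            collected = dict(chunk.structured_output)  # last one wins
        parts.append(chunk.text)
    return "".join(parts), collected

text, structured = consume([
    Chunk('{"answer":'),
    Chunk(" 42}", structured_output={"answer": 42}),
])
assert text == '{"answer": 42}'
assert structured == {"answer": 42}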
LoopEndNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index 790975d556..ca39e5aa23 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -5,6 +5,7 @@ from collections.abc import Callable, Generator, Mapping, Sequence from datetime import datetime from typing import TYPE_CHECKING, Any, Literal, cast +from core.model_runtime.entities.llm_entities import LLMUsage from core.variables import Segment, SegmentType from core.workflow.enums import ( ErrorStrategy, @@ -27,6 +28,7 @@ from core.workflow.node_events import ( NodeRunResult, StreamCompletedEvent, ) +from core.workflow.nodes.base import LLMUsageTrackingMixin from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.loop.entities import LoopNodeData, LoopVariableData @@ -40,7 +42,7 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -class LoopNode(Node): +class LoopNode(LLMUsageTrackingMixin, Node): """ Loop Node. """ @@ -108,7 +110,7 @@ class LoopNode(Node): raise ValueError(f"Invalid value for loop variable {loop_variable.label}") variable_selector = [self._node_id, loop_variable.label] variable = segment_to_variable(segment=processed_segment, selector=variable_selector) - self.graph_runtime_state.variable_pool.add(variable_selector, variable) + self.graph_runtime_state.variable_pool.add(variable_selector, variable.value) loop_variable_selectors[loop_variable.label] = variable_selector inputs[loop_variable.label] = processed_segment.value @@ -117,6 +119,7 @@ class LoopNode(Node): loop_duration_map: dict[str, float] = {} single_loop_variable_map: dict[str, dict[str, Any]] = {} # single loop variable output + loop_usage = LLMUsage.empty_usage() # Start Loop event yield LoopStartedEvent( @@ -163,6 +166,9 @@ class LoopNode(Node): # Update the total tokens from this iteration cost_tokens += graph_engine.graph_runtime_state.total_tokens + # Accumulate usage from the sub-graph execution + loop_usage = self._merge_usage(loop_usage, graph_engine.graph_runtime_state.llm_usage) + # Collect loop variable values after iteration single_loop_variable = {} for key, selector in loop_variable_selectors.items(): @@ -189,6 +195,7 @@ class LoopNode(Node): ) self.graph_runtime_state.total_tokens += cost_tokens + self._accumulate_usage(loop_usage) # Loop completed successfully yield LoopSucceededEvent( start_at=start_at, @@ -196,7 +203,9 @@ class LoopNode(Node): outputs=self._node_data.outputs, steps=loop_count, metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: cost_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: loop_usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: loop_usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: loop_usage.currency, "completed_reason": "loop_break" if reach_break_condition else "loop_completed", WorkflowNodeExecutionMetadataKey.LOOP_DURATION_MAP: loop_duration_map, WorkflowNodeExecutionMetadataKey.LOOP_VARIABLE_MAP: single_loop_variable_map, @@ -207,22 +216,28 @@ class LoopNode(Node): node_run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: loop_usage.total_tokens, + 
WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: loop_usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: loop_usage.currency, WorkflowNodeExecutionMetadataKey.LOOP_DURATION_MAP: loop_duration_map, WorkflowNodeExecutionMetadataKey.LOOP_VARIABLE_MAP: single_loop_variable_map, }, outputs=self._node_data.outputs, inputs=inputs, + llm_usage=loop_usage, ) ) except Exception as e: + self._accumulate_usage(loop_usage) yield LoopFailedEvent( start_at=start_at, inputs=inputs, steps=loop_count, metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: loop_usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: loop_usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: loop_usage.currency, "completed_reason": "error", WorkflowNodeExecutionMetadataKey.LOOP_DURATION_MAP: loop_duration_map, WorkflowNodeExecutionMetadataKey.LOOP_VARIABLE_MAP: single_loop_variable_map, @@ -235,10 +250,13 @@ class LoopNode(Node): status=WorkflowNodeExecutionStatus.FAILED, error=str(e), metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: loop_usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: loop_usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: loop_usage.currency, WorkflowNodeExecutionMetadataKey.LOOP_DURATION_MAP: loop_duration_map, WorkflowNodeExecutionMetadataKey.LOOP_VARIABLE_MAP: single_loop_variable_map, }, + llm_usage=loop_usage, ) ) @@ -406,11 +424,12 @@ class LoopNode(Node): def _create_graph_engine(self, start_at: datetime, root_node_id: str): # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel from core.workflow.nodes.node_factory import DifyNodeFactory + from core.workflow.runtime import GraphRuntimeState # Create GraphInitParams from node attributes graph_init_params = GraphInitParams( diff --git a/api/core/workflow/nodes/loop/loop_start_node.py b/api/core/workflow/nodes/loop/loop_start_node.py index e777a8cbe9..e065dc90a0 100644 --- a/api/core/workflow/nodes/loop/loop_start_node.py +++ b/api/core/workflow/nodes/loop/loop_start_node.py @@ -18,7 +18,7 @@ class LoopStartNode(Node): _node_data: LoopStartNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = LoopStartNodeData(**data) + self._node_data = LoopStartNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/node_factory.py b/api/core/workflow/nodes/node_factory.py index df1d685909..84f63d57eb 100644 --- a/api/core/workflow/nodes/node_factory.py +++ b/api/core/workflow/nodes/node_factory.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, final from typing_extensions import override -from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType +from core.workflow.enums import NodeType from core.workflow.graph import NodeFactory from core.workflow.nodes.base.node import Node from libs.typing import is_str, is_str_dict @@ -10,7 +10,8 @@ from libs.typing import is_str, is_str_dict from .node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING if TYPE_CHECKING: - from core.workflow.entities import GraphInitParams, 
GraphRuntimeState + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState @final @@ -81,8 +82,4 @@ class DifyNodeFactory(NodeFactory): raise ValueError(f"Node {node_id} missing data information") node_instance.init_node_data(node_data) - # If node has fail branch, change execution type to branch - if node_instance.error_strategy == ErrorStrategy.FAIL_BRANCH: - node_instance.execution_type = NodeExecutionType.BRANCH - return node_instance diff --git a/api/core/workflow/nodes/node_mapping.py b/api/core/workflow/nodes/node_mapping.py index 3d3a1bec98..3ee28802f1 100644 --- a/api/core/workflow/nodes/node_mapping.py +++ b/api/core/workflow/nodes/node_mapping.py @@ -9,6 +9,7 @@ from core.workflow.nodes.datasource.datasource_node import DatasourceNode from core.workflow.nodes.document_extractor import DocumentExtractorNode from core.workflow.nodes.end.end_node import EndNode from core.workflow.nodes.http_request import HttpRequestNode +from core.workflow.nodes.human_input import HumanInputNode from core.workflow.nodes.if_else import IfElseNode from core.workflow.nodes.iteration import IterationNode, IterationStartNode from core.workflow.nodes.knowledge_index import KnowledgeIndexNode @@ -134,6 +135,10 @@ NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[Node]]] = { "2": AgentNode, "1": AgentNode, }, + NodeType.HUMAN_INPUT: { + LATEST_VERSION: HumanInputNode, + "1": HumanInputNode, + }, NodeType.DATASOURCE: { LATEST_VERSION: DatasourceNode, "1": DatasourceNode, diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py index 875a0598e0..e250650fef 100644 --- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py @@ -27,13 +27,13 @@ from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, Comp from core.prompt.simple_prompt_transform import ModelMode from core.prompt.utils.prompt_message_util import PromptMessageUtil from core.variables.types import ArrayValidation, SegmentType -from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base import variable_template_parser from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.llm import ModelConfig, llm_utils +from core.workflow.runtime import VariablePool from factories.variable_factory import build_segment_with_type from .entities import ParameterExtractorNodeData @@ -747,7 +747,7 @@ class ParameterExtractorNode(Node): if model_mode == ModelMode.CHAT: system_prompt_messages = ChatModelMessage( role=PromptMessageRole.SYSTEM, - text=CHAT_GENERATE_JSON_PROMPT.format(histories=memory_str).replace("{{instructions}}", instruction), + text=CHAT_GENERATE_JSON_PROMPT.format(histories=memory_str, instructions=instruction), ) user_prompt_message = ChatModelMessage(role=PromptMessageRole.USER, text=input_text) return [system_prompt_messages, user_prompt_message] diff --git a/api/core/workflow/nodes/parameter_extractor/prompts.py b/api/core/workflow/nodes/parameter_extractor/prompts.py index ab7ddcc32a..1b29be4418 100644 --- a/api/core/workflow/nodes/parameter_extractor/prompts.py +++ 
b/api/core/workflow/nodes/parameter_extractor/prompts.py @@ -135,7 +135,7 @@ Here are the chat histories between human and assistant, inside <histories></histories> XML tags. <instructions> -{{instructions}} +{instructions} </instructions> """ @@ -179,6 +179,6 @@ CHAT_EXAMPLE = [ "required": ["food"], }, }, - "assistant": {"text": "I need to output a valid JSON object.", "json": {"result": "apple pie"}}, + "assistant": {"text": "I need to output a valid JSON object.", "json": {"food": "apple pie"}}, }, ] diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index 483cfff574..948a1cead7 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -1,4 +1,5 @@ import json +import re from collections.abc import Mapping, Sequence from typing import TYPE_CHECKING, Any @@ -40,7 +41,7 @@ from .template_prompts import ( if TYPE_CHECKING: from core.file.models import File - from core.workflow.entities import GraphRuntimeState + from core.workflow.runtime import GraphRuntimeState class QuestionClassifierNode(Node): @@ -68,7 +69,7 @@ class QuestionClassifierNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. - self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( @@ -111,9 +112,9 @@ class QuestionClassifierNode(Node): query = variable.value if variable else None variables = {"query": query} # fetch model config - model_instance, model_config = LLMNode._fetch_model_config( - node_data_model=node_data.model, + model_instance, model_config = llm_utils.fetch_model_config( tenant_id=self.tenant_id, + node_data_model=node_data.model, ) # fetch memory memory = llm_utils.fetch_memory( @@ -192,13 +193,19 @@ class QuestionClassifierNode(Node): finish_reason = event.finish_reason break - category_name = node_data.classes[0].name - category_id = node_data.classes[0].id + rendered_classes = [ + c.model_copy(update={"name": variable_pool.convert_template(c.name).text}) for c in node_data.classes + ] + + category_name = rendered_classes[0].name + category_id = rendered_classes[0].id + if "<think>" in result_text: + result_text = re.sub(r"<think[^>]*>[\s\S]*?</think>", "", result_text, flags=re.IGNORECASE) result_text_json = parse_and_check_json_markdown(result_text, []) # result_text_json = json.loads(result_text.strip('```JSON\n')) if "category_name" in result_text_json and "category_id" in result_text_json: category_id_result = result_text_json["category_id"] - classes = node_data.classes + classes = rendered_classes classes_map = {class_.id: class_.name for class_ in classes} category_ids = [_class.id for _class in classes] if category_id_result in category_ids: diff --git a/api/core/workflow/nodes/start/start_node.py b/api/core/workflow/nodes/start/start_node.py index 2f33c54128..3b134be1a1 100644 --- a/api/core/workflow/nodes/start/start_node.py +++ b/api/core/workflow/nodes/start/start_node.py @@ -16,7 +16,7 @@ class StartNode(Node): _node_data: StartNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = StartNodeData(**data) + self._node_data = StartNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/template_transform/template_transform_node.py index
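The prompt change above pairs with the node.py switch from str.replace to str.format earlier in this diff. Under str.format, doubled braces are escape sequences that render as literal braces, so the old {{instructions}} placeholder would never be substituted once .format() was used. This is plain Python behavior:

TEMPLATE_OLD = "instructions: {{instructions}}"  # renders literal {instructions} under format()
TEMPLATE_NEW = "instructions: {instructions}"

# Doubled braces are escapes, not placeholders:
assert TEMPLATE_OLD.format(instructions="be brief") == "instructions: {instructions}"
assert TEMPLATE_NEW.format(instructions="be brief") == "instructions: be brief"

# The old code worked only because it used str.replace on the literal text:
assert TEMPLATE_OLD.replace("{{instructions}}", "be brief") == "instructions: be brief"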
cf05ef253a..254a8318b5 100644 --- a/api/core/workflow/nodes/template_transform/template_transform_node.py +++ b/api/core/workflow/nodes/template_transform/template_transform_node.py @@ -1,7 +1,7 @@ -import os from collections.abc import Mapping, Sequence from typing import Any +from configs import dify_config from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult @@ -9,7 +9,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.template_transform.entities import TemplateTransformNodeData -MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = int(os.environ.get("TEMPLATE_TRANSFORM_MAX_LENGTH", "80000")) +MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH class TemplateTransformNode(Node): diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index ce1a879ff1..69ab6f0718 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -6,10 +6,13 @@ from sqlalchemy.orm import Session from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler from core.file import File, FileTransferMethod +from core.model_runtime.entities.llm_entities import LLMUsage +from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter from core.tools.errors import ToolInvokeError from core.tools.tool_engine import ToolEngine from core.tools.utils.message_transformer import ToolFileMessageTransformer +from core.tools.workflow_as_tool.tool import WorkflowTool from core.variables.segments import ArrayAnySegment, ArrayFileSegment from core.variables.variables import ArrayAnyVariable from core.workflow.enums import ( @@ -36,7 +39,7 @@ from .exc import ( ) if TYPE_CHECKING: - from core.workflow.entities import VariablePool + from core.workflow.runtime import VariablePool class ToolNode(Node): @@ -136,13 +139,14 @@ class ToolNode(Node): try: # convert tool messages - yield from self._transform_message( + _ = yield from self._transform_message( messages=message_stream, tool_info=tool_info, parameters_for_log=parameters_for_log, user_id=self.user_id, tenant_id=self.tenant_id, node_id=self._node_id, + tool_runtime=tool_runtime, ) except ToolInvokeError as e: yield StreamCompletedEvent( @@ -224,7 +228,7 @@ class ToolNode(Node): return result def _fetch_files(self, variable_pool: "VariablePool") -> list[File]: - variable = variable_pool.get(["sys", SystemVariableKey.FILES.value]) + variable = variable_pool.get(["sys", SystemVariableKey.FILES]) assert isinstance(variable, ArrayAnyVariable | ArrayAnySegment) return list(variable.value) if variable else [] @@ -236,7 +240,8 @@ class ToolNode(Node): user_id: str, tenant_id: str, node_id: str, - ) -> Generator: + tool_runtime: Tool, + ) -> Generator[NodeEventBase, None, LLMUsage]: """ Convert ToolInvokeMessages into tuple[plain_text, files] """ @@ -424,17 +429,34 @@ class ToolNode(Node): is_final=True, ) + usage = self._extract_tool_usage(tool_runtime) + + metadata: dict[WorkflowNodeExecutionMetadataKey, Any] = { + WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info, + } + if usage.total_tokens > 0: + metadata[WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS] = usage.total_tokens + 
metadata[WorkflowNodeExecutionMetadataKey.TOTAL_PRICE] = usage.total_price + metadata[WorkflowNodeExecutionMetadataKey.CURRENCY] = usage.currency + yield StreamCompletedEvent( node_run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs={"text": text, "files": ArrayFileSegment(value=files), "json": json_output, **variables}, - metadata={ - WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info, - }, + metadata=metadata, inputs=parameters_for_log, + llm_usage=usage, ) ) + return usage + + @staticmethod + def _extract_tool_usage(tool_runtime: Tool) -> LLMUsage: + if isinstance(tool_runtime, WorkflowTool): + return tool_runtime.latest_usage + return LLMUsage.empty_usage() + @classmethod def _extract_variable_selector_to_variable_mapping( cls, diff --git a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py index be00d55937..0ac0d3d858 100644 --- a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py +++ b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py @@ -15,7 +15,7 @@ class VariableAggregatorNode(Node): _node_data: VariableAssignerNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = VariableAssignerNodeData(**data) + self._node_data = VariableAssignerNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py b/api/core/workflow/nodes/variable_assigner/v1/node.py index c2a9ecd7fb..8cd267c4a7 100644 --- a/api/core/workflow/nodes/variable_assigner/v1/node.py +++ b/api/core/workflow/nodes/variable_assigner/v1/node.py @@ -18,7 +18,7 @@ from ..common.impl import conversation_variable_updater_factory from .node_data import VariableAssignerData, WriteMode if TYPE_CHECKING: - from core.workflow.entities import GraphRuntimeState + from core.workflow.runtime import GraphRuntimeState _CONV_VAR_UPDATER_FACTORY: TypeAlias = Callable[[], ConversationVariableUpdater] diff --git a/api/core/workflow/runtime/__init__.py b/api/core/workflow/runtime/__init__.py new file mode 100644 index 0000000000..10014c7182 --- /dev/null +++ b/api/core/workflow/runtime/__init__.py @@ -0,0 +1,14 @@ +from .graph_runtime_state import GraphRuntimeState +from .graph_runtime_state_protocol import ReadOnlyGraphRuntimeState, ReadOnlyVariablePool +from .read_only_wrappers import ReadOnlyGraphRuntimeStateWrapper, ReadOnlyVariablePoolWrapper +from .variable_pool import VariablePool, VariableValue + +__all__ = [ + "GraphRuntimeState", + "ReadOnlyGraphRuntimeState", + "ReadOnlyGraphRuntimeStateWrapper", + "ReadOnlyVariablePool", + "ReadOnlyVariablePoolWrapper", + "VariablePool", + "VariableValue", +] diff --git a/api/core/workflow/runtime/graph_runtime_state.py b/api/core/workflow/runtime/graph_runtime_state.py new file mode 100644 index 0000000000..4c322c6aa6 --- /dev/null +++ b/api/core/workflow/runtime/graph_runtime_state.py @@ -0,0 +1,473 @@ +from __future__ import annotations + +import importlib +import json +from collections.abc import Mapping, Sequence +from collections.abc import Mapping as TypingMapping +from copy import deepcopy +from dataclasses import dataclass +from typing import Any, Protocol + +from pydantic.json import pydantic_encoder + +from core.model_runtime.entities.llm_entities import LLMUsage +from core.workflow.runtime.variable_pool import VariablePool + + +class ReadyQueueProtocol(Protocol): + 
"""Structural interface required from ready queue implementations.""" + + def put(self, item: str) -> None: + """Enqueue the identifier of a node that is ready to run.""" + ... + + def get(self, timeout: float | None = None) -> str: + """Return the next node identifier, blocking until available or timeout expires.""" + ... + + def task_done(self) -> None: + """Signal that the most recently dequeued node has completed processing.""" + ... + + def empty(self) -> bool: + """Return True when the queue contains no pending nodes.""" + ... + + def qsize(self) -> int: + """Approximate the number of pending nodes awaiting execution.""" + ... + + def dumps(self) -> str: + """Serialize the queue contents for persistence.""" + ... + + def loads(self, data: str) -> None: + """Restore the queue contents from a serialized payload.""" + ... + + +class GraphExecutionProtocol(Protocol): + """Structural interface for graph execution aggregate.""" + + workflow_id: str + started: bool + completed: bool + aborted: bool + error: Exception | None + exceptions_count: int + + def start(self) -> None: + """Transition execution into the running state.""" + ... + + def complete(self) -> None: + """Mark execution as successfully completed.""" + ... + + def abort(self, reason: str) -> None: + """Abort execution in response to an external stop request.""" + ... + + def fail(self, error: Exception) -> None: + """Record an unrecoverable error and end execution.""" + ... + + def dumps(self) -> str: + """Serialize execution state into a JSON payload.""" + ... + + def loads(self, data: str) -> None: + """Restore execution state from a previously serialized payload.""" + ... + + +class ResponseStreamCoordinatorProtocol(Protocol): + """Structural interface for response stream coordinator.""" + + def register(self, response_node_id: str) -> None: + """Register a response node so its outputs can be streamed.""" + ... + + def loads(self, data: str) -> None: + """Restore coordinator state from a serialized payload.""" + ... + + def dumps(self) -> str: + """Serialize coordinator state for persistence.""" + ... + + +class GraphProtocol(Protocol): + """Structural interface required from graph instances attached to the runtime state.""" + + nodes: TypingMapping[str, object] + edges: TypingMapping[str, object] + root_node: object + + def get_outgoing_edges(self, node_id: str) -> Sequence[object]: ... + + +@dataclass(slots=True) +class _GraphRuntimeStateSnapshot: + """Immutable view of a serialized runtime state snapshot.""" + + start_at: float + total_tokens: int + node_run_steps: int + llm_usage: LLMUsage + outputs: dict[str, Any] + variable_pool: VariablePool + has_variable_pool: bool + ready_queue_dump: str | None + graph_execution_dump: str | None + response_coordinator_dump: str | None + paused_nodes: tuple[str, ...] 
+ + +class GraphRuntimeState: + """Mutable runtime state shared across graph execution components.""" + + def __init__( + self, + *, + variable_pool: VariablePool, + start_at: float, + total_tokens: int = 0, + llm_usage: LLMUsage | None = None, + outputs: dict[str, object] | None = None, + node_run_steps: int = 0, + ready_queue: ReadyQueueProtocol | None = None, + graph_execution: GraphExecutionProtocol | None = None, + response_coordinator: ResponseStreamCoordinatorProtocol | None = None, + graph: GraphProtocol | None = None, + ) -> None: + self._variable_pool = variable_pool + self._start_at = start_at + + if total_tokens < 0: + raise ValueError("total_tokens must be non-negative") + self._total_tokens = total_tokens + + self._llm_usage = (llm_usage or LLMUsage.empty_usage()).model_copy() + self._outputs = deepcopy(outputs) if outputs is not None else {} + + if node_run_steps < 0: + raise ValueError("node_run_steps must be non-negative") + self._node_run_steps = node_run_steps + + self._graph: GraphProtocol | None = None + + self._ready_queue = ready_queue + self._graph_execution = graph_execution + self._response_coordinator = response_coordinator + self._pending_response_coordinator_dump: str | None = None + self._pending_graph_execution_workflow_id: str | None = None + self._paused_nodes: set[str] = set() + + if graph is not None: + self.attach_graph(graph) + + # ------------------------------------------------------------------ + # Context binding helpers + # ------------------------------------------------------------------ + def attach_graph(self, graph: GraphProtocol) -> None: + """Attach the materialized graph to the runtime state.""" + if self._graph is not None and self._graph is not graph: + raise ValueError("GraphRuntimeState already attached to a different graph instance") + + self._graph = graph + + if self._response_coordinator is None: + self._response_coordinator = self._build_response_coordinator(graph) + + if self._pending_response_coordinator_dump is not None and self._response_coordinator is not None: + self._response_coordinator.loads(self._pending_response_coordinator_dump) + self._pending_response_coordinator_dump = None + + def configure(self, *, graph: GraphProtocol | None = None) -> None: + """Ensure core collaborators are initialized with the provided context.""" + if graph is not None: + self.attach_graph(graph) + + # Ensure collaborators are instantiated + _ = self.ready_queue + _ = self.graph_execution + if self._graph is not None: + _ = self.response_coordinator + + # ------------------------------------------------------------------ + # Primary collaborators + # ------------------------------------------------------------------ + @property + def variable_pool(self) -> VariablePool: + return self._variable_pool + + @property + def ready_queue(self) -> ReadyQueueProtocol: + if self._ready_queue is None: + self._ready_queue = self._build_ready_queue() + return self._ready_queue + + @property + def graph_execution(self) -> GraphExecutionProtocol: + if self._graph_execution is None: + self._graph_execution = self._build_graph_execution() + return self._graph_execution + + @property + def response_coordinator(self) -> ResponseStreamCoordinatorProtocol: + if self._response_coordinator is None: + if self._graph is None: + raise ValueError("Graph must be attached before accessing response coordinator") + self._response_coordinator = self._build_response_coordinator(self._graph) + return self._response_coordinator + + # 
------------------------------------------------------------------ + # Scalar state + # ------------------------------------------------------------------ + @property + def start_at(self) -> float: + return self._start_at + + @start_at.setter + def start_at(self, value: float) -> None: + self._start_at = value + + @property + def total_tokens(self) -> int: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: int) -> None: + if value < 0: + raise ValueError("total_tokens must be non-negative") + self._total_tokens = value + + @property + def llm_usage(self) -> LLMUsage: + return self._llm_usage.model_copy() + + @llm_usage.setter + def llm_usage(self, value: LLMUsage) -> None: + self._llm_usage = value.model_copy() + + @property + def outputs(self) -> dict[str, Any]: + return deepcopy(self._outputs) + + @outputs.setter + def outputs(self, value: dict[str, Any]) -> None: + self._outputs = deepcopy(value) + + def set_output(self, key: str, value: object) -> None: + self._outputs[key] = deepcopy(value) + + def get_output(self, key: str, default: object = None) -> object: + return deepcopy(self._outputs.get(key, default)) + + def update_outputs(self, updates: dict[str, object]) -> None: + for key, value in updates.items(): + self._outputs[key] = deepcopy(value) + + @property + def node_run_steps(self) -> int: + return self._node_run_steps + + @node_run_steps.setter + def node_run_steps(self, value: int) -> None: + if value < 0: + raise ValueError("node_run_steps must be non-negative") + self._node_run_steps = value + + def increment_node_run_steps(self) -> None: + self._node_run_steps += 1 + + def add_tokens(self, tokens: int) -> None: + if tokens < 0: + raise ValueError("tokens must be non-negative") + self._total_tokens += tokens + + # ------------------------------------------------------------------ + # Serialization + # ------------------------------------------------------------------ + def dumps(self) -> str: + """Serialize runtime state into a JSON string.""" + + snapshot: dict[str, Any] = { + "version": "1.0", + "start_at": self._start_at, + "total_tokens": self._total_tokens, + "node_run_steps": self._node_run_steps, + "llm_usage": self._llm_usage.model_dump(mode="json"), + "outputs": self.outputs, + "variable_pool": self.variable_pool.model_dump(mode="json"), + "ready_queue": self.ready_queue.dumps(), + "graph_execution": self.graph_execution.dumps(), + "paused_nodes": list(self._paused_nodes), + } + + if self._response_coordinator is not None and self._graph is not None: + snapshot["response_coordinator"] = self._response_coordinator.dumps() + + return json.dumps(snapshot, default=pydantic_encoder) + + @classmethod + def from_snapshot(cls, data: str | Mapping[str, Any]) -> GraphRuntimeState: + """Restore runtime state from a serialized snapshot.""" + + snapshot = cls._parse_snapshot_payload(data) + + state = cls( + variable_pool=snapshot.variable_pool, + start_at=snapshot.start_at, + total_tokens=snapshot.total_tokens, + llm_usage=snapshot.llm_usage, + outputs=snapshot.outputs, + node_run_steps=snapshot.node_run_steps, + ) + state._apply_snapshot(snapshot) + return state + + def loads(self, data: str | Mapping[str, Any]) -> None: + """Restore runtime state from a serialized snapshot (legacy API).""" + + snapshot = self._parse_snapshot_payload(data) + self._apply_snapshot(snapshot) + + def register_paused_node(self, node_id: str) -> None: + """Record a node that should resume when execution is continued.""" + + self._paused_nodes.add(node_id) + + def 
consume_paused_nodes(self) -> list[str]: + """Retrieve and clear the list of paused nodes awaiting resume.""" + + nodes = list(self._paused_nodes) + self._paused_nodes.clear() + return nodes + + # ------------------------------------------------------------------ + # Builders + # ------------------------------------------------------------------ + def _build_ready_queue(self) -> ReadyQueueProtocol: + # Import lazily to avoid breaching architecture boundaries enforced by import-linter. + module = importlib.import_module("core.workflow.graph_engine.ready_queue") + in_memory_cls = module.InMemoryReadyQueue + return in_memory_cls() + + def _build_graph_execution(self) -> GraphExecutionProtocol: + # Lazily import to keep the runtime domain decoupled from graph_engine modules. + module = importlib.import_module("core.workflow.graph_engine.domain.graph_execution") + graph_execution_cls = module.GraphExecution + workflow_id = self._pending_graph_execution_workflow_id or "" + self._pending_graph_execution_workflow_id = None + return graph_execution_cls(workflow_id=workflow_id) + + def _build_response_coordinator(self, graph: GraphProtocol) -> ResponseStreamCoordinatorProtocol: + # Lazily import to keep the runtime domain decoupled from graph_engine modules. + module = importlib.import_module("core.workflow.graph_engine.response_coordinator") + coordinator_cls = module.ResponseStreamCoordinator + return coordinator_cls(variable_pool=self.variable_pool, graph=graph) + + # ------------------------------------------------------------------ + # Snapshot helpers + # ------------------------------------------------------------------ + @classmethod + def _parse_snapshot_payload(cls, data: str | Mapping[str, Any]) -> _GraphRuntimeStateSnapshot: + payload: dict[str, Any] + if isinstance(data, str): + payload = json.loads(data) + else: + payload = dict(data) + + version = payload.get("version") + if version != "1.0": + raise ValueError(f"Unsupported GraphRuntimeState snapshot version: {version}") + + start_at = float(payload.get("start_at", 0.0)) + + total_tokens = int(payload.get("total_tokens", 0)) + if total_tokens < 0: + raise ValueError("total_tokens must be non-negative") + + node_run_steps = int(payload.get("node_run_steps", 0)) + if node_run_steps < 0: + raise ValueError("node_run_steps must be non-negative") + + llm_usage_payload = payload.get("llm_usage", {}) + llm_usage = LLMUsage.model_validate(llm_usage_payload) + + outputs_payload = deepcopy(payload.get("outputs", {})) + + variable_pool_payload = payload.get("variable_pool") + has_variable_pool = variable_pool_payload is not None + variable_pool = VariablePool.model_validate(variable_pool_payload) if has_variable_pool else VariablePool() + + ready_queue_payload = payload.get("ready_queue") + graph_execution_payload = payload.get("graph_execution") + response_payload = payload.get("response_coordinator") + paused_nodes_payload = payload.get("paused_nodes", []) + + return _GraphRuntimeStateSnapshot( + start_at=start_at, + total_tokens=total_tokens, + node_run_steps=node_run_steps, + llm_usage=llm_usage, + outputs=outputs_payload, + variable_pool=variable_pool, + has_variable_pool=has_variable_pool, + ready_queue_dump=ready_queue_payload, + graph_execution_dump=graph_execution_payload, + response_coordinator_dump=response_payload, + paused_nodes=tuple(map(str, paused_nodes_payload)), + ) + + def _apply_snapshot(self, snapshot: _GraphRuntimeStateSnapshot) -> None: + self._start_at = snapshot.start_at + self._total_tokens = snapshot.total_tokens + 
self._node_run_steps = snapshot.node_run_steps + self._llm_usage = snapshot.llm_usage.model_copy() + self._outputs = deepcopy(snapshot.outputs) + if snapshot.has_variable_pool or self._variable_pool is None: + self._variable_pool = snapshot.variable_pool + + self._restore_ready_queue(snapshot.ready_queue_dump) + self._restore_graph_execution(snapshot.graph_execution_dump) + self._restore_response_coordinator(snapshot.response_coordinator_dump) + self._paused_nodes = set(snapshot.paused_nodes) + + def _restore_ready_queue(self, payload: str | None) -> None: + if payload is not None: + self._ready_queue = self._build_ready_queue() + self._ready_queue.loads(payload) + else: + self._ready_queue = None + + def _restore_graph_execution(self, payload: str | None) -> None: + self._graph_execution = None + self._pending_graph_execution_workflow_id = None + + if payload is None: + return + + try: + execution_payload = json.loads(payload) + self._pending_graph_execution_workflow_id = execution_payload.get("workflow_id") + except (json.JSONDecodeError, TypeError, AttributeError): + self._pending_graph_execution_workflow_id = None + + self.graph_execution.loads(payload) + + def _restore_response_coordinator(self, payload: str | None) -> None: + if payload is None: + self._pending_response_coordinator_dump = None + self._response_coordinator = None + return + + if self._graph is not None: + self.response_coordinator.loads(payload) + self._pending_response_coordinator_dump = None + return + + self._pending_response_coordinator_dump = payload + self._response_coordinator = None diff --git a/api/core/workflow/graph/graph_runtime_state_protocol.py b/api/core/workflow/runtime/graph_runtime_state_protocol.py similarity index 76% rename from api/core/workflow/graph/graph_runtime_state_protocol.py rename to api/core/workflow/runtime/graph_runtime_state_protocol.py index d7961405ca..40835a936f 100644 --- a/api/core/workflow/graph/graph_runtime_state_protocol.py +++ b/api/core/workflow/runtime/graph_runtime_state_protocol.py @@ -16,6 +16,10 @@ class ReadOnlyVariablePool(Protocol): """Get all variables for a node (read-only).""" ... + def get_by_prefix(self, prefix: str) -> Mapping[str, object]: + """Get all variables stored under a given node prefix (read-only).""" + ... + class ReadOnlyGraphRuntimeState(Protocol): """ @@ -56,6 +60,20 @@ class ReadOnlyGraphRuntimeState(Protocol): """Get the node run steps count (read-only).""" ... + @property + def ready_queue_size(self) -> int: + """Get the number of nodes currently in the ready queue.""" + ... + + @property + def exceptions_count(self) -> int: + """Get the number of node execution exceptions recorded.""" + ... + def get_output(self, key: str, default: Any = None) -> Any: """Get a single output value (returns a copy).""" ... + + def dumps(self) -> str: + """Serialize the runtime state into a JSON snapshot (read-only).""" + ... 
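dumps() and from_snapshot() give the runtime state a persistence round-trip, which is what register_paused_node/consume_paused_nodes rely on when a run is suspended and later resumed; the read-only protocol now exposes dumps() as well, so observing layers can trigger a snapshot without being handed a mutable state. A hedged usage sketch (it assumes a full Dify environment so the lazy graph_engine imports inside the builders resolve; the values are illustrative):

import time

from core.workflow.runtime import GraphRuntimeState, VariablePool

# Record some progress, then snapshot.
state = GraphRuntimeState(variable_pool=VariablePool.empty(), start_at=time.perf_counter())
state.add_tokens(128)
state.increment_node_run_steps()
state.set_output("answer", "42")

payload = state.dumps()  # JSON string tagged with snapshot version "1.0"

# Later, e.g. when resuming after a human-input pause, rebuild an equivalent state.
restored = GraphRuntimeState.from_snapshot(payload)
assert restored.total_tokens == 128
assert restored.node_run_steps == 1
assert restored.get_output("answer") == "42"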
diff --git a/api/core/workflow/graph/read_only_state_wrapper.py b/api/core/workflow/runtime/read_only_wrappers.py similarity index 54% rename from api/core/workflow/graph/read_only_state_wrapper.py rename to api/core/workflow/runtime/read_only_wrappers.py index 255bb5adee..664c365295 100644 --- a/api/core/workflow/graph/read_only_state_wrapper.py +++ b/api/core/workflow/runtime/read_only_wrappers.py @@ -1,77 +1,82 @@ +from __future__ import annotations + from collections.abc import Mapping from copy import deepcopy from typing import Any from core.model_runtime.entities.llm_entities import LLMUsage from core.variables.segments import Segment -from core.workflow.entities.graph_runtime_state import GraphRuntimeState -from core.workflow.entities.variable_pool import VariablePool + +from .graph_runtime_state import GraphRuntimeState +from .variable_pool import VariablePool class ReadOnlyVariablePoolWrapper: - """Wrapper that provides read-only access to VariablePool.""" + """Provide defensive, read-only access to ``VariablePool``.""" - def __init__(self, variable_pool: VariablePool): + def __init__(self, variable_pool: VariablePool) -> None: self._variable_pool = variable_pool def get(self, node_id: str, variable_key: str) -> Segment | None: - """Get a variable value (returns a defensive copy).""" + """Return a copy of a variable value if present.""" value = self._variable_pool.get([node_id, variable_key]) return deepcopy(value) if value is not None else None def get_all_by_node(self, node_id: str) -> Mapping[str, object]: - """Get all variables for a node (returns defensive copies).""" + """Return a copy of all variables for the specified node.""" variables: dict[str, object] = {} if node_id in self._variable_pool.variable_dictionary: - for key, var in self._variable_pool.variable_dictionary[node_id].items(): - # Variables have a value property that contains the actual data - variables[key] = deepcopy(var.value) + for key, variable in self._variable_pool.variable_dictionary[node_id].items(): + variables[key] = deepcopy(variable.value) return variables + def get_by_prefix(self, prefix: str) -> Mapping[str, object]: + """Return a copy of all variables stored under the given prefix.""" + return self._variable_pool.get_by_prefix(prefix) + class ReadOnlyGraphRuntimeStateWrapper: - """ - Wrapper that provides read-only access to GraphRuntimeState. + """Expose a defensive, read-only view of ``GraphRuntimeState``.""" - This wrapper ensures that layers can observe the state without - modifying it. All returned values are defensive copies. 
- """ - - def __init__(self, state: GraphRuntimeState): + def __init__(self, state: GraphRuntimeState) -> None: self._state = state self._variable_pool_wrapper = ReadOnlyVariablePoolWrapper(state.variable_pool) @property def variable_pool(self) -> ReadOnlyVariablePoolWrapper: - """Get read-only access to the variable pool.""" return self._variable_pool_wrapper @property def start_at(self) -> float: - """Get the start time (read-only).""" return self._state.start_at @property def total_tokens(self) -> int: - """Get the total tokens count (read-only).""" return self._state.total_tokens @property def llm_usage(self) -> LLMUsage: - """Get a copy of LLM usage info (read-only).""" - # Return a copy to prevent modification return self._state.llm_usage.model_copy() @property def outputs(self) -> dict[str, Any]: - """Get a defensive copy of outputs (read-only).""" return deepcopy(self._state.outputs) @property def node_run_steps(self) -> int: - """Get the node run steps count (read-only).""" return self._state.node_run_steps + @property + def ready_queue_size(self) -> int: + return self._state.ready_queue.qsize() + + @property + def exceptions_count(self) -> int: + return self._state.graph_execution.exceptions_count + def get_output(self, key: str, default: Any = None) -> Any: - """Get a single output value (returns a copy).""" return self._state.get_output(key, default) + + def dumps(self) -> str: + """Serialize the underlying runtime state for external persistence.""" + return self._state.dumps() diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/runtime/variable_pool.py similarity index 90% rename from api/core/workflow/entities/variable_pool.py rename to api/core/workflow/runtime/variable_pool.py index 8ceabde7e6..d41a20dfd7 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/runtime/variable_pool.py @@ -1,6 +1,7 @@ import re from collections import defaultdict from collections.abc import Mapping, Sequence +from copy import deepcopy from typing import Annotated, Any, Union, cast from pydantic import BaseModel, Field @@ -184,11 +185,22 @@ class VariablePool(BaseModel): """Extract the actual value from an ObjectSegment.""" return obj.value if isinstance(obj, ObjectSegment) else obj - def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str): - """Get a nested attribute from a dictionary-like object.""" - if not isinstance(obj, dict): + def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str) -> Segment | None: + """ + Get a nested attribute from a dictionary-like object. + + Args: + obj: The dictionary-like object to search. + attr: The key to look up. + + Returns: + Segment | None: + The corresponding Segment built from the attribute value if the key exists, + otherwise None. 
+ """ + if not isinstance(obj, dict) or attr not in obj: return None - return obj.get(attr) + return variable_factory.build_segment(obj.get(attr)) def remove(self, selector: Sequence[str], /): """ @@ -224,6 +236,20 @@ class VariablePool(BaseModel): return segment return None + def get_by_prefix(self, prefix: str, /) -> Mapping[str, object]: + """Return a copy of all variables stored under the given node prefix.""" + + nodes = self.variable_dictionary.get(prefix) + if not nodes: + return {} + + result: dict[str, object] = {} + for key, variable in nodes.items(): + value = variable.value + result[key] = deepcopy(value) + + return result + def _add_system_variables(self, system_variable: SystemVariable): sys_var_mapping = system_variable.to_dict() for key, value in sys_var_mapping.items(): @@ -234,7 +260,7 @@ class VariablePool(BaseModel): # This ensures that we can keep the id of the system variables intact. if self._has(selector): continue - self.add(selector, value) # type: ignore + self.add(selector, value) @classmethod def empty(cls) -> "VariablePool": diff --git a/api/core/workflow/utils/condition/processor.py b/api/core/workflow/utils/condition/processor.py index f4bbe9c3c3..650a44c681 100644 --- a/api/core/workflow/utils/condition/processor.py +++ b/api/core/workflow/utils/condition/processor.py @@ -5,7 +5,7 @@ from typing import Literal, NamedTuple from core.file import FileAttribute, file_manager from core.variables import ArrayFileSegment from core.variables.segments import ArrayBooleanSegment, BooleanSegment -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from .entities import Condition, SubCondition, SupportedComparisonOperator diff --git a/api/core/workflow/variable_loader.py b/api/core/workflow/variable_loader.py index 1b31022495..ea0bdc3537 100644 --- a/api/core/workflow/variable_loader.py +++ b/api/core/workflow/variable_loader.py @@ -4,7 +4,7 @@ from typing import Any, Protocol from core.variables import Variable from core.variables.consts import SELECTORS_LENGTH -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.runtime import VariablePool class VariableLoader(Protocol): diff --git a/api/core/workflow/workflow_cycle_manager.py b/api/core/workflow/workflow_cycle_manager.py deleted file mode 100644 index a88f350a9e..0000000000 --- a/api/core/workflow/workflow_cycle_manager.py +++ /dev/null @@ -1,459 +0,0 @@ -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from typing import Any, Union - -from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity -from core.app.entities.queue_entities import ( - QueueNodeExceptionEvent, - QueueNodeFailedEvent, - QueueNodeRetryEvent, - QueueNodeStartedEvent, - QueueNodeSucceededEvent, -) -from core.app.task_pipeline.exc import WorkflowRunNotFoundError -from core.ops.entities.trace_entity import TraceTaskName -from core.ops.ops_trace_manager import TraceQueueManager, TraceTask -from core.workflow.entities import ( - WorkflowExecution, - WorkflowNodeExecution, -) -from core.workflow.enums import ( - SystemVariableKey, - WorkflowExecutionStatus, - WorkflowNodeExecutionMetadataKey, - WorkflowNodeExecutionStatus, - WorkflowType, -) -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository -from core.workflow.system_variable 
import SystemVariable -from core.workflow.workflow_entry import WorkflowEntry -from libs.datetime_utils import naive_utc_now -from libs.uuid_utils import uuidv7 - - -@dataclass -class CycleManagerWorkflowInfo: - workflow_id: str - workflow_type: WorkflowType - version: str - graph_data: Mapping[str, Any] - - -class WorkflowCycleManager: - def __init__( - self, - *, - application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity], - workflow_system_variables: SystemVariable, - workflow_info: CycleManagerWorkflowInfo, - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, - ): - self._application_generate_entity = application_generate_entity - self._workflow_system_variables = workflow_system_variables - self._workflow_info = workflow_info - self._workflow_execution_repository = workflow_execution_repository - self._workflow_node_execution_repository = workflow_node_execution_repository - - # Initialize caches for workflow execution cycle - # These caches avoid redundant repository calls during a single workflow execution - self._workflow_execution_cache: dict[str, WorkflowExecution] = {} - self._node_execution_cache: dict[str, WorkflowNodeExecution] = {} - - def handle_workflow_run_start(self) -> WorkflowExecution: - inputs = self._prepare_workflow_inputs() - execution_id = self._get_or_generate_execution_id() - - execution = WorkflowExecution.new( - id_=execution_id, - workflow_id=self._workflow_info.workflow_id, - workflow_type=self._workflow_info.workflow_type, - workflow_version=self._workflow_info.version, - graph=self._workflow_info.graph_data, - inputs=inputs, - started_at=naive_utc_now(), - ) - - return self._save_and_cache_workflow_execution(execution) - - def handle_workflow_run_success( - self, - *, - workflow_run_id: str, - total_tokens: int, - total_steps: int, - outputs: Mapping[str, Any] | None = None, - conversation_id: str | None = None, - trace_manager: TraceQueueManager | None = None, - external_trace_id: str | None = None, - ) -> WorkflowExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - - self._update_workflow_execution_completion( - workflow_execution, - status=WorkflowExecutionStatus.SUCCEEDED, - outputs=outputs, - total_tokens=total_tokens, - total_steps=total_steps, - ) - - self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id) - - self._workflow_execution_repository.save(workflow_execution) - return workflow_execution - - def handle_workflow_run_partial_success( - self, - *, - workflow_run_id: str, - total_tokens: int, - total_steps: int, - outputs: Mapping[str, Any] | None = None, - exceptions_count: int = 0, - conversation_id: str | None = None, - trace_manager: TraceQueueManager | None = None, - external_trace_id: str | None = None, - ) -> WorkflowExecution: - execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - - self._update_workflow_execution_completion( - execution, - status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED, - outputs=outputs, - total_tokens=total_tokens, - total_steps=total_steps, - exceptions_count=exceptions_count, - ) - - self._add_trace_task_if_needed(trace_manager, execution, conversation_id, external_trace_id) - - self._workflow_execution_repository.save(execution) - return execution - - def handle_workflow_run_failed( - self, - *, - workflow_run_id: str, - total_tokens: int, - total_steps: int, - status: 
WorkflowExecutionStatus, - error_message: str, - conversation_id: str | None = None, - trace_manager: TraceQueueManager | None = None, - exceptions_count: int = 0, - external_trace_id: str | None = None, - ) -> WorkflowExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - now = naive_utc_now() - - self._update_workflow_execution_completion( - workflow_execution, - status=status, - total_tokens=total_tokens, - total_steps=total_steps, - error_message=error_message, - exceptions_count=exceptions_count, - finished_at=now, - ) - - self._fail_running_node_executions(workflow_execution.id_, error_message, now) - self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id) - - self._workflow_execution_repository.save(workflow_execution) - return workflow_execution - - def handle_node_execution_start( - self, - *, - workflow_execution_id: str, - event: QueueNodeStartedEvent, - ) -> WorkflowNodeExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id) - - domain_execution = self._create_node_execution_from_event( - workflow_execution=workflow_execution, - event=event, - status=WorkflowNodeExecutionStatus.RUNNING, - ) - - return self._save_and_cache_node_execution(domain_execution) - - def handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution: - domain_execution = self._get_node_execution_from_cache(event.node_execution_id) - - self._update_node_execution_completion( - domain_execution, - event=event, - status=WorkflowNodeExecutionStatus.SUCCEEDED, - ) - - self._workflow_node_execution_repository.save(domain_execution) - self._workflow_node_execution_repository.save_execution_data(domain_execution) - return domain_execution - - def handle_workflow_node_execution_failed( - self, - *, - event: QueueNodeFailedEvent | QueueNodeExceptionEvent, - ) -> WorkflowNodeExecution: - """ - Workflow node execution failed - :param event: queue node failed event - :return: - """ - domain_execution = self._get_node_execution_from_cache(event.node_execution_id) - - status = ( - WorkflowNodeExecutionStatus.EXCEPTION - if isinstance(event, QueueNodeExceptionEvent) - else WorkflowNodeExecutionStatus.FAILED - ) - - self._update_node_execution_completion( - domain_execution, - event=event, - status=status, - error=event.error, - handle_special_values=True, - ) - - self._workflow_node_execution_repository.save(domain_execution) - self._workflow_node_execution_repository.save_execution_data(domain_execution) - return domain_execution - - def handle_workflow_node_execution_retried( - self, *, workflow_execution_id: str, event: QueueNodeRetryEvent - ) -> WorkflowNodeExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id) - - domain_execution = self._create_node_execution_from_event( - workflow_execution=workflow_execution, - event=event, - status=WorkflowNodeExecutionStatus.RETRY, - error=event.error, - created_at=event.start_at, - ) - - # Handle inputs and outputs - inputs = WorkflowEntry.handle_special_values(event.inputs) - outputs = event.outputs - metadata = self._merge_event_metadata(event) - - domain_execution.update_from_mapping(inputs=inputs, outputs=outputs, metadata=metadata) - - execution = self._save_and_cache_node_execution(domain_execution) - self._workflow_node_execution_repository.save_execution_data(execution) - return execution - - def _get_workflow_execution_or_raise_error(self, id: str, 
/) -> WorkflowExecution: - # Check cache first - if id in self._workflow_execution_cache: - return self._workflow_execution_cache[id] - - raise WorkflowRunNotFoundError(id) - - def _prepare_workflow_inputs(self) -> dict[str, Any]: - """Prepare workflow inputs by merging application inputs with system variables.""" - inputs = {**self._application_generate_entity.inputs} - - if self._workflow_system_variables: - for field_name, value in self._workflow_system_variables.to_dict().items(): - if field_name != SystemVariableKey.CONVERSATION_ID: - inputs[f"sys.{field_name}"] = value - - return dict(WorkflowEntry.handle_special_values(inputs) or {}) - - def _get_or_generate_execution_id(self) -> str: - """Get execution ID from system variables or generate a new one.""" - if self._workflow_system_variables and self._workflow_system_variables.workflow_execution_id: - return str(self._workflow_system_variables.workflow_execution_id) - return str(uuidv7()) - - def _save_and_cache_workflow_execution(self, execution: WorkflowExecution) -> WorkflowExecution: - """Save workflow execution to repository and cache it.""" - self._workflow_execution_repository.save(execution) - self._workflow_execution_cache[execution.id_] = execution - return execution - - def _save_and_cache_node_execution(self, execution: WorkflowNodeExecution) -> WorkflowNodeExecution: - """Save node execution to repository and cache it if it has an ID. - - This does not persist the `inputs` / `process_data` / `outputs` fields of the execution model. - """ - self._workflow_node_execution_repository.save(execution) - if execution.node_execution_id: - self._node_execution_cache[execution.node_execution_id] = execution - return execution - - def _get_node_execution_from_cache(self, node_execution_id: str) -> WorkflowNodeExecution: - """Get node execution from cache or raise error if not found.""" - domain_execution = self._node_execution_cache.get(node_execution_id) - if not domain_execution: - raise ValueError(f"Domain node execution not found: {node_execution_id}") - return domain_execution - - def _update_workflow_execution_completion( - self, - execution: WorkflowExecution, - *, - status: WorkflowExecutionStatus, - total_tokens: int, - total_steps: int, - outputs: Mapping[str, Any] | None = None, - error_message: str | None = None, - exceptions_count: int = 0, - finished_at: datetime | None = None, - ): - """Update workflow execution with completion data.""" - execution.status = status - execution.outputs = outputs or {} - execution.total_tokens = total_tokens - execution.total_steps = total_steps - execution.finished_at = finished_at or naive_utc_now() - execution.exceptions_count = exceptions_count - if error_message: - execution.error_message = error_message - - def _add_trace_task_if_needed( - self, - trace_manager: TraceQueueManager | None, - workflow_execution: WorkflowExecution, - conversation_id: str | None, - external_trace_id: str | None, - ): - """Add trace task if trace manager is provided.""" - if trace_manager: - trace_manager.add_trace_task( - TraceTask( - TraceTaskName.WORKFLOW_TRACE, - workflow_execution=workflow_execution, - conversation_id=conversation_id, - user_id=trace_manager.user_id, - external_trace_id=external_trace_id, - ) - ) - - def _fail_running_node_executions( - self, - workflow_execution_id: str, - error_message: str, - now: datetime, - ): - """Fail all running node executions for a workflow.""" - running_node_executions = [ - node_exec - for node_exec in self._node_execution_cache.values() - if 
node_exec.workflow_execution_id == workflow_execution_id - and node_exec.status == WorkflowNodeExecutionStatus.RUNNING - ] - - for node_execution in running_node_executions: - if node_execution.node_execution_id: - node_execution.status = WorkflowNodeExecutionStatus.FAILED - node_execution.error = error_message - node_execution.finished_at = now - node_execution.elapsed_time = (now - node_execution.created_at).total_seconds() - self._workflow_node_execution_repository.save(node_execution) - - def _create_node_execution_from_event( - self, - *, - workflow_execution: WorkflowExecution, - event: QueueNodeStartedEvent, - status: WorkflowNodeExecutionStatus, - error: str | None = None, - created_at: datetime | None = None, - ) -> WorkflowNodeExecution: - """Create a node execution from an event.""" - now = naive_utc_now() - created_at = created_at or now - - metadata = { - WorkflowNodeExecutionMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id, - WorkflowNodeExecutionMetadataKey.ITERATION_ID: event.in_iteration_id, - WorkflowNodeExecutionMetadataKey.LOOP_ID: event.in_loop_id, - } - - domain_execution = WorkflowNodeExecution( - id=event.node_execution_id, - workflow_id=workflow_execution.workflow_id, - workflow_execution_id=workflow_execution.id_, - predecessor_node_id=event.predecessor_node_id, - index=event.node_run_index, - node_execution_id=event.node_execution_id, - node_id=event.node_id, - node_type=event.node_type, - title=event.node_title, - status=status, - metadata=metadata, - created_at=created_at, - error=error, - ) - - if status == WorkflowNodeExecutionStatus.RETRY: - domain_execution.finished_at = now - domain_execution.elapsed_time = (now - created_at).total_seconds() - - return domain_execution - - def _update_node_execution_completion( - self, - domain_execution: WorkflowNodeExecution, - *, - event: Union[ - QueueNodeSucceededEvent, - QueueNodeFailedEvent, - QueueNodeExceptionEvent, - ], - status: WorkflowNodeExecutionStatus, - error: str | None = None, - handle_special_values: bool = False, - ): - """Update node execution with completion data.""" - finished_at = naive_utc_now() - elapsed_time = (finished_at - event.start_at).total_seconds() - - # Process data - if handle_special_values: - inputs = WorkflowEntry.handle_special_values(event.inputs) - process_data = WorkflowEntry.handle_special_values(event.process_data) - else: - inputs = event.inputs - process_data = event.process_data - - outputs = event.outputs - - # Convert metadata - execution_metadata_dict: dict[WorkflowNodeExecutionMetadataKey, Any] = {} - if event.execution_metadata: - execution_metadata_dict.update(event.execution_metadata) - - # Update domain model - domain_execution.status = status - domain_execution.update_from_mapping( - inputs=inputs, - process_data=process_data, - outputs=outputs, - metadata=execution_metadata_dict, - ) - domain_execution.finished_at = finished_at - domain_execution.elapsed_time = elapsed_time - - if error: - domain_execution.error = error - - def _merge_event_metadata(self, event: QueueNodeRetryEvent) -> dict[WorkflowNodeExecutionMetadataKey, str | None]: - """Merge event metadata with origin metadata.""" - origin_metadata = { - WorkflowNodeExecutionMetadataKey.ITERATION_ID: event.in_iteration_id, - WorkflowNodeExecutionMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id, - WorkflowNodeExecutionMetadataKey.LOOP_ID: event.in_loop_id, - } - - execution_metadata_dict: dict[WorkflowNodeExecutionMetadataKey, str | None] = {} - if event.execution_metadata: - 
execution_metadata_dict.update(event.execution_metadata) - - return {**execution_metadata_dict, **origin_metadata} if execution_metadata_dict else origin_metadata diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 49645ff120..742c42ec2b 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -9,7 +9,7 @@ from core.app.apps.exc import GenerateTaskStoppedError from core.app.entities.app_invoke_entities import InvokeFrom from core.file.models import File from core.workflow.constants import ENVIRONMENT_VARIABLE_NODE_ID -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine @@ -20,6 +20,7 @@ from core.workflow.graph_events import GraphEngineEvent, GraphNodeEventBase, Gra from core.workflow.nodes import NodeType from core.workflow.nodes.base.node import Node from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool from factories import file_factory @@ -227,7 +228,7 @@ class WorkflowEntry: "height": node_height, "type": "custom", "data": { - "type": NodeType.START.value, + "type": NodeType.START, "title": "Start", "desc": "Start", }, @@ -416,4 +417,8 @@ class WorkflowEntry: # append variable and value to variable pool if variable_node_id != ENVIRONMENT_VARIABLE_NODE_ID: + # In single run, the input_value is set as the LLM's structured output value within the variable_pool. 
+ if len(variable_key_list) == 2 and variable_key_list[0] == "structured_output": + input_value = {variable_key_list[1]: input_value} + variable_key_list = variable_key_list[0:1] variable_pool.add([variable_node_id] + variable_key_list, input_value) diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index 08c0a1f35e..8f6998119e 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -32,7 +32,8 @@ if [[ "${MODE}" == "worker" ]]; then exec celery -A celery_entrypoint.celery worker -P ${CELERY_WORKER_CLASS:-gevent} $CONCURRENCY_OPTION \ --max-tasks-per-child ${MAX_TASKS_PER_CHILD:-50} --loglevel ${LOG_LEVEL:-INFO} \ - -Q ${CELERY_QUEUES:-dataset,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation} + -Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline} \ + --prefetch-multiplier=1 elif [[ "${MODE}" == "beat" ]]; then exec celery -A app.celery beat --loglevel ${LOG_LEVEL:-INFO} diff --git a/api/events/event_handlers/clean_when_dataset_deleted.py b/api/events/event_handlers/clean_when_dataset_deleted.py index 7caa2d1cc9..1666e2e29f 100644 --- a/api/events/event_handlers/clean_when_dataset_deleted.py +++ b/api/events/event_handlers/clean_when_dataset_deleted.py @@ -1,10 +1,13 @@ from events.dataset_event import dataset_was_deleted +from models import Dataset from tasks.clean_dataset_task import clean_dataset_task @dataset_was_deleted.connect -def handle(sender, **kwargs): +def handle(sender: Dataset, **kwargs): dataset = sender + if not dataset.doc_form or not dataset.indexing_technique: + return clean_dataset_task.delay( dataset.id, dataset.tenant_id, diff --git a/api/events/event_handlers/clean_when_document_deleted.py b/api/events/event_handlers/clean_when_document_deleted.py index bbc913b7cf..0add109b06 100644 --- a/api/events/event_handlers/clean_when_document_deleted.py +++ b/api/events/event_handlers/clean_when_document_deleted.py @@ -8,6 +8,6 @@ def handle(sender, **kwargs): dataset_id = kwargs.get("dataset_id") doc_form = kwargs.get("doc_form") file_id = kwargs.get("file_id") - assert dataset_id is not None - assert doc_form is not None + if not dataset_id or not doc_form: + return clean_document_task.delay(document_id, dataset_id, doc_form, file_id) diff --git a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py index 6c9fc0bf1d..1b44d8a1e2 100644 --- a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py +++ b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py @@ -12,9 +12,9 @@ def handle(sender, **kwargs): if synced_draft_workflow is None: return for node_data in synced_draft_workflow.graph_dict.get("nodes", []): - if node_data.get("data", {}).get("type") == NodeType.TOOL.value: + if node_data.get("data", {}).get("type") == NodeType.TOOL: try: - tool_entity = ToolEntity(**node_data["data"]) + tool_entity = ToolEntity.model_validate(node_data["data"]) tool_runtime = ToolManager.get_tool_runtime( provider_type=tool_entity.provider_type, provider_id=tool_entity.provider_id, diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py index 898ec1f153..53e0065f6e 100644 --- a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py 
+++ b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py @@ -53,7 +53,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]: # fetch all knowledge retrieval nodes knowledge_retrieval_nodes = [ - node for node in nodes if node.get("data", {}).get("type") == NodeType.KNOWLEDGE_RETRIEVAL.value + node for node in nodes if node.get("data", {}).get("type") == NodeType.KNOWLEDGE_RETRIEVAL ] if not knowledge_retrieval_nodes: @@ -61,7 +61,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]: for node in knowledge_retrieval_nodes: try: - node_data = KnowledgeRetrievalNodeData(**node.get("data", {})) + node_data = KnowledgeRetrievalNodeData.model_validate(node.get("data", {})) dataset_ids.update(dataset_id for dataset_id in node_data.dataset_ids) except Exception: continue diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py index 27efa539dc..e1c96fb050 100644 --- a/api/events/event_handlers/update_provider_when_message_created.py +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -1,10 +1,11 @@ import logging import time as time_module from datetime import datetime -from typing import Any +from typing import Any, cast from pydantic import BaseModel from sqlalchemy import update +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session from configs import dify_config @@ -139,7 +140,7 @@ def handle(sender: Message, **kwargs): filters=_ProviderUpdateFilters( tenant_id=tenant_id, provider_name=ModelProviderID(model_config.provider).provider_name, - provider_type=ProviderType.SYSTEM.value, + provider_type=ProviderType.SYSTEM, quota_type=provider_configuration.system_configuration.current_quota_type.value, ), values=_ProviderUpdateValues(quota_used=Provider.quota_used + used_quota, last_used=current_time), @@ -267,7 +268,7 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation] # Build and execute the update statement stmt = update(Provider).where(*where_conditions).values(**update_values) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) rows_affected = result.rowcount logger.debug( diff --git a/api/extensions/ext_app_metrics.py b/api/extensions/ext_app_metrics.py index 56a69a1862..4a6490b9f0 100644 --- a/api/extensions/ext_app_metrics.py +++ b/api/extensions/ext_app_metrics.py @@ -10,14 +10,14 @@ from dify_app import DifyApp def init_app(app: DifyApp): @app.after_request - def after_request(response): + def after_request(response): # pyright: ignore[reportUnusedFunction] """Add Version headers to the response.""" response.headers.add("X-Version", dify_config.project.version) response.headers.add("X-Env", dify_config.DEPLOY_ENV) return response @app.route("/health") - def health(): + def health(): # pyright: ignore[reportUnusedFunction] return Response( json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}), status=200, @@ -25,7 +25,7 @@ def init_app(app: DifyApp): ) @app.route("/threads") - def threads(): + def threads(): # pyright: ignore[reportUnusedFunction] num_threads = threading.active_count() threads = threading.enumerate() @@ -50,7 +50,7 @@ def init_app(app: DifyApp): } @app.route("/db-pool-stat") - def pool_stat(): + def pool_stat(): # pyright: ignore[reportUnusedFunction] from extensions.ext_database import db engine = db.engine diff --git 
a/api/extensions/ext_blueprints.py b/api/extensions/ext_blueprints.py index 9c08a08c45..82f0542b35 100644 --- a/api/extensions/ext_blueprints.py +++ b/api/extensions/ext_blueprints.py @@ -1,6 +1,12 @@ from configs import dify_config +from constants import HEADER_NAME_APP_CODE, HEADER_NAME_CSRF_TOKEN, HEADER_NAME_PASSPORT from dify_app import DifyApp +BASE_CORS_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE, HEADER_NAME_PASSPORT) +SERVICE_API_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, "Authorization") +AUTHENTICATED_HEADERS: tuple[str, ...] = (*SERVICE_API_HEADERS, HEADER_NAME_CSRF_TOKEN) +FILES_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, HEADER_NAME_CSRF_TOKEN) + def init_app(app: DifyApp): # register blueprint routers @@ -16,7 +22,7 @@ def init_app(app: DifyApp): CORS( service_api_bp, - allow_headers=["Content-Type", "Authorization", "X-App-Code"], + allow_headers=list(SERVICE_API_HEADERS), methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], ) app.register_blueprint(service_api_bp) @@ -25,7 +31,7 @@ def init_app(app: DifyApp): web_bp, resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}}, supports_credentials=True, - allow_headers=["Content-Type", "Authorization", "X-App-Code"], + allow_headers=list(AUTHENTICATED_HEADERS), methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], expose_headers=["X-Version", "X-Env"], ) @@ -35,7 +41,7 @@ def init_app(app: DifyApp): console_app_bp, resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}}, supports_credentials=True, - allow_headers=["Content-Type", "Authorization"], + allow_headers=list(AUTHENTICATED_HEADERS), methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], expose_headers=["X-Version", "X-Env"], ) @@ -43,7 +49,7 @@ def init_app(app: DifyApp): CORS( files_bp, - allow_headers=["Content-Type"], + allow_headers=list(FILES_HEADERS), methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], ) app.register_blueprint(files_bp) diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 585539e2ce..6d7d81ed87 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -145,6 +145,7 @@ def init_app(app: DifyApp) -> Celery: } if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK and dify_config.MARKETPLACE_ENABLED: imports.append("schedule.check_upgradable_plugin_task") + imports.append("tasks.process_tenant_plugin_autoupgrade_check_task") beat_schedule["check_upgradable_plugin_task"] = { "task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task", "schedule": crontab(minute="*/15"), diff --git a/api/extensions/ext_compress.py b/api/extensions/ext_compress.py index 26ff6427be..9c3a663af4 100644 --- a/api/extensions/ext_compress.py +++ b/api/extensions/ext_compress.py @@ -7,7 +7,7 @@ def is_enabled() -> bool: def init_app(app: DifyApp): - from flask_compress import Compress # type: ignore + from flask_compress import Compress compress = Compress() compress.init_app(app) diff --git a/api/extensions/ext_database.py b/api/extensions/ext_database.py index 067ce39e4f..c90b1d0a9f 100644 --- a/api/extensions/ext_database.py +++ b/api/extensions/ext_database.py @@ -10,7 +10,7 @@ from models.engine import db logger = logging.getLogger(__name__) # Global flag to avoid duplicate registration of event listener -_GEVENT_COMPATIBILITY_SETUP: bool = False +_gevent_compatibility_setup: bool = False def _safe_rollback(connection): @@ -26,14 +26,14 @@ def _safe_rollback(connection): def _setup_gevent_compatibility(): - global 
_GEVENT_COMPATIBILITY_SETUP # pylint: disable=global-statement + global _gevent_compatibility_setup # pylint: disable=global-statement # Avoid duplicate registration - if _GEVENT_COMPATIBILITY_SETUP: + if _gevent_compatibility_setup: return @event.listens_for(Pool, "reset") - def _safe_reset(dbapi_connection, connection_record, reset_state): # pylint: disable=unused-argument + def _safe_reset(dbapi_connection, connection_record, reset_state): # pyright: ignore[reportUnusedFunction] if reset_state.terminate_only: return @@ -47,7 +47,7 @@ def _setup_gevent_compatibility(): except (AttributeError, ImportError): _safe_rollback(dbapi_connection) - _GEVENT_COMPATIBILITY_SETUP = True + _gevent_compatibility_setup = True def init_app(app: DifyApp): diff --git a/api/extensions/ext_import_modules.py b/api/extensions/ext_import_modules.py index 9566f430b6..4eb363ff93 100644 --- a/api/extensions/ext_import_modules.py +++ b/api/extensions/ext_import_modules.py @@ -2,4 +2,4 @@ from dify_app import DifyApp def init_app(app: DifyApp): - from events import event_handlers # noqa: F401 + from events import event_handlers # noqa: F401 # pyright: ignore[reportUnusedImport] diff --git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index 5571c0d9ba..74299956c0 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -1,15 +1,17 @@ import json -import flask_login # type: ignore +import flask_login from flask import Response, request from flask_login import user_loaded_from_request, user_logged_in from werkzeug.exceptions import NotFound, Unauthorized from configs import dify_config +from constants import HEADER_NAME_APP_CODE from dify_app import DifyApp from extensions.ext_database import db from libs.passport import PassportService -from models.account import Account, Tenant, TenantAccountJoin +from libs.token import extract_access_token, extract_webapp_passport +from models import Account, Tenant, TenantAccountJoin from models.model import AppMCPServer, EndUser from services.account_service import AccountService @@ -24,20 +26,10 @@ def load_user_from_request(request_from_flask_login): if dify_config.SWAGGER_UI_ENABLED and request.path.endswith((dify_config.SWAGGER_UI_PATH, "/swagger.json")): return None - auth_header = request.headers.get("Authorization", "") - auth_token: str | None = None - if auth_header: - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - auth_scheme, auth_token = auth_header.split(maxsplit=1) - auth_scheme = auth_scheme.lower() - if auth_scheme != "bearer": - raise Unauthorized("Invalid Authorization header format. 
Expected 'Bearer ' format.") - else: - auth_token = request.args.get("_token") + auth_token = extract_access_token(request) # Check for admin API key authentication first - if dify_config.ADMIN_API_KEY_ENABLE and auth_header: + if dify_config.ADMIN_API_KEY_ENABLE and auth_token: admin_api_key = dify_config.ADMIN_API_KEY if admin_api_key and admin_api_key == auth_token: workspace_id = request.headers.get("X-WORKSPACE-ID") @@ -70,14 +62,30 @@ def load_user_from_request(request_from_flask_login): logged_in_account = AccountService.load_logged_in_account(account_id=user_id) return logged_in_account elif request.blueprint == "web": - decoded = PassportService().verify(auth_token) - end_user_id = decoded.get("end_user_id") - if not end_user_id: - raise Unauthorized("Invalid Authorization token.") - end_user = db.session.query(EndUser).where(EndUser.id == decoded["end_user_id"]).first() - if not end_user: - raise NotFound("End user not found.") - return end_user + app_code = request.headers.get(HEADER_NAME_APP_CODE) + webapp_token = extract_webapp_passport(app_code, request) if app_code else None + + if webapp_token: + decoded = PassportService().verify(webapp_token) + end_user_id = decoded.get("end_user_id") + if not end_user_id: + raise Unauthorized("Invalid Authorization token.") + end_user = db.session.query(EndUser).where(EndUser.id == end_user_id).first() + if not end_user: + raise NotFound("End user not found.") + return end_user + else: + if not auth_token: + raise Unauthorized("Invalid Authorization token.") + decoded = PassportService().verify(auth_token) + end_user_id = decoded.get("end_user_id") + if end_user_id: + end_user = db.session.query(EndUser).where(EndUser.id == end_user_id).first() + if not end_user: + raise NotFound("End user not found.") + return end_user + else: + raise Unauthorized("Invalid Authorization token for web API.") elif request.blueprint == "mcp": server_code = request.view_args.get("server_code") if request.view_args else None if not server_code: diff --git a/api/extensions/ext_migrate.py b/api/extensions/ext_migrate.py index 5f862181fa..6d8f35c30d 100644 --- a/api/extensions/ext_migrate.py +++ b/api/extensions/ext_migrate.py @@ -2,7 +2,7 @@ from dify_app import DifyApp def init_app(app: DifyApp): - import flask_migrate # type: ignore + import flask_migrate from extensions.ext_database import db diff --git a/api/extensions/ext_otel.py b/api/extensions/ext_otel.py index b0059693e2..20ac2503a2 100644 --- a/api/extensions/ext_otel.py +++ b/api/extensions/ext_otel.py @@ -103,7 +103,7 @@ def init_app(app: DifyApp): def shutdown_tracer(): provider = trace.get_tracer_provider() if hasattr(provider, "force_flush"): - provider.force_flush() # ty: ignore [call-non-callable] + provider.force_flush() class ExceptionLoggingHandler(logging.Handler): """Custom logging handler that creates spans for logging.exception() calls""" @@ -136,8 +136,8 @@ def init_app(app: DifyApp): from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HTTPSpanExporter from opentelemetry.instrumentation.celery import CeleryInstrumentor from opentelemetry.instrumentation.flask import FlaskInstrumentor + from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor from opentelemetry.instrumentation.redis import RedisInstrumentor - from opentelemetry.instrumentation.requests import RequestsInstrumentor from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor from opentelemetry.metrics import get_meter, get_meter_provider, set_meter_provider 
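    # HTTPXClientInstrumentor (swapped in above for RequestsInstrumentor,
    # presumably because the API's outbound HTTP now goes through httpx) patches
    # httpx.Client and httpx.AsyncClient so each outbound request is recorded as
    # a client span; calling uninstrument() restores the unpatched clients.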
from opentelemetry.propagate import set_global_textmap @@ -237,7 +237,7 @@ def init_app(app: DifyApp): instrument_exception_logging() init_sqlalchemy_instrumentor(app) RedisInstrumentor().instrument() - RequestsInstrumentor().instrument() + HTTPXClientInstrumentor().instrument() atexit.register(shutdown_tracer) diff --git a/api/extensions/ext_proxy_fix.py b/api/extensions/ext_proxy_fix.py index c085aed986..fe6685f633 100644 --- a/api/extensions/ext_proxy_fix.py +++ b/api/extensions/ext_proxy_fix.py @@ -6,4 +6,4 @@ def init_app(app: DifyApp): if dify_config.RESPECT_XFORWARD_HEADERS_ENABLED: from werkzeug.middleware.proxy_fix import ProxyFix - app.wsgi_app = ProxyFix(app.wsgi_app, x_port=1) # type: ignore + app.wsgi_app = ProxyFix(app.wsgi_app, x_port=1) # type: ignore[method-assign] diff --git a/api/extensions/ext_sentry.py b/api/extensions/ext_sentry.py index 6cfa99a62a..c3aa8edf80 100644 --- a/api/extensions/ext_sentry.py +++ b/api/extensions/ext_sentry.py @@ -4,9 +4,8 @@ from dify_app import DifyApp def init_app(app: DifyApp): if dify_config.SENTRY_DSN: - import openai import sentry_sdk - from langfuse import parse_error # type: ignore + from langfuse import parse_error from sentry_sdk.integrations.celery import CeleryIntegration from sentry_sdk.integrations.flask import FlaskIntegration from werkzeug.exceptions import HTTPException @@ -28,7 +27,6 @@ def init_app(app: DifyApp): HTTPException, ValueError, FileNotFoundError, - openai.APIStatusError, InvokeRateLimitError, parse_error.defaultErrorResponse, ], diff --git a/api/extensions/storage/aliyun_oss_storage.py b/api/extensions/storage/aliyun_oss_storage.py index 00bf5d4f93..2283581f62 100644 --- a/api/extensions/storage/aliyun_oss_storage.py +++ b/api/extensions/storage/aliyun_oss_storage.py @@ -1,7 +1,7 @@ import posixpath from collections.abc import Generator -import oss2 as aliyun_s3 # type: ignore +import oss2 as aliyun_s3 from configs import dify_config from extensions.storage.base_storage import BaseStorage @@ -33,7 +33,9 @@ class AliyunOssStorage(BaseStorage): def load_once(self, filename: str) -> bytes: obj = self.client.get_object(self.__wrapper_folder_filename(filename)) - data: bytes = obj.read() + data = obj.read() + if not isinstance(data, bytes): + return b"" return data def load_stream(self, filename: str) -> Generator: diff --git a/api/extensions/storage/aws_s3_storage.py b/api/extensions/storage/aws_s3_storage.py index e755ab089a..6ab2a95e3c 100644 --- a/api/extensions/storage/aws_s3_storage.py +++ b/api/extensions/storage/aws_s3_storage.py @@ -39,10 +39,10 @@ class AwsS3Storage(BaseStorage): self.client.head_bucket(Bucket=self.bucket_name) except ClientError as e: # if bucket not exists, create it - if e.response["Error"]["Code"] == "404": + if e.response.get("Error", {}).get("Code") == "404": self.client.create_bucket(Bucket=self.bucket_name) # if bucket is not accessible, pass, maybe the bucket is existing but not accessible - elif e.response["Error"]["Code"] == "403": + elif e.response.get("Error", {}).get("Code") == "403": pass else: # other error, raise exception @@ -55,7 +55,7 @@ class AwsS3Storage(BaseStorage): try: data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise @@ -66,7 +66,7 @@ class AwsS3Storage(BaseStorage): response = self.client.get_object(Bucket=self.bucket_name, 
Key=filename) yield from response["Body"].iter_chunks() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("file not found") elif "reached max retries" in str(ex): raise ValueError("please do not request the same file too frequently") diff --git a/api/extensions/storage/azure_blob_storage.py b/api/extensions/storage/azure_blob_storage.py index 9053aece89..4bccaf13c8 100644 --- a/api/extensions/storage/azure_blob_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -27,24 +27,38 @@ class AzureBlobStorage(BaseStorage): self.credential = None def save(self, filename, data): + if not self.bucket_name: + return + client = self._sync_client() blob_container = client.get_container_client(container=self.bucket_name) blob_container.upload_blob(filename, data) def load_once(self, filename: str) -> bytes: + if not self.bucket_name: + raise FileNotFoundError("Azure bucket name is not configured.") + client = self._sync_client() blob = client.get_container_client(container=self.bucket_name) blob = blob.get_blob_client(blob=filename) - data: bytes = blob.download_blob().readall() + data = blob.download_blob().readall() + if not isinstance(data, bytes): + raise TypeError(f"Expected bytes from blob.readall(), got {type(data).__name__}") return data def load_stream(self, filename: str) -> Generator: + if not self.bucket_name: + raise FileNotFoundError("Azure bucket name is not configured.") + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) blob_data = blob.download_blob() yield from blob_data.chunks() def download(self, filename, target_filepath): + if not self.bucket_name: + return + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) @@ -53,12 +67,18 @@ class AzureBlobStorage(BaseStorage): blob_data.readinto(my_blob) def exists(self, filename): + if not self.bucket_name: + return False + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) return blob.exists() def delete(self, filename): + if not self.bucket_name: + return + client = self._sync_client() blob_container = client.get_container_client(container=self.bucket_name) diff --git a/api/extensions/storage/baidu_obs_storage.py b/api/extensions/storage/baidu_obs_storage.py index b94efa08be..0bb4648c0a 100644 --- a/api/extensions/storage/baidu_obs_storage.py +++ b/api/extensions/storage/baidu_obs_storage.py @@ -2,9 +2,9 @@ import base64 import hashlib from collections.abc import Generator -from baidubce.auth.bce_credentials import BceCredentials # type: ignore -from baidubce.bce_client_configuration import BceClientConfiguration # type: ignore -from baidubce.services.bos.bos_client import BosClient # type: ignore +from baidubce.auth.bce_credentials import BceCredentials +from baidubce.bce_client_configuration import BceClientConfiguration +from baidubce.services.bos.bos_client import BosClient from configs import dify_config from extensions.storage.base_storage import BaseStorage diff --git a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py index 2ffac9a92d..1cabc57e74 100644 --- a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py +++ b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py @@ -11,7 +11,7 @@ from collections.abc import Generator from io 
import BytesIO from pathlib import Path -import clickzetta # type: ignore[import] +import clickzetta from pydantic import BaseModel, model_validator from extensions.storage.base_storage import BaseStorage @@ -430,7 +430,7 @@ class ClickZettaVolumeStorage(BaseStorage): rows = self._execute_sql(sql, fetch=True) - exists = len(rows) > 0 + exists = len(rows) > 0 if rows else False logger.debug("File %s exists check: %s", filename, exists) return exists except Exception as e: @@ -509,16 +509,17 @@ class ClickZettaVolumeStorage(BaseStorage): rows = self._execute_sql(sql, fetch=True) result = [] - for row in rows: - file_path = row[0] # relative_path column + if rows: + for row in rows: + file_path = row[0] # relative_path column - # For User Volume, remove dify prefix from results - dify_prefix_with_slash = f"{self._config.dify_prefix}/" - if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash): - file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix + # For User Volume, remove dify prefix from results + dify_prefix_with_slash = f"{self._config.dify_prefix}/" + if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash): + file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix - if files and not file_path.endswith("/") or directories and file_path.endswith("/"): - result.append(file_path) + if files and not file_path.endswith("/") or directories and file_path.endswith("/"): + result.append(file_path) logger.debug("Scanned %d items in path %s", len(result), path) return result diff --git a/api/extensions/storage/clickzetta_volume/file_lifecycle.py b/api/extensions/storage/clickzetta_volume/file_lifecycle.py index 6ab02ad8cc..dc5aa8e39c 100644 --- a/api/extensions/storage/clickzetta_volume/file_lifecycle.py +++ b/api/extensions/storage/clickzetta_volume/file_lifecycle.py @@ -264,7 +264,7 @@ class FileLifecycleManager: logger.warning("File %s not found in metadata", filename) return False - metadata_dict[filename]["status"] = FileStatus.ARCHIVED.value + metadata_dict[filename]["status"] = FileStatus.ARCHIVED metadata_dict[filename]["modified_at"] = datetime.now().isoformat() self._save_metadata(metadata_dict) @@ -309,7 +309,7 @@ class FileLifecycleManager: # Update metadata metadata_dict = self._load_metadata() if filename in metadata_dict: - metadata_dict[filename]["status"] = FileStatus.DELETED.value + metadata_dict[filename]["status"] = FileStatus.DELETED metadata_dict[filename]["modified_at"] = datetime.now().isoformat() self._save_metadata(metadata_dict) diff --git a/api/extensions/storage/clickzetta_volume/volume_permissions.py b/api/extensions/storage/clickzetta_volume/volume_permissions.py index eb1116638f..9d4ca689d8 100644 --- a/api/extensions/storage/clickzetta_volume/volume_permissions.py +++ b/api/extensions/storage/clickzetta_volume/volume_permissions.py @@ -34,7 +34,7 @@ class VolumePermissionManager: # Support two initialization methods: connection object or configuration dictionary if isinstance(connection_or_config, dict): # Create connection from configuration dictionary - import clickzetta # type: ignore[import-untyped] + import clickzetta config = connection_or_config self._connection = clickzetta.connect( @@ -439,6 +439,11 @@ class VolumePermissionManager: self._permission_cache.clear() logger.debug("Permission cache cleared") + @property + def volume_type(self) -> str | None: + """Get the volume type.""" + return self._volume_type + def get_permission_summary(self, dataset_id: str | None = None) -> 
dict[str, bool]: """Get permission summary @@ -632,13 +637,13 @@ def check_volume_permission(permission_manager: VolumePermissionManager, operati VolumePermissionError: If no permission """ if not permission_manager.validate_operation(operation, dataset_id): - error_message = f"Permission denied for operation '{operation}' on {permission_manager._volume_type} volume" + error_message = f"Permission denied for operation '{operation}' on {permission_manager.volume_type} volume" if dataset_id: error_message += f" (dataset: {dataset_id})" raise VolumePermissionError( error_message, operation=operation, - volume_type=permission_manager._volume_type or "unknown", + volume_type=permission_manager.volume_type or "unknown", dataset_id=dataset_id, ) diff --git a/api/extensions/storage/google_cloud_storage.py b/api/extensions/storage/google_cloud_storage.py index 705639f42e..d352996518 100644 --- a/api/extensions/storage/google_cloud_storage.py +++ b/api/extensions/storage/google_cloud_storage.py @@ -3,7 +3,7 @@ import io import json from collections.abc import Generator -from google.cloud import storage as google_cloud_storage # type: ignore +from google.cloud import storage as google_cloud_storage from configs import dify_config from extensions.storage.base_storage import BaseStorage @@ -35,12 +35,16 @@ class GoogleCloudStorage(BaseStorage): def load_once(self, filename: str) -> bytes: bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") data: bytes = blob.download_as_bytes() return data def load_stream(self, filename: str) -> Generator: bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") with blob.open(mode="rb") as blob_stream: while chunk := blob_stream.read(4096): yield chunk @@ -48,6 +52,8 @@ class GoogleCloudStorage(BaseStorage): def download(self, filename, target_filepath): bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") blob.download_to_filename(target_filepath) def exists(self, filename): diff --git a/api/extensions/storage/huawei_obs_storage.py b/api/extensions/storage/huawei_obs_storage.py index 07f1d19970..74fed26f65 100644 --- a/api/extensions/storage/huawei_obs_storage.py +++ b/api/extensions/storage/huawei_obs_storage.py @@ -1,6 +1,6 @@ from collections.abc import Generator -from obs import ObsClient # type: ignore +from obs import ObsClient from configs import dify_config from extensions.storage.base_storage import BaseStorage @@ -45,7 +45,7 @@ class HuaweiObsStorage(BaseStorage): def _get_meta(self, filename): res = self.client.getObjectMetadata(bucketName=self.bucket_name, objectKey=filename) - if res.status < 300: + if res and res.status and res.status < 300: return res else: return None diff --git a/api/extensions/storage/opendal_storage.py b/api/extensions/storage/opendal_storage.py index b10391c7f1..f7146adba6 100644 --- a/api/extensions/storage/opendal_storage.py +++ b/api/extensions/storage/opendal_storage.py @@ -3,9 +3,9 @@ import os from collections.abc import Generator from pathlib import Path +import opendal from dotenv import dotenv_values from opendal import Operator -from opendal.layers import RetryLayer from extensions.storage.base_storage import BaseStorage @@ -35,7 +35,7 @@ class OpenDALStorage(BaseStorage): root = kwargs.get("root", "storage") Path(root).mkdir(parents=True, 
exist_ok=True) - retry_layer = RetryLayer(max_times=3, factor=2.0, jitter=True) + retry_layer = opendal.layers.RetryLayer(max_times=3, factor=2.0, jitter=True) self.op = Operator(scheme=scheme, **kwargs).layer(retry_layer) logger.debug("opendal operator created with scheme %s", scheme) logger.debug("added retry layer to opendal operator") diff --git a/api/extensions/storage/oracle_oci_storage.py b/api/extensions/storage/oracle_oci_storage.py index 82829f7fd5..c032803045 100644 --- a/api/extensions/storage/oracle_oci_storage.py +++ b/api/extensions/storage/oracle_oci_storage.py @@ -1,7 +1,7 @@ from collections.abc import Generator -import boto3 # type: ignore -from botocore.exceptions import ClientError # type: ignore +import boto3 +from botocore.exceptions import ClientError from configs import dify_config from extensions.storage.base_storage import BaseStorage @@ -29,7 +29,7 @@ class OracleOCIStorage(BaseStorage): try: data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise @@ -40,7 +40,7 @@ class OracleOCIStorage(BaseStorage): response = self.client.get_object(Bucket=self.bucket_name, Key=filename) yield from response["Body"].iter_chunks() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise diff --git a/api/extensions/storage/supabase_storage.py b/api/extensions/storage/supabase_storage.py index 711c3f7211..2ca84d4c15 100644 --- a/api/extensions/storage/supabase_storage.py +++ b/api/extensions/storage/supabase_storage.py @@ -46,13 +46,13 @@ class SupabaseStorage(BaseStorage): Path(target_filepath).write_bytes(result) def exists(self, filename): - result = self.client.storage.from_(self.bucket_name).list(filename) - if result.count() > 0: + result = self.client.storage.from_(self.bucket_name).list(path=filename) + if len(result) > 0: return True return False def delete(self, filename): - self.client.storage.from_(self.bucket_name).remove(filename) + self.client.storage.from_(self.bucket_name).remove([filename]) def bucket_exists(self): buckets = self.client.storage.list_buckets() diff --git a/api/extensions/storage/tencent_cos_storage.py b/api/extensions/storage/tencent_cos_storage.py index 9cdd3e67f7..ea5d982efc 100644 --- a/api/extensions/storage/tencent_cos_storage.py +++ b/api/extensions/storage/tencent_cos_storage.py @@ -1,6 +1,6 @@ from collections.abc import Generator -from qcloud_cos import CosConfig, CosS3Client # type: ignore +from qcloud_cos import CosConfig, CosS3Client from configs import dify_config from extensions.storage.base_storage import BaseStorage diff --git a/api/extensions/storage/volcengine_tos_storage.py b/api/extensions/storage/volcengine_tos_storage.py index 32839d3497..a44959221f 100644 --- a/api/extensions/storage/volcengine_tos_storage.py +++ b/api/extensions/storage/volcengine_tos_storage.py @@ -1,6 +1,6 @@ from collections.abc import Generator -import tos # type: ignore +import tos from configs import dify_config from extensions.storage.base_storage import BaseStorage @@ -11,6 +11,14 @@ class VolcengineTosStorage(BaseStorage): def __init__(self): super().__init__() + if not dify_config.VOLCENGINE_TOS_ACCESS_KEY: + raise ValueError("VOLCENGINE_TOS_ACCESS_KEY is not set") + if not 
dify_config.VOLCENGINE_TOS_SECRET_KEY: + raise ValueError("VOLCENGINE_TOS_SECRET_KEY is not set") + if not dify_config.VOLCENGINE_TOS_ENDPOINT: + raise ValueError("VOLCENGINE_TOS_ENDPOINT is not set") + if not dify_config.VOLCENGINE_TOS_REGION: + raise ValueError("VOLCENGINE_TOS_REGION is not set") self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME self.client = tos.TosClientV2( ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY, @@ -20,27 +28,39 @@ class VolcengineTosStorage(BaseStorage): ) def save(self, filename, data): + if not self.bucket_name: + raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set") self.client.put_object(bucket=self.bucket_name, key=filename, content=data) def load_once(self, filename: str) -> bytes: + if not self.bucket_name: + raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set") data = self.client.get_object(bucket=self.bucket_name, key=filename).read() if not isinstance(data, bytes): raise TypeError(f"Expected bytes, got {type(data).__name__}") return data def load_stream(self, filename: str) -> Generator: + if not self.bucket_name: + raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set") response = self.client.get_object(bucket=self.bucket_name, key=filename) while chunk := response.read(4096): yield chunk def download(self, filename, target_filepath): + if not self.bucket_name: + raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set") self.client.get_object_to_file(bucket=self.bucket_name, key=filename, file_path=target_filepath) def exists(self, filename): + if not self.bucket_name: + return False res = self.client.head_object(bucket=self.bucket_name, key=filename) if res.status_code != 200: return False return True def delete(self, filename): + if not self.bucket_name: + return self.client.delete_object(bucket=self.bucket_name, key=filename) diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index d66c757249..2316e45179 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -21,7 +21,7 @@ def build_from_message_files( *, message_files: Sequence["MessageFile"], tenant_id: str, - config: FileUploadConfig, + config: FileUploadConfig | None = None, ) -> Sequence[File]: results = [ build_from_message_file(message_file=file, tenant_id=tenant_id, config=config) @@ -35,17 +35,20 @@ def build_from_message_file( *, message_file: "MessageFile", tenant_id: str, - config: FileUploadConfig, + config: FileUploadConfig | None, ): mapping = { "transfer_method": message_file.transfer_method, "url": message_file.url, - "id": message_file.id, "type": message_file.type, } + # Only include id if it exists (message_file has been committed to DB) + if message_file.id: + mapping["id"] = message_file.id + # Set the correct ID field based on transfer method - if message_file.transfer_method == FileTransferMethod.TOOL_FILE.value: + if message_file.transfer_method == FileTransferMethod.TOOL_FILE: mapping["tool_file_id"] = message_file.upload_file_id else: mapping["upload_file_id"] = message_file.upload_file_id @@ -64,7 +67,10 @@ def build_from_mapping( config: FileUploadConfig | None = None, strict_type_validation: bool = False, ) -> File: - transfer_method = FileTransferMethod.value_of(mapping.get("transfer_method")) + transfer_method_value = mapping.get("transfer_method") + if not transfer_method_value: + raise ValueError("transfer_method is required in file mapping") + transfer_method = FileTransferMethod.value_of(transfer_method_value) build_functions: dict[FileTransferMethod, Callable] = { 
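        # Dispatch table: transfer_method was parsed by value_of() above, so each
        # supported FileTransferMethod maps to its dedicated builder here.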
FileTransferMethod.LOCAL_FILE: _build_from_local_file, @@ -104,6 +110,8 @@ def build_from_mappings( ) -> Sequence[File]: # TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query. # Implement batch processing to reduce database load when handling multiple files. + # Filter out None/empty mappings to avoid errors + valid_mappings = [m for m in mappings if m and m.get("transfer_method")] files = [ build_from_mapping( mapping=mapping, @@ -111,7 +119,7 @@ def build_from_mappings( config=config, strict_type_validation=strict_type_validation, ) - for mapping in mappings + for mapping in valid_mappings ] if ( @@ -158,7 +166,10 @@ def _build_from_local_file( if strict_type_validation and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") - file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("id"), @@ -206,9 +217,10 @@ def _build_from_remote_url( if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") - file_type = ( - FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type - ) + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("id"), @@ -230,10 +242,17 @@ def _build_from_remote_url( mime_type, filename, file_size = _get_remote_file_info(url) extension = mimetypes.guess_extension(mime_type) or ("." + filename.split(".")[-1] if "." in filename else ".bin") - file_type = _standardize_file_type(extension=extension, mime_type=mime_type) - if file_type.value != mapping.get("type", "custom"): + detected_file_type = _standardize_file_type(extension=extension, mime_type=mime_type) + specified_type = mapping.get("type") + + if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type + return File( id=mapping.get("id"), filename=filename, @@ -323,7 +342,10 @@ def _build_from_tool_file( if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") - file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("id"), @@ -368,9 +390,10 @@ def _build_from_datasource_file( if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. 
Please verify the file.") - file_type = ( - FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM.value else detected_file_type - ) + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("datasource_file_id"), diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index 2104e66254..494194369a 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -142,6 +142,8 @@ def build_segment(value: Any, /) -> Segment: # below if value is None: return NoneSegment() + if isinstance(value, Segment): + return value if isinstance(value, str): return StringSegment(value=value) if isinstance(value, bool): diff --git a/api/fields/workflow_run_fields.py b/api/fields/workflow_run_fields.py index 649e881848..79594beeed 100644 --- a/api/fields/workflow_run_fields.py +++ b/api/fields/workflow_run_fields.py @@ -64,6 +64,15 @@ workflow_run_pagination_fields = { "data": fields.List(fields.Nested(workflow_run_for_list_fields), attribute="data"), } +workflow_run_count_fields = { + "total": fields.Integer, + "running": fields.Integer, + "succeeded": fields.Integer, + "failed": fields.Integer, + "stopped": fields.Integer, + "partial_succeeded": fields.Integer(attribute="partial-succeeded"), +} + workflow_run_detail_fields = { "id": fields.String, "version": fields.String, diff --git a/api/libs/collection_utils.py b/api/libs/collection_utils.py new file mode 100644 index 0000000000..f97308ca44 --- /dev/null +++ b/api/libs/collection_utils.py @@ -0,0 +1,14 @@ +def convert_to_lower_and_upper_set(inputs: list[str] | set[str]) -> set[str]: + """ + Convert a list or set of strings to a set containing both lower and upper case versions of each string. + + Args: + inputs (list[str] | set[str]): A list or set of strings to be converted. + + Returns: + set[str]: A set containing both lower and upper case versions of each string. + """ + if not inputs: + return set() + else: + return {case for s in inputs if s for case in (s.lower(), s.upper())} diff --git a/api/libs/custom_inputs.py b/api/libs/custom_inputs.py new file mode 100644 index 0000000000..10d550ed65 --- /dev/null +++ b/api/libs/custom_inputs.py @@ -0,0 +1,32 @@ +"""Custom input types for Flask-RESTX request parsing.""" + +import re + + +def time_duration(value: str) -> str: + """ + Validate and return time duration string. + + Accepts formats: d (days), h (hours), m (minutes), s (seconds) + Examples: 7d, 4h, 30m, 30s + + Args: + value: The time duration string + + Returns: + The validated time duration string + + Raises: + ValueError: If the format is invalid + """ + if not value: + raise ValueError("Time duration cannot be empty") + + pattern = r"^(\d+)([dhms])$" + if not re.match(pattern, value.lower()): + raise ValueError( + "Invalid time duration format. Use: d (days), h (hours), " + "m (minutes), or s (seconds). 
Examples: 7d, 4h, 30m, 30s" + ) + + return value.lower() diff --git a/api/libs/external_api.py b/api/libs/external_api.py index cf91b0117f..61a90ee4a9 100644 --- a/api/libs/external_api.py +++ b/api/libs/external_api.py @@ -10,6 +10,7 @@ from werkzeug.http import HTTP_STATUS_CODES from configs import dify_config from core.errors.error import AppInvokeQuotaExceededError +from libs.token import build_force_logout_cookie_headers def http_status_message(code): @@ -22,7 +23,7 @@ def register_external_error_handlers(api: Api): got_request_exception.send(current_app, exception=e) # If Werkzeug already prepared a Response, just use it. - if getattr(e, "response", None) is not None: + if e.response is not None: return e.response status_code = getattr(e, "code", 500) or 500 @@ -67,6 +68,11 @@ def register_external_error_handlers(api: Api): # If you need WWW-Authenticate for 401, add it to headers if status_code == 401: headers["WWW-Authenticate"] = 'Bearer realm="api"' + # Check if this is a forced logout error - clear cookies + error_code = getattr(e, "error_code", None) + if error_code == "unauthorized_and_force_logout": + # Add Set-Cookie headers to clear auth cookies + headers["Set-Cookie"] = build_force_logout_cookie_headers() return data, status_code, headers _ = handle_http_exception @@ -94,7 +100,7 @@ def register_external_error_handlers(api: Api): got_request_exception.send(current_app, exception=e) status_code = 500 - data = getattr(e, "data", {"message": http_status_message(status_code)}) + data: dict[str, Any] = getattr(e, "data", {"message": http_status_message(status_code)}) # 🔒 Normalize non-mapping data (e.g., if someone set e.data = Response) if not isinstance(data, dict): @@ -106,7 +112,7 @@ def register_external_error_handlers(api: Api): # Log stack exc_info: Any = sys.exc_info() if exc_info[1] is None: - exc_info = None + exc_info = (None, None, None) current_app.log_exception(exc_info) return data, status_code @@ -131,6 +137,6 @@ class ExternalApi(Api): kwargs["doc"] = dify_config.SWAGGER_UI_PATH if dify_config.SWAGGER_UI_ENABLED else False # manual separate call on construction and init_app to ensure configs in kwargs effective - super().__init__(app=None, *args, **kwargs) # type: ignore + super().__init__(app=None, *args, **kwargs) self.init_app(app, **kwargs) register_external_error_handlers(self) diff --git a/api/libs/gmpy2_pkcs10aep_cipher.py b/api/libs/gmpy2_pkcs10aep_cipher.py index 9759156c0f..23eb8dca05 100644 --- a/api/libs/gmpy2_pkcs10aep_cipher.py +++ b/api/libs/gmpy2_pkcs10aep_cipher.py @@ -23,11 +23,11 @@ from hashlib import sha1 import Crypto.Hash.SHA1 import Crypto.Util.number -import gmpy2 # type: ignore +import gmpy2 from Crypto import Random from Crypto.Signature.pss import MGF1 from Crypto.Util.number import bytes_to_long, ceil_div, long_to_bytes -from Crypto.Util.py3compat import _copy_bytes, bord +from Crypto.Util.py3compat import bord from Crypto.Util.strxor import strxor @@ -72,7 +72,7 @@ class PKCS1OAepCipher: else: self._mgf = lambda x, y: MGF1(x, y, self._hashObj) - self._label = _copy_bytes(None, None, label) + self._label = bytes(label) self._randfunc = randfunc def can_encrypt(self): @@ -120,7 +120,7 @@ class PKCS1OAepCipher: # Step 2b ps = b"\x00" * ps_len # Step 2c - db = lHash + ps + b"\x01" + _copy_bytes(None, None, message) + db = lHash + ps + b"\x01" + bytes(message) # Step 2d ros = self._randfunc(hLen) # Step 2e @@ -136,7 +136,7 @@ class PKCS1OAepCipher: # Step 3a (OS2IP) em_int = bytes_to_long(em) # Step 3b (RSAEP) - m_int = 
gmpy2.powmod(em_int, self._key.e, self._key.n) # ty: ignore [unresolved-attribute] + m_int = gmpy2.powmod(em_int, self._key.e, self._key.n) # Step 3c (I2OSP) c = long_to_bytes(m_int, k) return c @@ -169,7 +169,7 @@ class PKCS1OAepCipher: ct_int = bytes_to_long(ciphertext) # Step 2b (RSADP) # m_int = self._key._decrypt(ct_int) - m_int = gmpy2.powmod(ct_int, self._key.d, self._key.n) # ty: ignore [unresolved-attribute] + m_int = gmpy2.powmod(ct_int, self._key.d, self._key.n) # Complete step 2c (I2OSP) em = long_to_bytes(m_int, k) # Step 3a @@ -191,12 +191,12 @@ class PKCS1OAepCipher: # Step 3g one_pos = hLen + db[hLen:].find(b"\x01") lHash1 = db[:hLen] - invalid = bord(y) | int(one_pos < hLen) # type: ignore + invalid = bord(y) | int(one_pos < hLen) # type: ignore[arg-type] hash_compare = strxor(lHash1, lHash) for x in hash_compare: - invalid |= bord(x) # type: ignore + invalid |= bord(x) # type: ignore[arg-type] for x in db[hLen:one_pos]: - invalid |= bord(x) # type: ignore + invalid |= bord(x) # type: ignore[arg-type] if invalid != 0: raise ValueError("Incorrect decryption.") # Step 4 diff --git a/api/libs/helper.py b/api/libs/helper.py index 0551470f65..60484dd40b 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -24,7 +24,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from extensions.ext_redis import redis_client if TYPE_CHECKING: - from models.account import Account + from models import Account from models.model import EndUser logger = logging.getLogger(__name__) @@ -43,7 +43,7 @@ def extract_tenant_id(user: Union["Account", "EndUser"]) -> str | None: Raises: ValueError: If user is neither Account nor EndUser """ - from models.account import Account + from models import Account from models.model import EndUser if isinstance(user, Account): @@ -78,9 +78,11 @@ class AvatarUrlField(fields.Raw): if obj is None: return None - from models.account import Account + from models import Account if isinstance(obj, Account) and obj.avatar is not None: + if obj.avatar.startswith(("http://", "https://")): + return obj.avatar return file_helpers.get_signed_file_url(obj.avatar) return None diff --git a/api/libs/json_in_md_parser.py b/api/libs/json_in_md_parser.py index 0c642041bf..310e677747 100644 --- a/api/libs/json_in_md_parser.py +++ b/api/libs/json_in_md_parser.py @@ -6,22 +6,22 @@ from core.llm_generator.output_parser.errors import OutputParserError def parse_json_markdown(json_string: str): # Get json from the backticks/braces json_string = json_string.strip() - starts = ["```json", "```", "``", "`", "{"] - ends = ["```", "``", "`", "}"] + starts = ["```json", "```", "``", "`", "{", "["] + ends = ["```", "``", "`", "}", "]"] end_index = -1 start_index = 0 parsed: dict = {} for s in starts: start_index = json_string.find(s) if start_index != -1: - if json_string[start_index] != "{": + if json_string[start_index] not in ("{", "["): start_index += len(s) break if start_index != -1: for e in ends: end_index = json_string.rfind(e, start_index) if end_index != -1: - if json_string[end_index] == "}": + if json_string[end_index] in ("}", "]"): end_index += 1 break if start_index != -1 and end_index != -1 and start_index < end_index: @@ -38,6 +38,12 @@ def parse_and_check_json_markdown(text: str, expected_keys: list[str]): json_obj = parse_json_markdown(text) except json.JSONDecodeError as e: raise OutputParserError(f"got invalid json object. 
error: {e}") + + if isinstance(json_obj, list): + if len(json_obj) == 1 and isinstance(json_obj[0], dict): + json_obj = json_obj[0] + else: + raise OutputParserError(f"got invalid return object. obj:{json_obj}") for key in expected_keys: if key not in json_obj: raise OutputParserError( diff --git a/api/libs/login.py b/api/libs/login.py index 0535f52ea1..4b8ee2d1f8 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -1,18 +1,33 @@ from collections.abc import Callable from functools import wraps -from typing import Union, cast +from typing import Any from flask import current_app, g, has_request_context, request -from flask_login.config import EXEMPT_METHODS # type: ignore +from flask_login.config import EXEMPT_METHODS from werkzeug.local import LocalProxy from configs import dify_config -from models.account import Account +from libs.token import check_csrf_token +from models import Account from models.model import EndUser -#: A proxy for the current user. If no user is logged in, this will be an -#: anonymous user -current_user = cast(Union[Account, EndUser, None], LocalProxy(lambda: _get_user())) + +def current_account_with_tenant(): + """ + Resolve the underlying account for the current user proxy and ensure tenant context exists. + Allows tests to supply plain Account mocks without the LocalProxy helper. + """ + user_proxy = current_user + + get_current_object = getattr(user_proxy, "_get_current_object", None) + user = get_current_object() if callable(get_current_object) else user_proxy # type: ignore + + if not isinstance(user, Account): + raise ValueError("current_user must be an Account instance") + assert user.current_tenant_id is not None, "The tenant information should be loaded." + return user, user.current_tenant_id + + from typing import ParamSpec, TypeVar P = ParamSpec("P") @@ -59,6 +74,9 @@ def login_required(func: Callable[P, R]): pass elif current_user is not None and not current_user.is_authenticated: return current_app.login_manager.unauthorized() # type: ignore + # we put csrf validation here for less conflicts + # TODO: maybe find a better place for it. + check_csrf_token(request, current_user.id) return current_app.ensure_sync(func)(*args, **kwargs) return decorated_view @@ -69,6 +87,12 @@ def _get_user() -> EndUser | Account | None: if "_login_user" not in g: current_app.login_manager._load_user() # type: ignore - return g._login_user # type: ignore + return g._login_user return None + + +#: A proxy for the current user. 
If no user is logged in, this will be an +#: anonymous user +# NOTE: Any here, but use _get_current_object to check the fields +current_user: Any = LocalProxy(lambda: _get_user()) diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py index ecc4b3fb98..c047c54d06 100644 --- a/api/libs/sendgrid.py +++ b/api/libs/sendgrid.py @@ -1,8 +1,8 @@ import logging -import sendgrid # type: ignore +import sendgrid from python_http_client.exceptions import ForbiddenError, UnauthorizedError -from sendgrid.helpers.mail import Content, Email, Mail, To # type: ignore +from sendgrid.helpers.mail import Content, Email, Mail, To logger = logging.getLogger(__name__) @@ -14,7 +14,7 @@ class SendGridClient: def send(self, mail: dict): logger.debug("Sending email with SendGrid") - + _to = "" try: _to = mail["to"] @@ -28,7 +28,7 @@ class SendGridClient: content = Content("text/html", mail["html"]) sg_mail = Mail(from_email, to_email, subject, content) mail_json = sg_mail.get() - response = sg.client.mail.send.post(request_body=mail_json) # ty: ignore [call-non-callable] + response = sg.client.mail.send.post(request_body=mail_json) # type: ignore logger.debug(response.status_code) logger.debug(response.body) logger.debug(response.headers) diff --git a/api/libs/time_parser.py b/api/libs/time_parser.py new file mode 100644 index 0000000000..1d9dd92a08 --- /dev/null +++ b/api/libs/time_parser.py @@ -0,0 +1,67 @@ +"""Time duration parser utility.""" + +import re +from datetime import UTC, datetime, timedelta + + +def parse_time_duration(duration_str: str) -> timedelta | None: + """ + Parse time duration string to timedelta. + + Supported formats: + - 7d: 7 days + - 4h: 4 hours + - 30m: 30 minutes + - 30s: 30 seconds + + Args: + duration_str: Duration string (e.g., "7d", "4h", "30m", "30s") + + Returns: + timedelta object or None if invalid format + """ + if not duration_str: + return None + + # Pattern: number followed by unit (d, h, m, s) + pattern = r"^(\d+)([dhms])$" + match = re.match(pattern, duration_str.lower()) + + if not match: + return None + + value = int(match.group(1)) + unit = match.group(2) + + if unit == "d": + return timedelta(days=value) + elif unit == "h": + return timedelta(hours=value) + elif unit == "m": + return timedelta(minutes=value) + elif unit == "s": + return timedelta(seconds=value) + + return None + + +def get_time_threshold(duration_str: str | None) -> datetime | None: + """ + Get datetime threshold from duration string. + + Calculates the datetime that is duration_str ago from now. 
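+    For example, get_time_threshold("7d") evaluates to datetime.now(UTC) - timedelta(days=7).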
+ + Args: + duration_str: Duration string (e.g., "7d", "4h", "30m", "30s") + + Returns: + datetime object representing the threshold time, or None if no duration + """ + if not duration_str: + return None + + duration = parse_time_duration(duration_str) + if duration is None: + return None + + return datetime.now(UTC) - duration diff --git a/api/libs/token.py b/api/libs/token.py new file mode 100644 index 0000000000..098ff958da --- /dev/null +++ b/api/libs/token.py @@ -0,0 +1,231 @@ +import logging +import re +from datetime import UTC, datetime, timedelta + +from flask import Request +from werkzeug.exceptions import Unauthorized +from werkzeug.wrappers import Response + +from configs import dify_config +from constants import ( + COOKIE_NAME_ACCESS_TOKEN, + COOKIE_NAME_CSRF_TOKEN, + COOKIE_NAME_PASSPORT, + COOKIE_NAME_REFRESH_TOKEN, + COOKIE_NAME_WEBAPP_ACCESS_TOKEN, + HEADER_NAME_CSRF_TOKEN, + HEADER_NAME_PASSPORT, +) +from libs.passport import PassportService + +logger = logging.getLogger(__name__) + +CSRF_WHITE_LIST = [ + re.compile(r"/console/api/apps/[a-f0-9-]+/workflows/draft"), +] + + +# server is behind a reverse proxy, so we need to check the url +def is_secure() -> bool: + return dify_config.CONSOLE_WEB_URL.startswith("https") and dify_config.CONSOLE_API_URL.startswith("https") + + +def _cookie_domain() -> str | None: + """ + Returns the normalized cookie domain. + + Leading dots are stripped from the configured domain. Historically, a leading dot + indicated that a cookie should be sent to all subdomains, but modern browsers treat + 'example.com' and '.example.com' identically. This normalization ensures consistent + behavior and avoids confusion. + """ + domain = dify_config.COOKIE_DOMAIN.strip() + domain = domain.removeprefix(".") + return domain or None + + +def _real_cookie_name(cookie_name: str) -> str: + if is_secure() and _cookie_domain() is None: + return "__Host-" + cookie_name + else: + return cookie_name + + +def _try_extract_from_header(request: Request) -> str | None: + auth_header = request.headers.get("Authorization") + if auth_header: + if " " not in auth_header: + return None + else: + auth_scheme, auth_token = auth_header.split(None, 1) + auth_scheme = auth_scheme.lower() + if auth_scheme != "bearer": + return None + else: + return auth_token + return None + + +def extract_refresh_token(request: Request) -> str | None: + return request.cookies.get(_real_cookie_name(COOKIE_NAME_REFRESH_TOKEN)) + + +def extract_csrf_token(request: Request) -> str | None: + return request.headers.get(HEADER_NAME_CSRF_TOKEN) + + +def extract_csrf_token_from_cookie(request: Request) -> str | None: + return request.cookies.get(_real_cookie_name(COOKIE_NAME_CSRF_TOKEN)) + + +def extract_access_token(request: Request) -> str | None: + def _try_extract_from_cookie(request: Request) -> str | None: + return request.cookies.get(_real_cookie_name(COOKIE_NAME_ACCESS_TOKEN)) + + return _try_extract_from_cookie(request) or _try_extract_from_header(request) + + +def extract_webapp_access_token(request: Request) -> str | None: + return request.cookies.get(_real_cookie_name(COOKIE_NAME_WEBAPP_ACCESS_TOKEN)) or _try_extract_from_header(request) + + +def extract_webapp_passport(app_code: str, request: Request) -> str | None: + def _try_extract_passport_token_from_cookie(request: Request) -> str | None: + return request.cookies.get(_real_cookie_name(COOKIE_NAME_PASSPORT + "-" + app_code)) + + def _try_extract_passport_token_from_header(request: Request) -> str | None: + return 
request.headers.get(HEADER_NAME_PASSPORT) + + ret = _try_extract_passport_token_from_cookie(request) or _try_extract_passport_token_from_header(request) + return ret + + +def set_access_token_to_cookie(request: Request, response: Response, token: str, samesite: str = "Lax"): + response.set_cookie( + _real_cookie_name(COOKIE_NAME_ACCESS_TOKEN), + value=token, + httponly=True, + domain=_cookie_domain(), + secure=is_secure(), + samesite=samesite, + max_age=int(dify_config.ACCESS_TOKEN_EXPIRE_MINUTES * 60), + path="/", + ) + + +def set_refresh_token_to_cookie(request: Request, response: Response, token: str): + response.set_cookie( + _real_cookie_name(COOKIE_NAME_REFRESH_TOKEN), + value=token, + httponly=True, + domain=_cookie_domain(), + secure=is_secure(), + samesite="Lax", + max_age=int(60 * 60 * 24 * dify_config.REFRESH_TOKEN_EXPIRE_DAYS), + path="/", + ) + + +def set_csrf_token_to_cookie(request: Request, response: Response, token: str): + response.set_cookie( + _real_cookie_name(COOKIE_NAME_CSRF_TOKEN), + value=token, + httponly=False, + domain=_cookie_domain(), + secure=is_secure(), + samesite="Lax", + max_age=int(60 * dify_config.ACCESS_TOKEN_EXPIRE_MINUTES), + path="/", + ) + + +def _clear_cookie( + response: Response, + cookie_name: str, + samesite: str = "Lax", + http_only: bool = True, +): + response.set_cookie( + _real_cookie_name(cookie_name), + "", + expires=0, + path="/", + domain=_cookie_domain(), + secure=is_secure(), + httponly=http_only, + samesite=samesite, + ) + + +def clear_access_token_from_cookie(response: Response, samesite: str = "Lax"): + _clear_cookie(response, COOKIE_NAME_ACCESS_TOKEN, samesite) + + +def clear_webapp_access_token_from_cookie(response: Response, samesite: str = "Lax"): + _clear_cookie(response, COOKIE_NAME_WEBAPP_ACCESS_TOKEN, samesite) + + +def clear_refresh_token_from_cookie(response: Response): + _clear_cookie(response, COOKIE_NAME_REFRESH_TOKEN) + + +def clear_csrf_token_from_cookie(response: Response): + _clear_cookie(response, COOKIE_NAME_CSRF_TOKEN, http_only=False) + + +def build_force_logout_cookie_headers() -> list[str]: + """ + Generate Set-Cookie header values that clear all auth-related cookies. + This mirrors the behavior of the standard cookie clearing helpers while + allowing callers that do not have a Response instance to reuse the logic. + """ + response = Response() + clear_access_token_from_cookie(response) + clear_csrf_token_from_cookie(response) + clear_refresh_token_from_cookie(response) + return response.headers.getlist("Set-Cookie") + + +def check_csrf_token(request: Request, user_id: str): + # some apis are sent by beacon, so we need to bypass csrf token check + # since these APIs are post, they are already protected by SameSite: Lax, so csrf is not required. 
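+    # Double-submit check: the CSRF token from the request header must equal the
+    # csrf_token cookie value, decode as a PassportService-issued JWT whose "sub"
+    # claim is the current user_id, and carry an unexpired "exp"; any failure
+    # raises Unauthorized via _unauthorized() below.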
+ def _unauthorized(): + raise Unauthorized("CSRF token is missing or invalid.") + + for pattern in CSRF_WHITE_LIST: + if pattern.match(request.path): + return + + csrf_token = extract_csrf_token(request) + csrf_token_from_cookie = extract_csrf_token_from_cookie(request) + + if csrf_token != csrf_token_from_cookie: + _unauthorized() + + if not csrf_token: + _unauthorized() + verified = {} + try: + verified = PassportService().verify(csrf_token) + except Exception: + _unauthorized() + + if verified.get("sub") != user_id: + _unauthorized() + + exp: int | None = verified.get("exp") + if not exp: + _unauthorized() + else: + time_now = int(datetime.now().timestamp()) + if exp < time_now: + _unauthorized() + + +def generate_csrf_token(user_id: str) -> str: + exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES) + payload = { + "exp": int(exp_dt.timestamp()), + "sub": user_id, + } + return PassportService().issue(payload) diff --git a/api/libs/validators.py b/api/libs/validators.py new file mode 100644 index 0000000000..4d762e8116 --- /dev/null +++ b/api/libs/validators.py @@ -0,0 +1,5 @@ +def validate_description_length(description: str | None) -> str | None: + """Validate description length.""" + if description and len(description) > 400: + raise ValueError("Description cannot exceed 400 characters.") + return description diff --git a/api/migrations/env.py b/api/migrations/env.py index a5d815dcfd..66a4614e80 100--- a/api/migrations/env.py +++ b/api/migrations/env.py @@ -37,10 +37,11 @@ config.set_main_option('sqlalchemy.url', get_engine_url()) # my_important_option = config.get_main_option("my_important_option") # ... etc. -from models.base import Base +from models.base import TypeBase + def get_metadata(): - return Base.metadata + return TypeBase.metadata def include_object(object, name, type_, reflected, compare_to): if type_ == "foreign_key_constraint": diff --git a/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py b/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py new file mode 100644 index 0000000000..910cf75838 --- /dev/null +++ b/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py @@ -0,0 +1,35 @@ +"""add app_mode for messsage + +Revision ID: d98acf217d43 +Revises: 68519ad5cd18 +Create Date: 2025-10-14 16:18:08.568011 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'd98acf217d43' +down_revision = '68519ad5cd18' +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('app_mode', sa.String(length=255), nullable=True)) + batch_op.create_index('message_app_mode_idx', ['app_mode'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust!
### + + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.drop_index('message_app_mode_idx') + batch_op.drop_column('app_mode') + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py b/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py new file mode 100644 index 0000000000..086a02e7c3 --- /dev/null +++ b/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py @@ -0,0 +1,36 @@ +"""remove-builtin-template-user + +Revision ID: ae662b25d9bc +Revises: d98acf217d43 +Create Date: 2025-10-21 14:30:28.566192 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ae662b25d9bc' +down_revision = 'd98acf217d43' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + + with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: + batch_op.drop_column('updated_by') + batch_op.drop_column('created_by') + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False)) + batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True)) + + # ### end Alembic commands ### diff --git a/api/models/account.py b/api/models/account.py index 8c1f990aa2..400a2c6362 100644 --- a/api/models/account.py +++ b/api/models/account.py @@ -1,15 +1,16 @@ import enum import json +from dataclasses import field from datetime import datetime from typing import Any, Optional import sqlalchemy as sa -from flask_login import UserMixin # type: ignore[import-untyped] +from flask_login import UserMixin from sqlalchemy import DateTime, String, func, select -from sqlalchemy.orm import Mapped, Session, mapped_column, reconstructor +from sqlalchemy.orm import Mapped, Session, mapped_column from typing_extensions import deprecated -from models.base import Base +from models.base import TypeBase from .engine import db from .types import StringUUID @@ -83,31 +84,37 @@ class AccountStatus(enum.StrEnum): CLOSED = "closed" -class Account(UserMixin, Base): +class Account(UserMixin, TypeBase): __tablename__ = "accounts" __table_args__ = (sa.PrimaryKeyConstraint("id", name="account_pkey"), sa.Index("account_email_idx", "email")) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) name: Mapped[str] = mapped_column(String(255)) email: Mapped[str] = mapped_column(String(255)) - password: Mapped[str | None] = mapped_column(String(255)) - password_salt: Mapped[str | None] = mapped_column(String(255)) - avatar: Mapped[str | None] = mapped_column(String(255), nullable=True) - interface_language: Mapped[str | None] = mapped_column(String(255)) - interface_theme: Mapped[str | None] = mapped_column(String(255), nullable=True) - timezone: Mapped[str | None] = mapped_column(String(255)) - last_login_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - last_login_ip: Mapped[str | None] = mapped_column(String(255), nullable=True) - last_active_at: Mapped[datetime] = mapped_column(DateTime, 
server_default=func.current_timestamp(), nullable=False) - status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'active'::character varying")) - initialized_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), nullable=False) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), nullable=False) + password: Mapped[str | None] = mapped_column(String(255), default=None) + password_salt: Mapped[str | None] = mapped_column(String(255), default=None) + avatar: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) + interface_language: Mapped[str | None] = mapped_column(String(255), default=None) + interface_theme: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) + timezone: Mapped[str | None] = mapped_column(String(255), default=None) + last_login_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) + last_login_ip: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) + last_active_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + status: Mapped[str] = mapped_column( + String(16), server_default=sa.text("'active'::character varying"), default="active" + ) + initialized_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) - @reconstructor - def init_on_load(self): - self.role: TenantAccountRole | None = None - self._current_tenant: Tenant | None = None + role: TenantAccountRole | None = field(default=None, init=False) + _current_tenant: "Tenant | None" = field(default=None, init=False) @property def is_password_set(self): @@ -226,18 +233,24 @@ class TenantStatus(enum.StrEnum): ARCHIVE = "archive" -class Tenant(Base): +class Tenant(TypeBase): __tablename__ = "tenants" __table_args__ = (sa.PrimaryKeyConstraint("id", name="tenant_pkey"),) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) name: Mapped[str] = mapped_column(String(255)) - encrypt_public_key: Mapped[str | None] = mapped_column(sa.Text) - plan: Mapped[str] = mapped_column(String(255), server_default=sa.text("'basic'::character varying")) - status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'normal'::character varying")) - custom_config: Mapped[str | None] = mapped_column(sa.Text) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), nullable=False) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) + encrypt_public_key: Mapped[str | None] = mapped_column(sa.Text, default=None) + plan: Mapped[str] = mapped_column( + String(255), server_default=sa.text("'basic'::character varying"), default="basic" + ) + status: Mapped[str] = mapped_column( + String(255), server_default=sa.text("'normal'::character varying"), default="normal" + ) + custom_config: Mapped[str | None] = mapped_column(sa.Text, default=None) + created_at: Mapped[datetime] = 
mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), init=False) def get_accounts(self) -> list[Account]: return list( @@ -257,7 +270,7 @@ class Tenant(Base): self.custom_config = json.dumps(value) -class TenantAccountJoin(Base): +class TenantAccountJoin(TypeBase): __tablename__ = "tenant_account_joins" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="tenant_account_join_pkey"), @@ -266,17 +279,21 @@ class TenantAccountJoin(Base): sa.UniqueConstraint("tenant_id", "account_id", name="unique_tenant_account_join"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID) account_id: Mapped[str] = mapped_column(StringUUID) - current: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false")) - role: Mapped[str] = mapped_column(String(16), server_default="normal") - invited_by: Mapped[str | None] = mapped_column(StringUUID) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) + current: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false"), default=False) + role: Mapped[str] = mapped_column(String(16), server_default="normal", default="normal") + invited_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) -class AccountIntegrate(Base): +class AccountIntegrate(TypeBase): __tablename__ = "account_integrates" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="account_integrate_pkey"), @@ -284,16 +301,20 @@ class AccountIntegrate(Base): sa.UniqueConstraint("provider", "open_id", name="unique_provider_open_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) account_id: Mapped[str] = mapped_column(StringUUID) provider: Mapped[str] = mapped_column(String(16)) open_id: Mapped[str] = mapped_column(String(255)) encrypted_token: Mapped[str] = mapped_column(String(255)) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) -class InvitationCode(Base): +class InvitationCode(TypeBase): __tablename__ = "invitation_codes" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="invitation_code_pkey"), @@ -301,18 +322,22 @@ class InvitationCode(Base): sa.Index("invitation_codes_code_idx", "code", "status"), ) - id: Mapped[int] = mapped_column(sa.Integer) + id: Mapped[int] = mapped_column(sa.Integer, init=False) batch: Mapped[str] = mapped_column(String(255)) code: 
Mapped[str] = mapped_column(String(32)) - status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'unused'::character varying")) - used_at: Mapped[datetime | None] = mapped_column(DateTime) - used_by_tenant_id: Mapped[str | None] = mapped_column(StringUUID) - used_by_account_id: Mapped[str | None] = mapped_column(StringUUID) - deprecated_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=sa.text("CURRENT_TIMESTAMP(0)")) + status: Mapped[str] = mapped_column( + String(16), server_default=sa.text("'unused'::character varying"), default="unused" + ) + used_at: Mapped[datetime | None] = mapped_column(DateTime, default=None) + used_by_tenant_id: Mapped[str | None] = mapped_column(StringUUID, default=None) + used_by_account_id: Mapped[str | None] = mapped_column(StringUUID, default=None) + deprecated_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=sa.text("CURRENT_TIMESTAMP(0)"), nullable=False, init=False + ) -class TenantPluginPermission(Base): +class TenantPluginPermission(TypeBase): class InstallPermission(enum.StrEnum): EVERYONE = "everyone" ADMINS = "admins" @@ -329,13 +354,17 @@ class TenantPluginPermission(Base): sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - install_permission: Mapped[InstallPermission] = mapped_column(String(16), nullable=False, server_default="everyone") - debug_permission: Mapped[DebugPermission] = mapped_column(String(16), nullable=False, server_default="noone") + install_permission: Mapped[InstallPermission] = mapped_column( + String(16), nullable=False, server_default="everyone", default=InstallPermission.EVERYONE + ) + debug_permission: Mapped[DebugPermission] = mapped_column( + String(16), nullable=False, server_default="noone", default=DebugPermission.NOBODY + ) -class TenantPluginAutoUpgradeStrategy(Base): +class TenantPluginAutoUpgradeStrategy(TypeBase): class StrategySetting(enum.StrEnum): DISABLED = "disabled" FIX_ONLY = "fix_only" @@ -352,12 +381,20 @@ class TenantPluginAutoUpgradeStrategy(Base): sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin_auto_upgrade_strategy"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - strategy_setting: Mapped[StrategySetting] = mapped_column(String(16), nullable=False, server_default="fix_only") - upgrade_time_of_day: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) # seconds of the day - upgrade_mode: Mapped[UpgradeMode] = mapped_column(String(16), nullable=False, server_default="exclude") - exclude_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False) # plugin_id (author/name) - include_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False) # plugin_id (author/name) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, 
nullable=False, server_default=func.current_timestamp()) + strategy_setting: Mapped[StrategySetting] = mapped_column( + String(16), nullable=False, server_default="fix_only", default=StrategySetting.FIX_ONLY + ) + upgrade_mode: Mapped[UpgradeMode] = mapped_column( + String(16), nullable=False, server_default="exclude", default=UpgradeMode.EXCLUDE + ) + exclude_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False, default_factory=list) + include_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False, default_factory=list) + upgrade_time_of_day: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) diff --git a/api/models/api_based_extension.py b/api/models/api_based_extension.py index 60167d9069..e86826fc3d 100644 --- a/api/models/api_based_extension.py +++ b/api/models/api_based_extension.py @@ -9,7 +9,7 @@ from .base import Base from .types import StringUUID -class APIBasedExtensionPoint(enum.Enum): +class APIBasedExtensionPoint(enum.StrEnum): APP_EXTERNAL_DATA_TOOL_QUERY = "app.external_data_tool.query" PING = "ping" APP_MODERATION_INPUT = "app.moderation.input" diff --git a/api/models/dataset.py b/api/models/dataset.py index 25ebe14738..4a9e2688b8 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -61,18 +61,18 @@ class Dataset(Base): created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) - updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) embedding_model = mapped_column(db.String(255), nullable=True) embedding_model_provider = mapped_column(db.String(255), nullable=True) - keyword_number = db.Column(db.Integer, nullable=True, server_default=db.text("10")) + keyword_number = mapped_column(sa.Integer, nullable=True, server_default=db.text("10")) collection_binding_id = mapped_column(StringUUID, nullable=True) retrieval_model = mapped_column(JSONB, nullable=True) - built_in_field_enabled = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) - icon_info = db.Column(JSONB, nullable=True) - runtime_mode = db.Column(db.String(255), nullable=True, server_default=db.text("'general'::character varying")) - pipeline_id = db.Column(StringUUID, nullable=True) - chunk_structure = db.Column(db.String(255), nullable=True) - enable_api = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) + built_in_field_enabled = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + icon_info = mapped_column(JSONB, nullable=True) + runtime_mode = mapped_column(db.String(255), nullable=True, server_default=db.text("'general'::character varying")) + pipeline_id = mapped_column(StringUUID, nullable=True) + chunk_structure = mapped_column(db.String(255), nullable=True) + enable_api = mapped_column(sa.Boolean, nullable=False, server_default=db.text("true")) @property def total_documents(self): @@ -184,7 +184,7 @@ class Dataset(Base): @property def retrieval_model_dict(self): default_retrieval_model = { - 
"search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, @@ -754,7 +754,7 @@ class DocumentSegment(Base): if process_rule and process_rule.mode == "hierarchical": rules_dict = process_rule.rules_dict if rules_dict: - rules = Rule(**rules_dict) + rules = Rule.model_validate(rules_dict) if rules.parent_mode and rules.parent_mode != ParentMode.FULL_DOC: child_chunks = ( db.session.query(ChildChunk) @@ -772,7 +772,7 @@ class DocumentSegment(Base): if process_rule and process_rule.mode == "hierarchical": rules_dict = process_rule.rules_dict if rules_dict: - rules = Rule(**rules_dict) + rules = Rule.model_validate(rules_dict) if rules.parent_mode: child_chunks = ( db.session.query(ChildChunk) @@ -1226,28 +1226,19 @@ class PipelineBuiltInTemplate(Base): # type: ignore[name-defined] __tablename__ = "pipeline_built_in_templates" __table_args__ = (db.PrimaryKeyConstraint("id", name="pipeline_built_in_template_pkey"),) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=False) - chunk_structure = db.Column(db.String(255), nullable=False) - icon = db.Column(db.JSON, nullable=False) - yaml_content = db.Column(db.Text, nullable=False) - copyright = db.Column(db.String(255), nullable=False) - privacy_policy = db.Column(db.String(255), nullable=False) - position = db.Column(db.Integer, nullable=False) - install_count = db.Column(db.Integer, nullable=False, default=0) - language = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - created_by = db.Column(StringUUID, nullable=False) - updated_by = db.Column(StringUUID, nullable=True) - - @property - def created_user_name(self): - account = db.session.query(Account).where(Account.id == self.created_by).first() - if account: - return account.name - return "" + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(sa.Text, nullable=False) + chunk_structure = mapped_column(db.String(255), nullable=False) + icon = mapped_column(sa.JSON, nullable=False) + yaml_content = mapped_column(sa.Text, nullable=False) + copyright = mapped_column(db.String(255), nullable=False) + privacy_policy = mapped_column(db.String(255), nullable=False) + position = mapped_column(sa.Integer, nullable=False) + install_count = mapped_column(sa.Integer, nullable=False, default=0) + language = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) class PipelineCustomizedTemplate(Base): # type: ignore[name-defined] @@ -1257,20 +1248,20 @@ class PipelineCustomizedTemplate(Base): # type: ignore[name-defined] db.Index("pipeline_customized_template_tenant_idx", "tenant_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=False) - chunk_structure = db.Column(db.String(255), nullable=False) - icon = 
db.Column(db.JSON, nullable=False) - position = db.Column(db.Integer, nullable=False) - yaml_content = db.Column(db.Text, nullable=False) - install_count = db.Column(db.Integer, nullable=False, default=0) - language = db.Column(db.String(255), nullable=False) - created_by = db.Column(StringUUID, nullable=False) - updated_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id = mapped_column(StringUUID, nullable=False) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(sa.Text, nullable=False) + chunk_structure = mapped_column(db.String(255), nullable=False) + icon = mapped_column(sa.JSON, nullable=False) + position = mapped_column(sa.Integer, nullable=False) + yaml_content = mapped_column(sa.Text, nullable=False) + install_count = mapped_column(sa.Integer, nullable=False, default=0) + language = mapped_column(db.String(255), nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + updated_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) @property def created_user_name(self): @@ -1284,17 +1275,17 @@ class Pipeline(Base): # type: ignore[name-defined] __tablename__ = "pipelines" __table_args__ = (db.PrimaryKeyConstraint("id", name="pipeline_pkey"),) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=False, server_default=db.text("''::character varying")) - workflow_id = db.Column(StringUUID, nullable=True) - is_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - is_published = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - created_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(sa.Text, nullable=False, server_default=db.text("''::character varying")) + workflow_id = mapped_column(StringUUID, nullable=True) + is_public = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + is_published = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + created_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) def retrieve_dataset(self, session: Session): return session.query(Dataset).where(Dataset.pipeline_id == self.id).first() @@ -1307,25 +1298,25 @@ class DocumentPipelineExecutionLog(Base): 
db.Index("document_pipeline_execution_logs_document_id_idx", "document_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - pipeline_id = db.Column(StringUUID, nullable=False) - document_id = db.Column(StringUUID, nullable=False) - datasource_type = db.Column(db.String(255), nullable=False) - datasource_info = db.Column(db.Text, nullable=False) - datasource_node_id = db.Column(db.String(255), nullable=False) - input_data = db.Column(db.JSON, nullable=False) - created_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + pipeline_id = mapped_column(StringUUID, nullable=False) + document_id = mapped_column(StringUUID, nullable=False) + datasource_type = mapped_column(db.String(255), nullable=False) + datasource_info = mapped_column(sa.Text, nullable=False) + datasource_node_id = mapped_column(db.String(255), nullable=False) + input_data = mapped_column(sa.JSON, nullable=False) + created_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) class PipelineRecommendedPlugin(Base): __tablename__ = "pipeline_recommended_plugins" __table_args__ = (db.PrimaryKeyConstraint("id", name="pipeline_recommended_plugin_pkey"),) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - plugin_id = db.Column(db.Text, nullable=False) - provider_name = db.Column(db.Text, nullable=False) - position = db.Column(db.Integer, nullable=False, default=0) - active = db.Column(db.Boolean, nullable=False, default=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + plugin_id = mapped_column(sa.Text, nullable=False) + provider_name = mapped_column(sa.Text, nullable=False) + position = mapped_column(sa.Integer, nullable=False, default=0) + active = mapped_column(sa.Boolean, nullable=False, default=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/model.py b/api/models/model.py index a8218c3a4e..8a8574e2fe 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, Any, Literal, Optional, cast import sqlalchemy as sa from flask import request -from flask_login import UserMixin # type: ignore[import-untyped] +from flask_login import UserMixin from sqlalchemy import Float, Index, PrimaryKeyConstraint, String, exists, func, select, text from sqlalchemy.orm import Mapped, Session, mapped_column @@ -186,13 +186,13 @@ class App(Base): if len(keys) >= 4: provider_type = tool.get("provider_type", "") provider_id = tool.get("provider_id", "") - if provider_type == ToolProviderType.API.value: + if provider_type == ToolProviderType.API: try: uuid.UUID(provider_id) except Exception: continue api_provider_ids.append(provider_id) - if provider_type == ToolProviderType.BUILT_IN.value: + if provider_type == ToolProviderType.BUILT_IN: try: # check if it's hardcoded try: @@ -251,23 +251,23 @@ class App(Base): provider_type = tool.get("provider_type", "") provider_id = tool.get("provider_id", "") - if provider_type == 
ToolProviderType.API.value: + if provider_type == ToolProviderType.API: if uuid.UUID(provider_id) not in existing_api_providers: deleted_tools.append( { - "type": ToolProviderType.API.value, + "type": ToolProviderType.API, "tool_name": tool["tool_name"], "provider_id": provider_id, } ) - if provider_type == ToolProviderType.BUILT_IN.value: + if provider_type == ToolProviderType.BUILT_IN: generic_provider_id = GenericProviderID(provider_id) if not existing_builtin_providers[generic_provider_id.provider_name]: deleted_tools.append( { - "type": ToolProviderType.BUILT_IN.value, + "type": ToolProviderType.BUILT_IN, "tool_name": tool["tool_name"], "provider_id": provider_id, # use the original one } @@ -910,6 +910,7 @@ class Message(Base): Index("message_account_idx", "app_id", "from_source", "from_account_id"), Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), Index("message_created_at_idx", "created_at"), + Index("message_app_mode_idx", "app_mode"), ) id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) @@ -943,6 +944,7 @@ class Message(Base): updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) agent_based: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) workflow_run_id: Mapped[str | None] = mapped_column(StringUUID) + app_mode: Mapped[str | None] = mapped_column(String(255), nullable=True) @property def inputs(self) -> dict[str, Any]: @@ -1154,7 +1156,7 @@ class Message(Base): files: list[File] = [] for message_file in message_files: - if message_file.transfer_method == FileTransferMethod.LOCAL_FILE.value: + if message_file.transfer_method == FileTransferMethod.LOCAL_FILE: if message_file.upload_file_id is None: raise ValueError(f"MessageFile {message_file.id} is a local file but has no upload_file_id") file = file_factory.build_from_mapping( @@ -1166,7 +1168,7 @@ class Message(Base): }, tenant_id=current_app.tenant_id, ) - elif message_file.transfer_method == FileTransferMethod.REMOTE_URL.value: + elif message_file.transfer_method == FileTransferMethod.REMOTE_URL: if message_file.url is None: raise ValueError(f"MessageFile {message_file.id} is a remote url but has no url") file = file_factory.build_from_mapping( @@ -1179,7 +1181,7 @@ class Message(Base): }, tenant_id=current_app.tenant_id, ) - elif message_file.transfer_method == FileTransferMethod.TOOL_FILE.value: + elif message_file.transfer_method == FileTransferMethod.TOOL_FILE: if message_file.upload_file_id is None: assert message_file.url is not None message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0] @@ -1477,7 +1479,7 @@ class EndUser(Base, UserMixin): sa.Index("end_user_tenant_session_id_idx", "tenant_id", "session_id", "type"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id = mapped_column(StringUUID, nullable=True) type: Mapped[str] = mapped_column(String(255), nullable=False) diff --git a/api/models/oauth.py b/api/models/oauth.py index 1d5d37e3e1..ef23780dc8 100644 --- a/api/models/oauth.py +++ b/api/models/oauth.py @@ -1,7 +1,8 @@ from datetime import datetime +import sqlalchemy as sa from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy.orm import Mapped +from sqlalchemy.orm import Mapped, mapped_column from .base import Base from .engine import db 
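The change running through api/models/dataset.py, api/models/oauth.py, and the other model modules in this patch is the same one everywhere: legacy Flask-SQLAlchemy `db.Column` attributes become SQLAlchemy 2.0 typed mappings, where the Python type lives in the `Mapped[...]` annotation and the column definition in `mapped_column(...)`. A minimal sketch of the before/after shape on a hypothetical model (the `Example` class and its columns are illustrative, not taken from this codebase):

from datetime import datetime

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Example(Base):
    __tablename__ = "examples"

    # The annotation drives static typing: checkers see `name` as str and
    # `note` as str | None, with no casts or type-ignore comments needed.
    id: Mapped[int] = mapped_column(sa.Integer, primary_key=True)
    name: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    note: Mapped[str | None] = mapped_column(sa.Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()
    )

The old form, `note = db.Column(db.Text, nullable=True)`, maps the same column but leaves the attribute untyped, which is why the conversion appears together with stricter `Mapped[...]` annotations throughout this diff.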
@@ -15,10 +16,10 @@ class DatasourceOauthParamConfig(Base): # type: ignore[name-defined] db.UniqueConstraint("plugin_id", "provider", name="datasource_oauth_config_datasource_id_provider_idx"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - plugin_id: Mapped[str] = db.Column(db.String(255), nullable=False) - provider: Mapped[str] = db.Column(db.String(255), nullable=False) - system_credentials: Mapped[dict] = db.Column(JSONB, nullable=False) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + plugin_id: Mapped[str] = mapped_column(db.String(255), nullable=False) + provider: Mapped[str] = mapped_column(db.String(255), nullable=False) + system_credentials: Mapped[dict] = mapped_column(JSONB, nullable=False) class DatasourceProvider(Base): @@ -28,19 +29,19 @@ class DatasourceProvider(Base): db.UniqueConstraint("tenant_id", "plugin_id", "provider", "name", name="datasource_provider_unique_name"), db.Index("datasource_provider_auth_type_provider_idx", "tenant_id", "plugin_id", "provider"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id = db.Column(StringUUID, nullable=False) - name: Mapped[str] = db.Column(db.String(255), nullable=False) - provider: Mapped[str] = db.Column(db.String(255), nullable=False) - plugin_id: Mapped[str] = db.Column(db.String(255), nullable=False) - auth_type: Mapped[str] = db.Column(db.String(255), nullable=False) - encrypted_credentials: Mapped[dict] = db.Column(JSONB, nullable=False) - avatar_url: Mapped[str] = db.Column(db.Text, nullable=True, default="default") - is_default: Mapped[bool] = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - expires_at: Mapped[int] = db.Column(db.Integer, nullable=False, server_default="-1") + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id = mapped_column(StringUUID, nullable=False) + name: Mapped[str] = mapped_column(db.String(255), nullable=False) + provider: Mapped[str] = mapped_column(db.String(255), nullable=False) + plugin_id: Mapped[str] = mapped_column(db.String(255), nullable=False) + auth_type: Mapped[str] = mapped_column(db.String(255), nullable=False) + encrypted_credentials: Mapped[dict] = mapped_column(JSONB, nullable=False) + avatar_url: Mapped[str] = mapped_column(sa.Text, nullable=True, default="default") + is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + expires_at: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default="-1") - created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) - updated_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) + created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) class DatasourceOauthTenantParamConfig(Base): @@ -50,12 +51,12 @@ class DatasourceOauthTenantParamConfig(Base): db.UniqueConstraint("tenant_id", "plugin_id", "provider", name="datasource_oauth_tenant_config_unique"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id = db.Column(StringUUID, nullable=False) - provider: Mapped[str] = db.Column(db.String(255), nullable=False) - plugin_id: Mapped[str] = db.Column(db.String(255), nullable=False) - client_params: Mapped[dict] = db.Column(JSONB, nullable=False, default={}) - enabled: Mapped[bool] = db.Column(db.Boolean, nullable=False, default=False) + 
id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id = mapped_column(StringUUID, nullable=False) + provider: Mapped[str] = mapped_column(db.String(255), nullable=False) + plugin_id: Mapped[str] = mapped_column(db.String(255), nullable=False) + client_params: Mapped[dict] = mapped_column(JSONB, nullable=False, default={}) + enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False) - created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) - updated_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) + created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) diff --git a/api/models/provider.py b/api/models/provider.py index aacc6e505a..f6852d49f4 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -107,7 +107,7 @@ class Provider(Base): """ Returns True if the provider is enabled. """ - if self.provider_type == ProviderType.SYSTEM.value: + if self.provider_type == ProviderType.SYSTEM: return self.is_valid else: return self.is_valid and self.token_is_set diff --git a/api/models/source.py b/api/models/source.py index 5b4c486bc4..0ed7c4c70e 100644 --- a/api/models/source.py +++ b/api/models/source.py @@ -6,12 +6,12 @@ from sqlalchemy import DateTime, String, func from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, mapped_column -from models.base import Base +from models.base import TypeBase from .types import StringUUID -class DataSourceOauthBinding(Base): +class DataSourceOauthBinding(TypeBase): __tablename__ = "data_source_oauth_bindings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="source_binding_pkey"), @@ -19,17 +19,25 @@ class DataSourceOauthBinding(Base): sa.Index("source_info_idx", "source_info", postgresql_using="gin"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - tenant_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) access_token: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - source_info = mapped_column(JSONB, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - disabled: Mapped[bool | None] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false")) + source_info: Mapped[dict] = mapped_column(JSONB, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) + disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False) -class DataSourceApiKeyAuthBinding(Base): +class DataSourceApiKeyAuthBinding(TypeBase): __tablename__ = "data_source_api_key_auth_bindings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="data_source_api_key_auth_binding_pkey"), @@ -37,14 +45,22 @@ class 
DataSourceApiKeyAuthBinding(Base): sa.Index("data_source_api_key_auth_binding_provider_idx", "provider"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - tenant_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) category: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - credentials = mapped_column(sa.Text, nullable=True) # JSON - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - disabled: Mapped[bool | None] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false")) + credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) # JSON + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) + disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False) def to_dict(self): return { @@ -52,7 +68,7 @@ class DataSourceApiKeyAuthBinding(Base): "tenant_id": self.tenant_id, "category": self.category, "provider": self.provider, - "credentials": json.loads(self.credentials), + "credentials": json.loads(self.credentials) if self.credentials else None, "created_at": self.created_at.timestamp(), "updated_at": self.updated_at.timestamp(), "disabled": self.disabled, diff --git a/api/models/task.py b/api/models/task.py index 3da1674536..513f167cce 100644 --- a/api/models/task.py +++ b/api/models/task.py @@ -6,43 +6,43 @@ from sqlalchemy import DateTime, String from sqlalchemy.orm import Mapped, mapped_column from libs.datetime_utils import naive_utc_now -from models.base import Base - -from .engine import db +from models.base import TypeBase -class CeleryTask(Base): +class CeleryTask(TypeBase): """Task result/status.""" __tablename__ = "celery_taskmeta" - id = mapped_column(sa.Integer, sa.Sequence("task_id_sequence"), primary_key=True, autoincrement=True) - task_id = mapped_column(String(155), unique=True) - status = mapped_column(String(50), default=states.PENDING) - result = mapped_column(db.PickleType, nullable=True) - date_done = mapped_column( + id: Mapped[int] = mapped_column( + sa.Integer, sa.Sequence("task_id_sequence"), primary_key=True, autoincrement=True, init=False + ) + task_id: Mapped[str] = mapped_column(String(155), unique=True) + status: Mapped[str] = mapped_column(String(50), default=states.PENDING) + result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None) + date_done: Mapped[datetime | None] = mapped_column( DateTime, - default=lambda: naive_utc_now(), - onupdate=lambda: naive_utc_now(), + default=naive_utc_now, + onupdate=naive_utc_now, nullable=True, ) - traceback = mapped_column(sa.Text, nullable=True) - name = mapped_column(String(155), nullable=True) - args = mapped_column(sa.LargeBinary, nullable=True) - kwargs = mapped_column(sa.LargeBinary, nullable=True) - worker = mapped_column(String(155), nullable=True) - retries: Mapped[int | None] = 
mapped_column(sa.Integer, nullable=True) - queue = mapped_column(String(155), nullable=True) + traceback: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) + name: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) + args: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None) + kwargs: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None) + worker: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) + retries: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None) + queue: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) -class CeleryTaskSet(Base): +class CeleryTaskSet(TypeBase): """TaskSet result.""" __tablename__ = "celery_tasksetmeta" id: Mapped[int] = mapped_column( - sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True + sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True, init=False ) - taskset_id = mapped_column(String(155), unique=True) - result = mapped_column(db.PickleType, nullable=True) - date_done: Mapped[datetime | None] = mapped_column(DateTime, default=lambda: naive_utc_now(), nullable=True) + taskset_id: Mapped[str] = mapped_column(String(155), unique=True) + result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None) + date_done: Mapped[datetime | None] = mapped_column(DateTime, default=naive_utc_now, nullable=True) diff --git a/api/models/tools.py b/api/models/tools.py index 7211d7aa3a..12acc149b1 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,26 +1,24 @@ import json -from collections.abc import Mapping from datetime import datetime +from decimal import Decimal from typing import TYPE_CHECKING, Any, cast -from urllib.parse import urlparse import sqlalchemy as sa from deprecated import deprecated from sqlalchemy import ForeignKey, String, func from sqlalchemy.orm import Mapped, mapped_column -from core.helper import encrypter from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration -from models.base import Base, TypeBase +from models.base import TypeBase from .engine import db from .model import Account, App, Tenant from .types import StringUUID if TYPE_CHECKING: - from core.mcp.types import Tool as MCPTool + from core.entities.mcp_provider import MCPProviderEntity from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration @@ -42,28 +40,28 @@ class ToolOAuthSystemClient(TypeBase): # tenant level tool oauth client params (client_id, client_secret, etc.) 
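The tool models converted below also pick up dataclass-style column options: `init=False`, `default=`, and `default_factory=`. Those keyword arguments only exist on dataclass-mapped models; `TypeBase` itself is not shown in this diff, so the sketch below assumes it behaves like a `MappedAsDataclass` declarative base (the `TypedBase` and `Widget` names are hypothetical stand-ins):

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column


class TypedBase(MappedAsDataclass, DeclarativeBase):
    """Hypothetical stand-in for this repo's TypeBase."""


class Widget(TypedBase):
    __tablename__ = "widgets"

    # init=False drops the field from the generated __init__; the value
    # comes from the database (autoincrement/server_default) instead.
    id: Mapped[int] = mapped_column(sa.Integer, primary_key=True, autoincrement=True, init=False)
    name: Mapped[str] = mapped_column(sa.String(64))
    # default= supplies the Python-side constructor default, while
    # server_default still covers rows inserted outside the ORM.
    status: Mapped[str] = mapped_column(sa.String(16), server_default=sa.text("'active'"), default="active")
    # default_factory= builds a fresh list per instance, avoiding a
    # shared mutable default.
    tags: Mapped[list[str]] = mapped_column(sa.JSON, default_factory=list)


w = Widget(name="gizmo")  # id cannot be passed; status and tags fall back to their defaults

This is also why nearly every converted column in the diff gains an explicit `default=` or `init=False`: as with plain dataclasses, a constructor-visible field without a default may not follow one that has a default.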
-class ToolOAuthTenantClient(Base): +class ToolOAuthTenantClient(TypeBase): __tablename__ = "tool_oauth_tenant_clients" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="tool_oauth_tenant_client_pkey"), sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_tool_oauth_tenant_client"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) + enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), init=False) # oauth params of the tool provider - encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False) + encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False, init=False) @property def oauth_params(self) -> dict[str, Any]: return cast(dict[str, Any], json.loads(self.encrypted_oauth_params or "{}")) -class BuiltinToolProvider(Base): +class BuiltinToolProvider(TypeBase): """ This table stores the tool provider information for built-in tools for each tenant. """ @@ -75,37 +73,45 @@ class BuiltinToolProvider(Base): ) # id of the tool provider - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) name: Mapped[str] = mapped_column( - String(256), nullable=False, server_default=sa.text("'API KEY 1'::character varying") + String(256), + nullable=False, + server_default=sa.text("'API KEY 1'::character varying"), ) # id of the tenant - tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=True) + tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) # who created this tool provider user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # name of the tool provider provider: Mapped[str] = mapped_column(String(256), nullable=False) # credential of the tool provider - encrypted_credentials: Mapped[str] = mapped_column(sa.Text, nullable=True) + encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False ) updated_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, ) - is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) + is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False) # credential type, e.g., "api-key", "oauth2" credential_type: Mapped[str] = mapped_column( - String(32), nullable=False, server_default=sa.text("'api-key'::character varying") + String(32), nullable=False, server_default=sa.text("'api-key'::character varying"), default="api-key" ) - expires_at: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, 
server_default=sa.text("-1")) + expires_at: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, server_default=sa.text("-1"), default=-1) @property def credentials(self) -> dict[str, Any]: + if not self.encrypted_credentials: + return {} return cast(dict[str, Any], json.loads(self.encrypted_credentials)) -class ApiToolProvider(Base): +class ApiToolProvider(TypeBase): """ The table stores the api providers. """ @@ -116,31 +122,43 @@ class ApiToolProvider(Base): sa.UniqueConstraint("name", "tenant_id", name="unique_api_tool_provider"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # name of the api provider - name = mapped_column(String(255), nullable=False, server_default=sa.text("'API KEY 1'::character varying")) + name: Mapped[str] = mapped_column( + String(255), + nullable=False, + server_default=sa.text("'API KEY 1'::character varying"), + ) # icon icon: Mapped[str] = mapped_column(String(255), nullable=False) # original schema - schema = mapped_column(sa.Text, nullable=False) + schema: Mapped[str] = mapped_column(sa.Text, nullable=False) schema_type_str: Mapped[str] = mapped_column(String(40), nullable=False) # who created this tool - user_id = mapped_column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = mapped_column(StringUUID, nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # description of the provider - description = mapped_column(sa.Text, nullable=False) + description: Mapped[str] = mapped_column(sa.Text, nullable=False) # json format tools - tools_str = mapped_column(sa.Text, nullable=False) + tools_str: Mapped[str] = mapped_column(sa.Text, nullable=False) # json format credentials - credentials_str = mapped_column(sa.Text, nullable=False) + credentials_str: Mapped[str] = mapped_column(sa.Text, nullable=False) # privacy policy - privacy_policy = mapped_column(String(255), nullable=True) + privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) # custom_disclaimer custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") - created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) @property def schema_type(self) -> "ApiProviderSchemaType": @@ -152,7 +170,7 @@ class ApiToolProvider(Base): def tools(self) -> list["ApiToolBundle"]: from core.tools.entities.tool_bundle import ApiToolBundle - return [ApiToolBundle(**tool) for tool in json.loads(self.tools_str)] + return [ApiToolBundle.model_validate(tool) for tool in json.loads(self.tools_str)] @property def credentials(self) -> dict[str, Any]: @@ -189,7 +207,7 @@ class ToolLabelBinding(TypeBase): label_name: Mapped[str] = mapped_column(String(40), nullable=False) -class WorkflowToolProvider(Base): +class WorkflowToolProvider(TypeBase): """ The table stores the workflow providers. 
""" @@ -219,15 +237,19 @@ class WorkflowToolProvider(Base): # description of the provider description: Mapped[str] = mapped_column(sa.Text, nullable=False) # parameter configuration - parameter_configuration: Mapped[str] = mapped_column(sa.Text, nullable=False, server_default="[]") + parameter_configuration: Mapped[str] = mapped_column(sa.Text, nullable=False, server_default="[]", default="[]") # privacy policy - privacy_policy: Mapped[str] = mapped_column(String(255), nullable=True, server_default="") + privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, server_default="", default=None) created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False ) updated_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, ) @property @@ -242,14 +264,17 @@ class WorkflowToolProvider(Base): def parameter_configurations(self) -> list["WorkflowToolParameterConfiguration"]: from core.tools.entities.tool_entities import WorkflowToolParameterConfiguration - return [WorkflowToolParameterConfiguration(**config) for config in json.loads(self.parameter_configuration)] + return [ + WorkflowToolParameterConfiguration.model_validate(config) + for config in json.loads(self.parameter_configuration) + ] @property def app(self) -> App | None: return db.session.query(App).where(App.id == self.app_id).first() -class MCPToolProvider(Base): +class MCPToolProvider(TypeBase): """ The table stores the mcp providers. 
""" @@ -262,7 +287,7 @@ class MCPToolProvider(Base): sa.UniqueConstraint("tenant_id", "server_identifier", name="unique_mcp_provider_server_identifier"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # name of the mcp provider name: Mapped[str] = mapped_column(String(40), nullable=False) # server identifier of the mcp provider @@ -272,151 +297,70 @@ class MCPToolProvider(Base): # hash of server_url for uniqueness check server_url_hash: Mapped[str] = mapped_column(String(64), nullable=False) # icon of the mcp provider - icon: Mapped[str] = mapped_column(String(255), nullable=True) + icon: Mapped[str | None] = mapped_column(String(255), nullable=True) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # who created this tool user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # encrypted credentials - encrypted_credentials: Mapped[str] = mapped_column(sa.Text, nullable=True) + encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) # authed authed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False) # tools tools: Mapped[str] = mapped_column(sa.Text, nullable=False, default="[]") created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False ) updated_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, + ) + timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30"), default=30.0) + sse_read_timeout: Mapped[float] = mapped_column( + sa.Float, nullable=False, server_default=sa.text("300"), default=300.0 ) - timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30")) - sse_read_timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("300")) # encrypted headers for MCP server requests - encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True) + encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) def load_user(self) -> Account | None: return db.session.query(Account).where(Account.id == self.user_id).first() - @property - def tenant(self) -> Tenant | None: - return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() - @property def credentials(self) -> dict[str, Any]: + if not self.encrypted_credentials: + return {} try: - return cast(dict[str, Any], json.loads(self.encrypted_credentials)) or {} + return json.loads(self.encrypted_credentials) except Exception: return {} @property - def mcp_tools(self) -> list["MCPTool"]: - from core.mcp.types import Tool as MCPTool - - return [MCPTool(**tool) for tool in json.loads(self.tools)] - - @property - def provider_icon(self) -> Mapping[str, str] | str: - from core.file import helpers as file_helpers - + def headers(self) -> dict[str, Any]: + if self.encrypted_headers is None: + return {} try: - return json.loads(self.icon) - except json.JSONDecodeError: - return file_helpers.get_signed_file_url(self.icon) - - @property - def decrypted_server_url(self) -> str: - return 
encrypter.decrypt_token(self.tenant_id, self.server_url) - - @property - def decrypted_headers(self) -> dict[str, Any]: - """Get decrypted headers for MCP server requests.""" - from core.entities.provider_entities import BasicProviderConfig - from core.helper.provider_cache import NoOpProviderCredentialCache - from core.tools.utils.encryption import create_provider_encrypter - - try: - if not self.encrypted_headers: - return {} - - headers_data = json.loads(self.encrypted_headers) - - # Create dynamic config for all headers as SECRET_INPUT - config = [BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=key) for key in headers_data] - - encrypter_instance, _ = create_provider_encrypter( - tenant_id=self.tenant_id, - config=config, - cache=NoOpProviderCredentialCache(), - ) - - result = encrypter_instance.decrypt(headers_data) - return result + return json.loads(self.encrypted_headers) except Exception: return {} @property - def masked_headers(self) -> dict[str, Any]: - """Get masked headers for frontend display.""" - from core.entities.provider_entities import BasicProviderConfig - from core.helper.provider_cache import NoOpProviderCredentialCache - from core.tools.utils.encryption import create_provider_encrypter - + def tool_dict(self) -> list[dict[str, Any]]: try: - if not self.encrypted_headers: - return {} + return json.loads(self.tools) if self.tools else [] + except (json.JSONDecodeError, TypeError): + return [] - headers_data = json.loads(self.encrypted_headers) + def to_entity(self) -> "MCPProviderEntity": + """Convert to domain entity""" + from core.entities.mcp_provider import MCPProviderEntity - # Create dynamic config for all headers as SECRET_INPUT - config = [BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=key) for key in headers_data] - - encrypter_instance, _ = create_provider_encrypter( - tenant_id=self.tenant_id, - config=config, - cache=NoOpProviderCredentialCache(), - ) - - # First decrypt, then mask - decrypted_headers = encrypter_instance.decrypt(headers_data) - result = encrypter_instance.mask_tool_credentials(decrypted_headers) - return result - except Exception: - return {} - - @property - def masked_server_url(self) -> str: - def mask_url(url: str, mask_char: str = "*") -> str: - """ - mask the url to a simple string - """ - parsed = urlparse(url) - base_url = f"{parsed.scheme}://{parsed.netloc}" - - if parsed.path and parsed.path != "/": - return f"{base_url}/{mask_char * 6}" - else: - return base_url - - return mask_url(self.decrypted_server_url) - - @property - def decrypted_credentials(self) -> dict[str, Any]: - from core.helper.provider_cache import NoOpProviderCredentialCache - from core.tools.mcp_tool.provider import MCPToolProviderController - from core.tools.utils.encryption import create_provider_encrypter - - provider_controller = MCPToolProviderController.from_db(self) - - encrypter, _ = create_provider_encrypter( - tenant_id=self.tenant_id, - config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()], - cache=NoOpProviderCredentialCache(), - ) - - return encrypter.decrypt(self.credentials) + return MCPProviderEntity.from_db_model(self) -class ToolModelInvoke(Base): +class ToolModelInvoke(TypeBase): """ store the invoke logs from tool invoke """ @@ -424,37 +368,47 @@ class ToolModelInvoke(Base): __tablename__ = "tool_model_invokes" __table_args__ = (sa.PrimaryKeyConstraint("id", name="tool_model_invoke_pkey"),) - id = mapped_column(StringUUID, 
server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # who invoke this tool - user_id = mapped_column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = mapped_column(StringUUID, nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # provider provider: Mapped[str] = mapped_column(String(255), nullable=False) # type - tool_type = mapped_column(String(40), nullable=False) + tool_type: Mapped[str] = mapped_column(String(40), nullable=False) # tool name - tool_name = mapped_column(String(128), nullable=False) + tool_name: Mapped[str] = mapped_column(String(128), nullable=False) # invoke parameters - model_parameters = mapped_column(sa.Text, nullable=False) + model_parameters: Mapped[str] = mapped_column(sa.Text, nullable=False) # prompt messages - prompt_messages = mapped_column(sa.Text, nullable=False) + prompt_messages: Mapped[str] = mapped_column(sa.Text, nullable=False) # invoke response - model_response = mapped_column(sa.Text, nullable=False) + model_response: Mapped[str] = mapped_column(sa.Text, nullable=False) prompt_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) answer_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) - answer_unit_price = mapped_column(sa.Numeric(10, 4), nullable=False) - answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) - provider_response_latency = mapped_column(sa.Float, nullable=False, server_default=sa.text("0")) - total_price = mapped_column(sa.Numeric(10, 7)) + answer_unit_price: Mapped[Decimal] = mapped_column(sa.Numeric(10, 4), nullable=False) + answer_price_unit: Mapped[Decimal] = mapped_column( + sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001") + ) + provider_response_latency: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0")) + total_price: Mapped[Decimal | None] = mapped_column(sa.Numeric(10, 7)) currency: Mapped[str] = mapped_column(String(255), nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) @deprecated -class ToolConversationVariables(Base): +class ToolConversationVariables(TypeBase): """ store the conversation variables from tool invoke """ @@ -467,18 +421,26 @@ class ToolConversationVariables(Base): sa.Index("conversation_id_idx", "conversation_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # conversation user id - user_id = mapped_column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = mapped_column(StringUUID, nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # conversation id - conversation_id = mapped_column(StringUUID, nullable=False) + 
conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # variables pool - variables_str = mapped_column(sa.Text, nullable=False) + variables_str: Mapped[str] = mapped_column(sa.Text, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) @property def variables(self): @@ -516,7 +478,7 @@ class ToolFile(TypeBase): @deprecated -class DeprecatedPublishedAppTool(Base): +class DeprecatedPublishedAppTool(TypeBase): """ The table stores the apps published as a tool for each person. """ @@ -527,29 +489,37 @@ class DeprecatedPublishedAppTool(Base): sa.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # id of the app - app_id = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False) + app_id: Mapped[str] = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False) user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # who published this tool - description = mapped_column(sa.Text, nullable=False) + description: Mapped[str] = mapped_column(sa.Text, nullable=False) # llm_description of the tool, for LLM - llm_description = mapped_column(sa.Text, nullable=False) + llm_description: Mapped[str] = mapped_column(sa.Text, nullable=False) # query description, query will be seem as a parameter of the tool, # to describe this parameter to llm, we need this field - query_description = mapped_column(sa.Text, nullable=False) + query_description: Mapped[str] = mapped_column(sa.Text, nullable=False) # query name, the name of the query parameter - query_name = mapped_column(String(40), nullable=False) + query_name: Mapped[str] = mapped_column(String(40), nullable=False) # name of the tool provider - tool_name = mapped_column(String(40), nullable=False) + tool_name: Mapped[str] = mapped_column(String(40), nullable=False) # author - author = mapped_column(String(40), nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")) - updated_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")) + author: Mapped[str] = mapped_column(String(40), nullable=False) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, + ) @property def description_i18n(self) -> "I18nObject": from core.tools.entities.common_entities import I18nObject - return I18nObject(**json.loads(self.description)) + return I18nObject.model_validate(json.loads(self.description)) diff --git a/api/models/web.py b/api/models/web.py index 74f99e187b..7df5bd6e87 100644 --- a/api/models/web.py +++ b/api/models/web.py @@ -4,46 +4,58 @@ import sqlalchemy as sa from sqlalchemy import 
DateTime, String, func from sqlalchemy.orm import Mapped, mapped_column -from models.base import Base +from models.base import TypeBase from .engine import db from .model import Message from .types import StringUUID -class SavedMessage(Base): +class SavedMessage(TypeBase): __tablename__ = "saved_messages" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="saved_message_pkey"), sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - app_id = mapped_column(StringUUID, nullable=False) - message_id = mapped_column(StringUUID, nullable=False) - created_by_role = mapped_column( + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_by_role: Mapped[str] = mapped_column( String(255), nullable=False, server_default=sa.text("'end_user'::character varying") ) - created_by = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + init=False, + ) @property def message(self): return db.session.query(Message).where(Message.id == self.message_id).first() -class PinnedConversation(Base): +class PinnedConversation(TypeBase): __tablename__ = "pinned_conversations" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="pinned_conversation_pkey"), sa.Index("pinned_conversation_conversation_idx", "app_id", "conversation_id", "created_by_role", "created_by"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - app_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) conversation_id: Mapped[str] = mapped_column(StringUUID) - created_by_role = mapped_column( - String(255), nullable=False, server_default=sa.text("'end_user'::character varying") + created_by_role: Mapped[str] = mapped_column( + String(255), + nullable=False, + server_default=sa.text("'end_user'::character varying"), + ) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + init=False, ) - created_by = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/workflow.py b/api/models/workflow.py index e61005953e..b898f02612 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -360,7 +360,9 @@ class Workflow(Base): @property def environment_variables(self) -> Sequence[StringVariable | IntegerVariable | FloatVariable | SecretVariable]: - # _environment_variables is guaranteed to be non-None due to server_default="{}" + # TODO: find some way to init `self._environment_variables` when instance created. 
+ if self._environment_variables is None: + self._environment_variables = "{}" # Use workflow.tenant_id to avoid relying on request user in background threads tenant_id = self.tenant_id @@ -444,7 +446,9 @@ class Workflow(Base): @property def conversation_variables(self) -> Sequence[Variable]: - # _conversation_variables is guaranteed to be non-None due to server_default="{}" + # TODO: find some way to init `self._conversation_variables` when instance created. + if self._conversation_variables is None: + self._conversation_variables = "{}" variables_dict: dict[str, Any] = json.loads(self._conversation_variables) results = [variable_factory.build_conversation_variable_from_mapping(v) for v in variables_dict.values()] @@ -825,14 +829,14 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo if self.execution_metadata_dict: from core.workflow.nodes import NodeType - if self.node_type == NodeType.TOOL.value and "tool_info" in self.execution_metadata_dict: + if self.node_type == NodeType.TOOL and "tool_info" in self.execution_metadata_dict: tool_info: dict[str, Any] = self.execution_metadata_dict["tool_info"] extras["icon"] = ToolManager.get_tool_icon( tenant_id=self.tenant_id, provider_type=tool_info["provider_type"], provider_id=tool_info["provider_id"], ) - elif self.node_type == NodeType.DATASOURCE.value and "datasource_info" in self.execution_metadata_dict: + elif self.node_type == NodeType.DATASOURCE and "datasource_info" in self.execution_metadata_dict: datasource_info = self.execution_metadata_dict["datasource_info"] extras["icon"] = datasource_info.get("icon") return extras diff --git a/api/pyproject.toml b/api/pyproject.toml index 012702edd2..d6286083d1 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,11 +1,10 @@ [project] name = "dify-api" -version = "1.9.0" +version = "1.9.2" requires-python = ">=3.11,<3.13" dependencies = [ "arize-phoenix-otel~=0.9.2", - "authlib==1.6.4", "azure-identity==1.16.1", "beautifulsoup4==4.12.2", "boto3==1.35.99", @@ -14,7 +13,7 @@ dependencies = [ "celery~=5.5.2", "chardet~=5.1.0", "flask~=3.1.2", - "flask-compress~=1.17", + "flask-compress>=1.17,<1.18", "flask-cors~=6.0.0", "flask-login~=0.6.3", "flask-migrate~=4.0.7", @@ -34,12 +33,10 @@ dependencies = [ "json-repair>=0.41.1", "langfuse~=2.51.3", "langsmith~=0.1.77", - "mailchimp-transactional~=1.0.50", "markdown~=3.5.1", "numpy~=1.26.4", - "openai~=1.61.0", "openpyxl~=3.1.5", - "opik~=1.7.25", + "opik~=1.8.72", "opentelemetry-api==1.27.0", "opentelemetry-distro==0.48b0", "opentelemetry-exporter-otlp==1.27.0", @@ -49,8 +46,8 @@ dependencies = [ "opentelemetry-instrumentation==0.48b0", "opentelemetry-instrumentation-celery==0.48b0", "opentelemetry-instrumentation-flask==0.48b0", + "opentelemetry-instrumentation-httpx==0.48b0", "opentelemetry-instrumentation-redis==0.48b0", - "opentelemetry-instrumentation-requests==0.48b0", "opentelemetry-instrumentation-sqlalchemy==0.48b0", "opentelemetry-propagator-b3==1.27.0", # opentelemetry-proto1.28.0 depends on protobuf (>=5.0,<6.0), @@ -60,13 +58,12 @@ dependencies = [ "opentelemetry-semantic-conventions==0.48b0", "opentelemetry-util-http==0.48b0", "pandas[excel,output-formatting,performance]~=2.2.2", - "pandoc~=2.4", "psycogreen~=1.0.2", "psycopg2-binary~=2.9.6", "pycryptodome==3.19.1", "pydantic~=2.11.4", "pydantic-extra-types~=2.10.3", - "pydantic-settings~=2.9.1", + "pydantic-settings~=2.11.0", "pyjwt~=2.10.1", "pypdfium2==4.30.0", "python-docx~=1.1.0", @@ -89,6 +86,7
@@ dependencies = [ "sendgrid~=6.12.3", "flask-restx~=1.3.0", "packaging~=23.2", + "weaviate-client==4.17.0", ] # Before adding new dependency, consider place it in # alphabet order (a-z) and suitable group. @@ -113,13 +111,13 @@ dev = [ "lxml-stubs~=0.5.1", "ty~=0.0.1a19", "basedpyright~=1.31.0", - "ruff~=0.12.3", + "ruff~=0.14.0", "pytest~=8.3.2", "pytest-benchmark~=4.0.0", "pytest-cov~=4.1.0", "pytest-env~=1.1.3", "pytest-mock~=3.14.0", - "testcontainers~=4.10.0", + "testcontainers~=4.13.2", "types-aiofiles~=24.1.0", "types-beautifulsoup4~=4.12.0", "types-cachetools~=5.5.0", @@ -148,8 +146,6 @@ dev = [ "types-pywin32~=310.0.0", "types-pyyaml~=6.0.12", "types-regex~=2024.11.6", - "types-requests~=2.32.0", - "types-requests-oauthlib~=2.0.0", "types-shapely~=2.0.0", "types-simplejson>=3.20.0", "types-six>=1.17.0", @@ -171,6 +167,7 @@ dev = [ "mypy~=1.17.1", # "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved. "sseclient-py>=1.8.0", + "pytest-timeout>=2.4.0", ] ############################################################ @@ -178,10 +175,10 @@ dev = [ # Required for storage clients ############################################################ storage = [ - "azure-storage-blob==12.13.0", + "azure-storage-blob==12.26.0", "bce-python-sdk~=0.9.23", - "cos-python-sdk-v5==1.9.30", - "esdk-obs-python==3.24.6.1", + "cos-python-sdk-v5==1.9.38", + "esdk-obs-python==3.25.8", "google-cloud-storage==2.16.0", "opendal~=0.46.0", "oss2==2.18.5", @@ -207,7 +204,7 @@ vdb = [ "couchbase~=4.3.0", "elasticsearch==8.14.0", "opensearch-py==2.4.0", - "oracledb==3.0.0", + "oracledb==3.3.0", "pgvecto-rs[sqlalchemy]~=0.2.1", "pgvector==0.2.5", "pymilvus~=2.5.0", @@ -219,7 +216,8 @@ vdb = [ "tidb-vector==0.0.9", "upstash-vector==0.6.0", "volcengine-compat~=1.0.0", - "weaviate-client~=3.24.0", + "weaviate-client==4.17.0", "xinference-client~=1.2.2", "mo-vector~=0.1.13", + "mysql-connector-python>=9.3.0", ] diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index 61ed3ac3b4..6a689b96df 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -1,19 +1,10 @@ { "include": ["."], "exclude": [ - ".venv", "tests/", + ".venv", "migrations/", - "core/rag", - "extensions", - "libs", - "controllers/console/datasets", - "controllers/service_api/dataset", - "core/ops", - "core/tools", - "core/model_runtime", - "core/workflow/nodes", - "core/app/app_config/easy_ui_based_app/dataset" + "core/rag" ], "typeCheckingMode": "strict", "allowedUntypedLibraries": [ @@ -21,9 +12,29 @@ "flask_login", "opentelemetry.instrumentation.celery", "opentelemetry.instrumentation.flask", + "opentelemetry.instrumentation.httpx", "opentelemetry.instrumentation.requests", "opentelemetry.instrumentation.sqlalchemy", - "opentelemetry.instrumentation.redis" + "opentelemetry.instrumentation.redis", + "langfuse", + "cloudscraper", + "readabilipy", + "pypandoc", + "pypdfium2", + "webvtt", + "flask_compress", + "oss2", + "baidubce.auth.bce_credentials", + "baidubce.bce_client_configuration", + "baidubce.services.bos.bos_client", + "clickzetta", + "google.cloud", + "obs", + "qcloud_cos", + "tos", + "gmpy2", + "sendgrid", + "sendgrid.helpers.mail" ], "reportUnknownMemberType": "hint", "reportUnknownParameterType": "hint", @@ -32,13 +43,11 @@ "reportUnknownLambdaType": "hint", "reportMissingParameterType": "hint", "reportMissingTypeArgument": "hint", - "reportUnnecessaryContains": "hint", "reportUnnecessaryComparison": "hint", - "reportUnnecessaryCast": "hint", "reportUnnecessaryIsInstance": "hint", 
"reportUntypedFunctionDecorator": "hint", - + "reportUnnecessaryTypeIgnoreComment": "hint", "reportAttributeAccessIssue": "hint", "pythonVersion": "3.11", "pythonPlatform": "All" -} +} \ No newline at end of file diff --git a/api/pytest.ini b/api/pytest.ini index eb49619481..afb53b47cc 100644 --- a/api/pytest.ini +++ b/api/pytest.ini @@ -7,7 +7,7 @@ env = CHATGLM_API_BASE = http://a.abc.com:11451 CODE_EXECUTION_API_KEY = dify-sandbox CODE_EXECUTION_ENDPOINT = http://127.0.0.1:8194 - CODE_MAX_STRING_LENGTH = 80000 + CODE_MAX_STRING_LENGTH = 400000 PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi PLUGIN_DAEMON_URL=http://127.0.0.1:5002 PLUGIN_MAX_PACKAGE_SIZE=15728640 diff --git a/api/repositories/api_workflow_run_repository.py b/api/repositories/api_workflow_run_repository.py index 3ac28fad75..eb6d599224 100644 --- a/api/repositories/api_workflow_run_repository.py +++ b/api/repositories/api_workflow_run_repository.py @@ -28,7 +28,7 @@ Example: runs = repo.get_paginated_workflow_runs( tenant_id="tenant-123", app_id="app-456", - triggered_from="debugging", + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, limit=20 ) ``` @@ -40,7 +40,14 @@ from typing import Protocol from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from libs.infinite_scroll_pagination import InfiniteScrollPagination +from models.enums import WorkflowRunTriggeredFrom from models.workflow import WorkflowRun +from repositories.types import ( + AverageInteractionStats, + DailyRunsStats, + DailyTerminalsStats, + DailyTokenCostStats, +) class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): @@ -56,9 +63,10 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): self, tenant_id: str, app_id: str, - triggered_from: str, + triggered_from: WorkflowRunTriggeredFrom | Sequence[WorkflowRunTriggeredFrom], limit: int = 20, last_id: str | None = None, + status: str | None = None, ) -> InfiniteScrollPagination: """ Get paginated workflow runs with filtering. @@ -70,9 +78,10 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): Args: tenant_id: Tenant identifier for multi-tenant isolation app_id: Application identifier - triggered_from: Filter by trigger source (e.g., "debugging", "app-run") + triggered_from: Filter by trigger source(s) (e.g., "debugging", "app-run", or list of values) limit: Maximum number of records to return (default: 20) last_id: Cursor for pagination - ID of the last record from previous page + status: Optional filter by status (e.g., "running", "succeeded", "failed") Returns: InfiniteScrollPagination object containing: @@ -107,6 +116,68 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): """ ... + def get_workflow_run_by_id_without_tenant( + self, + run_id: str, + ) -> WorkflowRun | None: + """ + Get a specific workflow run by ID without tenant/app context. + + Retrieves a single workflow run using only the run ID, without + requiring tenant_id or app_id. This method is intended for internal + system operations like tracing and monitoring where the tenant context + is not available upfront. + + Args: + run_id: Workflow run identifier + + Returns: + WorkflowRun object if found, None otherwise + + Note: + This method bypasses tenant isolation checks and should only be used + in trusted system contexts like ops trace collection. For user-facing + operations, use get_workflow_run_by_id() with proper tenant isolation. + """ + ... 
+ + def get_workflow_runs_count( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + status: str | None = None, + time_range: str | None = None, + ) -> dict[str, int]: + """ + Get workflow runs count statistics. + + Retrieves total count and count by status for workflow runs + matching the specified filters. + + Args: + tenant_id: Tenant identifier for multi-tenant isolation + app_id: Application identifier + triggered_from: Filter by trigger source (e.g., "debugging", "app-run") + status: Optional filter by specific status + time_range: Optional time range filter (e.g., "7d", "4h", "30m", "30s") + Filters records based on created_at field + + Returns: + Dictionary containing: + - total: Total count of all workflow runs (or filtered by status) + - running: Count of workflow runs with status "running" + - succeeded: Count of workflow runs with status "succeeded" + - failed: Count of workflow runs with status "failed" + - stopped: Count of workflow runs with status "stopped" + - partial-succeeded: Count of workflow runs with status "partial-succeeded" + + Note: If a status is provided, 'total' will be the count for that status, + and the specific status count will also be set to this value, with all + other status counts being 0. + """ + ... + def get_expired_runs_batch( self, tenant_id: str, @@ -179,3 +250,119 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): and ensure proper data retention policies are followed. """ ... + + def get_daily_runs_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[DailyRunsStats]: + """ + Get daily runs statistics. + + Retrieves daily workflow runs count grouped by date for a specific app + and trigger source. Used for workflow statistics dashboard. + + Args: + tenant_id: Tenant identifier for multi-tenant isolation + app_id: Application identifier + triggered_from: Filter by trigger source (e.g., "app-run") + start_date: Optional start date filter + end_date: Optional end date filter + timezone: Timezone for date grouping (default: "UTC") + + Returns: + List of dictionaries containing date and runs count: + [{"date": "2024-01-01", "runs": 10}, ...] + """ + ... + + def get_daily_terminals_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[DailyTerminalsStats]: + """ + Get daily terminals statistics. + + Retrieves daily unique terminal count grouped by date for a specific app + and trigger source. Used for workflow statistics dashboard. + + Args: + tenant_id: Tenant identifier for multi-tenant isolation + app_id: Application identifier + triggered_from: Filter by trigger source (e.g., "app-run") + start_date: Optional start date filter + end_date: Optional end date filter + timezone: Timezone for date grouping (default: "UTC") + + Returns: + List of dictionaries containing date and terminal count: + [{"date": "2024-01-01", "terminal_count": 5}, ...] + """ + ... + + def get_daily_token_cost_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[DailyTokenCostStats]: + """ + Get daily token cost statistics. + + Retrieves daily total token count grouped by date for a specific app + and trigger source.
Used for workflow statistics dashboard. + + Args: + tenant_id: Tenant identifier for multi-tenant isolation + app_id: Application identifier + triggered_from: Filter by trigger source (e.g., "app-run") + start_date: Optional start date filter + end_date: Optional end date filter + timezone: Timezone for date grouping (default: "UTC") + + Returns: + List of dictionaries containing date and token count: + [{"date": "2024-01-01", "token_count": 1000}, ...] + """ + ... + + def get_average_app_interaction_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[AverageInteractionStats]: + """ + Get average app interaction statistics. + + Retrieves daily average interactions per user grouped by date for a specific app + and trigger source. Used for workflow statistics dashboard. + + Args: + tenant_id: Tenant identifier for multi-tenant isolation + app_id: Application identifier + triggered_from: Filter by trigger source (e.g., "app-run") + start_date: Optional start date filter + end_date: Optional end date filter + timezone: Timezone for date grouping (default: "UTC") + + Returns: + List of dictionaries containing date and average interactions: + [{"date": "2024-01-01", "interactions": 2.5}, ...] + """ + ... diff --git a/api/repositories/factory.py b/api/repositories/factory.py index 0be9c8908c..96f9f886a4 100644 --- a/api/repositories/factory.py +++ b/api/repositories/factory.py @@ -48,7 +48,7 @@ class DifyAPIRepositoryFactory(DifyCoreRepositoryFactory): try: repository_class = import_string(class_path) - return repository_class(session_maker=session_maker) # type: ignore[no-any-return] + return repository_class(session_maker=session_maker) except (ImportError, Exception) as e: raise RepositoryImportError( f"Failed to create DifyAPIWorkflowNodeExecutionRepository from '{class_path}': {e}" @@ -77,6 +77,6 @@ class DifyAPIRepositoryFactory(DifyCoreRepositoryFactory): try: repository_class = import_string(class_path) - return repository_class(session_maker=session_maker) # type: ignore[no-any-return] + return repository_class(session_maker=session_maker) except (ImportError, Exception) as e: raise RepositoryImportError(f"Failed to create APIWorkflowRunRepository from '{class_path}': {e}") from e diff --git a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py index 9bc6acc41f..7e2173acdd 100644 --- a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py @@ -7,8 +7,10 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations. 
from collections.abc import Sequence from datetime import datetime +from typing import cast from sqlalchemy import asc, delete, desc, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker from models.workflow import WorkflowNodeExecutionModel @@ -181,7 +183,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut # Delete the batch delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() total_deleted += result.rowcount @@ -228,7 +230,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut # Delete the batch delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() total_deleted += result.rowcount @@ -285,6 +287,6 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut with self._session_maker() as session: stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) session.commit() return result.rowcount diff --git a/api/repositories/sqlalchemy_api_workflow_run_repository.py b/api/repositories/sqlalchemy_api_workflow_run_repository.py index 205f8c87ee..f08eab0b01 100644 --- a/api/repositories/sqlalchemy_api_workflow_run_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_run_repository.py @@ -22,13 +22,25 @@ Implementation Notes: import logging from collections.abc import Sequence from datetime import datetime +from decimal import Decimal +from typing import Any, cast -from sqlalchemy import delete, select +import sqlalchemy as sa +from sqlalchemy import delete, func, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker from libs.infinite_scroll_pagination import InfiniteScrollPagination +from libs.time_parser import get_time_threshold +from models.enums import WorkflowRunTriggeredFrom from models.workflow import WorkflowRun from repositories.api_workflow_run_repository import APIWorkflowRunRepository +from repositories.types import ( + AverageInteractionStats, + DailyRunsStats, + DailyTerminalsStats, + DailyTokenCostStats, +) logger = logging.getLogger(__name__) @@ -58,9 +70,10 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): self, tenant_id: str, app_id: str, - triggered_from: str, + triggered_from: WorkflowRunTriggeredFrom | Sequence[WorkflowRunTriggeredFrom], limit: int = 20, last_id: str | None = None, + status: str | None = None, ) -> InfiniteScrollPagination: """ Get paginated workflow runs with filtering. 
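A sketch of the widened call signature, reusing the example values from the module docstring above; WorkflowRunTriggeredFrom members come from models.enums, and InfiniteScrollPagination is assumed to expose the fetched rows as .data:

    from sqlalchemy.orm import sessionmaker

    from extensions.ext_database import db
    from models.enums import WorkflowRunTriggeredFrom
    from repositories.sqlalchemy_api_workflow_run_repository import DifyAPISQLAlchemyWorkflowRunRepository

    repo = DifyAPISQLAlchemyWorkflowRunRepository(session_maker=sessionmaker(db.engine, expire_on_commit=False))

    page = repo.get_paginated_workflow_runs(
        tenant_id="tenant-123",
        app_id="app-456",
        # a single enum member or a sequence of members is accepted now
        triggered_from=[WorkflowRunTriggeredFrom.DEBUGGING, WorkflowRunTriggeredFrom.APP_RUN],
        limit=20,
        last_id=None,        # id of the last row from the previous page, for cursor pagination
        status="succeeded",  # optional status filter added in this change
    )
    for run in page.data:  # assumed attribute name on InfiniteScrollPagination
        print(run.id, run.status)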
@@ -74,9 +87,18 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): base_stmt = select(WorkflowRun).where( WorkflowRun.tenant_id == tenant_id, WorkflowRun.app_id == app_id, - WorkflowRun.triggered_from == triggered_from, ) + # Handle triggered_from values + if isinstance(triggered_from, WorkflowRunTriggeredFrom): + triggered_from = [triggered_from] + if triggered_from: + base_stmt = base_stmt.where(WorkflowRun.triggered_from.in_(triggered_from)) + + # Add optional status filter + if status: + base_stmt = base_stmt.where(WorkflowRun.status == status) + if last_id: # Get the last workflow run for cursor-based pagination last_run_stmt = base_stmt.where(WorkflowRun.id == last_id) @@ -118,6 +140,84 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): ) return session.scalar(stmt) + def get_workflow_run_by_id_without_tenant( + self, + run_id: str, + ) -> WorkflowRun | None: + """ + Get a specific workflow run by ID without tenant/app context. + """ + with self._session_maker() as session: + stmt = select(WorkflowRun).where(WorkflowRun.id == run_id) + return session.scalar(stmt) + + def get_workflow_runs_count( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + status: str | None = None, + time_range: str | None = None, + ) -> dict[str, int]: + """ + Get workflow runs count statistics grouped by status. + """ + _initial_status_counts = { + "running": 0, + "succeeded": 0, + "failed": 0, + "stopped": 0, + "partial-succeeded": 0, + } + + with self._session_maker() as session: + # Build base where conditions + base_conditions = [ + WorkflowRun.tenant_id == tenant_id, + WorkflowRun.app_id == app_id, + WorkflowRun.triggered_from == triggered_from, + ] + + # Add time range filter if provided + if time_range: + time_threshold = get_time_threshold(time_range) + if time_threshold: + base_conditions.append(WorkflowRun.created_at >= time_threshold) + + # If status filter is provided, return simple count + if status: + count_stmt = select(func.count(WorkflowRun.id)).where(*base_conditions, WorkflowRun.status == status) + total = session.scalar(count_stmt) or 0 + + result = {"total": total} | _initial_status_counts + + # Set the count for the filtered status + if status in result: + result[status] = total + + return result + + # No status filter - get counts grouped by status + base_stmt = ( + select(WorkflowRun.status, func.count(WorkflowRun.id).label("count")) + .where(*base_conditions) + .group_by(WorkflowRun.status) + ) + + # Execute query + results = session.execute(base_stmt).all() + + # Build response dictionary + status_counts = _initial_status_counts.copy() + + total = 0 + for status_val, count in results: + total += count + if status_val in status_counts: + status_counts[status_val] = count + + return {"total": total} | status_counts + def get_expired_runs_batch( self, tenant_id: str, @@ -150,7 +250,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): with self._session_maker() as session: stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids)) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) session.commit() deleted_count = result.rowcount @@ -186,7 +286,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): # Delete the batch delete_stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() batch_deleted = result.rowcount @@ -200,3 
+300,213 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): logger.info("Total deleted %s workflow runs for app %s", total_deleted, app_id) return total_deleted + + def get_daily_runs_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[DailyRunsStats]: + """ + Get daily runs statistics using raw SQL for optimal performance. + """ + sql_query = """SELECT + DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + COUNT(id) AS runs +FROM + workflow_runs +WHERE + tenant_id = :tenant_id + AND app_id = :app_id + AND triggered_from = :triggered_from""" + + arg_dict: dict[str, Any] = { + "tz": timezone, + "tenant_id": tenant_id, + "app_id": app_id, + "triggered_from": triggered_from, + } + + if start_date: + sql_query += " AND created_at >= :start_date" + arg_dict["start_date"] = start_date + + if end_date: + sql_query += " AND created_at < :end_date" + arg_dict["end_date"] = end_date + + sql_query += " GROUP BY date ORDER BY date" + + response_data = [] + with self._session_maker() as session: + rs = session.execute(sa.text(sql_query), arg_dict) + for row in rs: + response_data.append({"date": str(row.date), "runs": row.runs}) + + return cast(list[DailyRunsStats], response_data) + + def get_daily_terminals_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[DailyTerminalsStats]: + """ + Get daily terminals statistics using raw SQL for optimal performance. + """ + sql_query = """SELECT + DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + COUNT(DISTINCT created_by) AS terminal_count +FROM + workflow_runs +WHERE + tenant_id = :tenant_id + AND app_id = :app_id + AND triggered_from = :triggered_from""" + + arg_dict: dict[str, Any] = { + "tz": timezone, + "tenant_id": tenant_id, + "app_id": app_id, + "triggered_from": triggered_from, + } + + if start_date: + sql_query += " AND created_at >= :start_date" + arg_dict["start_date"] = start_date + + if end_date: + sql_query += " AND created_at < :end_date" + arg_dict["end_date"] = end_date + + sql_query += " GROUP BY date ORDER BY date" + + response_data = [] + with self._session_maker() as session: + rs = session.execute(sa.text(sql_query), arg_dict) + for row in rs: + response_data.append({"date": str(row.date), "terminal_count": row.terminal_count}) + + return cast(list[DailyTerminalsStats], response_data) + + def get_daily_token_cost_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[DailyTokenCostStats]: + """ + Get daily token cost statistics using raw SQL for optimal performance. 
+ """ + sql_query = """SELECT + DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + SUM(total_tokens) AS token_count +FROM + workflow_runs +WHERE + tenant_id = :tenant_id + AND app_id = :app_id + AND triggered_from = :triggered_from""" + + arg_dict: dict[str, Any] = { + "tz": timezone, + "tenant_id": tenant_id, + "app_id": app_id, + "triggered_from": triggered_from, + } + + if start_date: + sql_query += " AND created_at >= :start_date" + arg_dict["start_date"] = start_date + + if end_date: + sql_query += " AND created_at < :end_date" + arg_dict["end_date"] = end_date + + sql_query += " GROUP BY date ORDER BY date" + + response_data = [] + with self._session_maker() as session: + rs = session.execute(sa.text(sql_query), arg_dict) + for row in rs: + response_data.append( + { + "date": str(row.date), + "token_count": row.token_count, + } + ) + + return cast(list[DailyTokenCostStats], response_data) + + def get_average_app_interaction_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[AverageInteractionStats]: + """ + Get average app interaction statistics using raw SQL for optimal performance. + """ + sql_query = """SELECT + AVG(sub.interactions) AS interactions, + sub.date +FROM + ( + SELECT + DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + c.created_by, + COUNT(c.id) AS interactions + FROM + workflow_runs c + WHERE + c.tenant_id = :tenant_id + AND c.app_id = :app_id + AND c.triggered_from = :triggered_from + {{start}} + {{end}} + GROUP BY + date, c.created_by + ) sub +GROUP BY + sub.date""" + + arg_dict: dict[str, Any] = { + "tz": timezone, + "tenant_id": tenant_id, + "app_id": app_id, + "triggered_from": triggered_from, + } + + if start_date: + sql_query = sql_query.replace("{{start}}", " AND c.created_at >= :start_date") + arg_dict["start_date"] = start_date + else: + sql_query = sql_query.replace("{{start}}", "") + + if end_date: + sql_query = sql_query.replace("{{end}}", " AND c.created_at < :end_date") + arg_dict["end_date"] = end_date + else: + sql_query = sql_query.replace("{{end}}", "") + + response_data = [] + with self._session_maker() as session: + rs = session.execute(sa.text(sql_query), arg_dict) + for row in rs: + response_data.append( + {"date": str(row.date), "interactions": float(row.interactions.quantize(Decimal("0.01")))} + ) + + return cast(list[AverageInteractionStats], response_data) diff --git a/api/repositories/types.py b/api/repositories/types.py new file mode 100644 index 0000000000..3b3ef7f635 --- /dev/null +++ b/api/repositories/types.py @@ -0,0 +1,21 @@ +from typing import TypedDict + + +class DailyRunsStats(TypedDict): + date: str + runs: int + + +class DailyTerminalsStats(TypedDict): + date: str + terminal_count: int + + +class DailyTokenCostStats(TypedDict): + date: str + token_count: int + + +class AverageInteractionStats(TypedDict): + date: str + interactions: float diff --git a/api/schedule/check_upgradable_plugin_task.py b/api/schedule/check_upgradable_plugin_task.py index a9ad27b059..e91ce07be3 100644 --- a/api/schedule/check_upgradable_plugin_task.py +++ b/api/schedule/check_upgradable_plugin_task.py @@ -6,7 +6,7 @@ import click import app from extensions.ext_database import db from models.account import TenantPluginAutoUpgradeStrategy -from tasks.process_tenant_plugin_autoupgrade_check_task import process_tenant_plugin_autoupgrade_check_task +from 
tasks import process_tenant_plugin_autoupgrade_check_task as check_task AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes MAX_CONCURRENT_CHECK_TASKS = 20 @@ -43,7 +43,7 @@ def check_upgradable_plugin_task(): for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS): batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS] for strategy in batch_strategies: - process_tenant_plugin_autoupgrade_check_task.delay( + check_task.process_tenant_plugin_autoupgrade_check_task.delay( strategy.tenant_id, strategy.strategy_setting, strategy.upgrade_time_of_day, @@ -52,7 +52,8 @@ strategy.exclude_plugins, strategy.include_plugins, ) - if batch_interval_time > 0.0001: # if lower than 1ms, skip + # Only sleep if batch_interval_time > 0.0001 AND current batch is not the last one + if batch_interval_time > 0.0001 and i + MAX_CONCURRENT_CHECK_TASKS < total_strategies: time.sleep(batch_interval_time) end_at = time.perf_counter() diff --git a/api/schedule/clean_workflow_runlogs_precise.py b/api/schedule/clean_workflow_runlogs_precise.py index 485a79782c..db4198720d 100644 --- a/api/schedule/clean_workflow_runlogs_precise.py +++ b/api/schedule/clean_workflow_runlogs_precise.py @@ -1,8 +1,11 @@ import datetime import logging import time +from collections.abc import Sequence import click +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker import app from configs import dify_config @@ -35,50 +38,54 @@ def clean_workflow_runlogs_precise(): retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days) + session_factory = sessionmaker(db.engine, expire_on_commit=False) try: - total_workflow_runs = db.session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count() - if total_workflow_runs == 0: - logger.info("No expired workflow run logs found") - return - logger.info("Found %s expired workflow run logs to clean", total_workflow_runs) + with session_factory.begin() as session: + total_workflow_runs = session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count() + if total_workflow_runs == 0: + logger.info("No expired workflow run logs found") + return + logger.info("Found %s expired workflow run logs to clean", total_workflow_runs) total_deleted = 0 failed_batches = 0 batch_count = 0 - while True: - workflow_runs = ( - db.session.query(WorkflowRun.id).where(WorkflowRun.created_at < cutoff_date).limit(BATCH_SIZE).all() - ) + while True: + with session_factory.begin() as session: + workflow_run_ids = session.scalars( + select(WorkflowRun.id) + .where(WorkflowRun.created_at < cutoff_date) + .order_by(WorkflowRun.created_at, WorkflowRun.id) + .limit(BATCH_SIZE) ).all() - if not workflow_runs: - break - - workflow_run_ids = [run.id for run in workflow_runs] - batch_count += 1 - - success = _delete_batch_with_retry(workflow_run_ids, failed_batches) - - if success: - total_deleted += len(workflow_run_ids) - failed_batches = 0 - else: - failed_batches += 1 - if failed_batches >= MAX_RETRIES: - logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES) + if not workflow_run_ids: break + + batch_count += 1 + + success = _delete_batch(session, workflow_run_ids, failed_batches) + + if success: + total_deleted += len(workflow_run_ids) + failed_batches = 0 else: - # Calculate incremental delay times: 5, 10, 15 minutes - retry_delay_minutes = failed_batches * 5 - logger.warning("Batch deletion failed, retrying in %s minutes...",
retry_delay_minutes) - time.sleep(retry_delay_minutes * 60) - continue + failed_batches += 1 + if failed_batches >= MAX_RETRIES: + logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES) + break + else: + # Calculate incremental delay times: 5, 10, 15 minutes + retry_delay_minutes = failed_batches * 5 + logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes) + time.sleep(retry_delay_minutes * 60) + continue logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted) except Exception: - db.session.rollback() logger.exception("Unexpected error in workflow log cleanup") raise @@ -87,69 +93,56 @@ def clean_workflow_runlogs_precise(): click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green")) -def _delete_batch_with_retry(workflow_run_ids: list[str], attempt_count: int) -> bool: - """Delete a single batch with a retry mechanism and complete cascading deletion""" +def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_count: int) -> bool: + """Delete a single batch of workflow runs and all related data within a nested transaction.""" try: - with db.session.begin_nested(): + with session.begin_nested(): message_data = ( - db.session.query(Message.id, Message.conversation_id) + session.query(Message.id, Message.conversation_id) .where(Message.workflow_run_id.in_(workflow_run_ids)) .all() ) message_id_list = [msg.id for msg in message_data] conversation_id_list = list({msg.conversation_id for msg in message_data if msg.conversation_id}) if message_id_list: - db.session.query(AppAnnotationHitHistory).where( - AppAnnotationHitHistory.message_id.in_(message_id_list) - ).delete(synchronize_session=False) + message_related_models = [ + AppAnnotationHitHistory, + MessageAgentThought, + MessageChain, + MessageFile, + MessageAnnotation, + MessageFeedback, + ] + for model in message_related_models: + session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore + # error: "DeclarativeAttributeIntercept" has no attribute "message_id". But this type is only in lib + # and these 6 types all have the message_id field. 
- db.session.query(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_id_list)).delete( + session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete( synchronize_session=False ) - db.session.query(MessageChain).where(MessageChain.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageFile).where(MessageFile.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete( - synchronize_session=False - ) - - db.session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete( + session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete( synchronize_session=False ) - db.session.query(WorkflowNodeExecutionModel).where( + session.query(WorkflowNodeExecutionModel).where( WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids) ).delete(synchronize_session=False) if conversation_id_list: - db.session.query(ConversationVariable).where( + session.query(ConversationVariable).where( ConversationVariable.conversation_id.in_(conversation_id_list) ).delete(synchronize_session=False) - db.session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete( + session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete( synchronize_session=False ) - db.session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False) + session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False) - db.session.commit() - return True + return True except Exception: - db.session.rollback() logger.exception("Batch deletion failed (attempt %s)", attempt_count + 1) return False diff --git a/api/schedule/mail_clean_document_notify_task.py b/api/schedule/mail_clean_document_notify_task.py index ef6edd6709..b70707b17e 100644 --- a/api/schedule/mail_clean_document_notify_task.py +++ b/api/schedule/mail_clean_document_notify_task.py @@ -10,7 +10,7 @@ from configs import dify_config from extensions.ext_database import db from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service -from models.account import Account, Tenant, TenantAccountJoin +from models import Account, Tenant, TenantAccountJoin from models.dataset import Dataset, DatasetAutoDisableLog from services.feature_service import FeatureService diff --git a/api/services/account_service.py b/api/services/account_service.py index 0e699d16da..13c3993fb5 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -13,7 +13,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Unauthorized from configs import dify_config -from constants.languages import language_timezone_mapping, languages +from constants.languages import get_valid_language, language_timezone_mapping from events.tenant_event import tenant_was_created from extensions.ext_database import db from extensions.ext_redis import redis_client, redis_fallback @@ -22,6 +22,7 @@ from libs.helper import RateLimiter, TokenManager from libs.passport import PassportService from libs.password import 
compare_password, hash_password, valid_password from libs.rsa import generate_key_pair +from libs.token import generate_csrf_token from models.account import ( Account, AccountIntegrate, @@ -76,6 +77,7 @@ logger = logging.getLogger(__name__) class TokenPair(BaseModel): access_token: str refresh_token: str + csrf_token: str REFRESH_TOKEN_PREFIX = "refresh_token:" @@ -127,7 +129,7 @@ class AccountService: if not account: return None - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise Unauthorized("Account is banned.") current_tenant = db.session.query(TenantAccountJoin).filter_by(account_id=account.id, current=True).first() @@ -178,7 +180,7 @@ class AccountService: if not account: raise AccountPasswordError("Invalid email or password.") - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise AccountLoginError("Account is banned.") if password and invite_token and account.password is None: @@ -193,8 +195,8 @@ class AccountService: if account.password is None or not compare_password(password, account.password, account.password_salt): raise AccountPasswordError("Invalid email or password.") - if account.status == AccountStatus.PENDING.value: - account.status = AccountStatus.ACTIVE.value + if account.status == AccountStatus.PENDING: + account.status = AccountStatus.ACTIVE account.initialized_at = naive_utc_now() db.session.commit() @@ -246,10 +248,8 @@ class AccountService: ) ) - account = Account() - account.email = email - account.name = name - + password_to_set = None + salt_to_set = None if password: valid_password(password) @@ -261,14 +261,18 @@ class AccountService: password_hashed = hash_password(password, salt) base64_password_hashed = base64.b64encode(password_hashed).decode() - account.password = base64_password_hashed - account.password_salt = base64_salt + password_to_set = base64_password_hashed + salt_to_set = base64_salt - account.interface_language = interface_language - account.interface_theme = interface_theme - - # Set timezone based on language - account.timezone = language_timezone_mapping.get(interface_language, "UTC") + account = Account( + name=name, + email=email, + password=password_to_set, + password_salt=salt_to_set, + interface_language=interface_language, + interface_theme=interface_theme, + timezone=language_timezone_mapping.get(interface_language, "UTC"), + ) db.session.add(account) db.session.commit() @@ -355,7 +359,7 @@ class AccountService: @staticmethod def close_account(account: Account): """Close account""" - account.status = AccountStatus.CLOSED.value + account.status = AccountStatus.CLOSED db.session.commit() @staticmethod @@ -395,16 +399,17 @@ class AccountService: if ip_address: AccountService.update_login_info(account=account, ip_address=ip_address) - if account.status == AccountStatus.PENDING.value: - account.status = AccountStatus.ACTIVE.value + if account.status == AccountStatus.PENDING: + account.status = AccountStatus.ACTIVE db.session.commit() access_token = AccountService.get_account_jwt_token(account=account) refresh_token = _generate_refresh_token() + csrf_token = generate_csrf_token(account.id) AccountService._store_refresh_token(refresh_token, account.id) - return TokenPair(access_token=access_token, refresh_token=refresh_token) + return TokenPair(access_token=access_token, refresh_token=refresh_token, csrf_token=csrf_token) @staticmethod def logout(*, account: Account): @@ -429,8 +434,9 @@ class AccountService: 
AccountService._delete_refresh_token(refresh_token, account.id) AccountService._store_refresh_token(new_refresh_token, account.id) + csrf_token = generate_csrf_token(account.id) - return TokenPair(access_token=new_access_token, refresh_token=new_refresh_token) + return TokenPair(access_token=new_access_token, refresh_token=new_refresh_token, csrf_token=csrf_token) @staticmethod def load_logged_in_account(*, account_id: str): @@ -764,7 +770,7 @@ class AccountService: if not account: return None - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise Unauthorized("Account is banned.") return account @@ -1028,7 +1034,7 @@ class TenantService: @staticmethod def create_tenant_member(tenant: Tenant, account: Account, role: str = "normal") -> TenantAccountJoin: """Create tenant member""" - if role == TenantAccountRole.OWNER.value: + if role == TenantAccountRole.OWNER: if TenantService.has_roles(tenant, [TenantAccountRole.OWNER]): logger.error("Tenant %s has already an owner.", tenant.id) raise Exception("Tenant already has an owner.") @@ -1253,7 +1259,7 @@ class RegisterService: return f"member_invite:token:{token}" @classmethod - def setup(cls, email: str, name: str, password: str, ip_address: str): + def setup(cls, email: str, name: str, password: str, ip_address: str, language: str): """ Setup dify @@ -1263,11 +1269,10 @@ class RegisterService: :param ip_address: ip address """ try: - # Register account = AccountService.create_account( email=email, name=name, - interface_language=languages[0], + interface_language=get_valid_language(language), password=password, is_setup=True, ) @@ -1309,11 +1314,11 @@ class RegisterService: account = AccountService.create_account( email=email, name=name, - interface_language=language or languages[0], + interface_language=get_valid_language(language), password=password, is_setup=is_setup, ) - account.status = AccountStatus.ACTIVE.value if not status else status.value + account.status = status or AccountStatus.ACTIVE account.initialized_at = naive_utc_now() if open_id is not None and provider is not None: @@ -1374,7 +1379,7 @@ class RegisterService: TenantService.create_tenant_member(tenant, account, role) # Support resend invitation email when the account is pending status - if account.status != AccountStatus.PENDING.value: + if account.status != AccountStatus.PENDING: raise AccountAlreadyInTenantError("Account already in tenant.") token = cls.generate_invite_token(tenant, account) diff --git a/api/services/agent_service.py b/api/services/agent_service.py index d631ce812f..b2db895a5a 100644 --- a/api/services/agent_service.py +++ b/api/services/agent_service.py @@ -10,7 +10,7 @@ from core.plugin.impl.exc import PluginDaemonClientSideError from core.tools.tool_manager import ToolManager from extensions.ext_database import db from libs.login import current_user -from models.account import Account +from models import Account from models.model import App, Conversation, EndUser, Message, MessageAgentThought diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index 9feca7337f..c0d26cdd27 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -8,8 +8,7 @@ from werkzeug.exceptions import NotFound from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now -from libs.login import current_user -from models.account import Account +from libs.login import current_account_with_tenant from 
models.model import App, AppAnnotationHitHistory, AppAnnotationSetting, Message, MessageAnnotation from services.feature_service import FeatureService from tasks.annotation.add_annotation_to_index_task import add_annotation_to_index_task @@ -24,10 +23,10 @@ class AppAnnotationService: @classmethod def up_insert_app_annotation_from_message(cls, args: dict, app_id: str) -> MessageAnnotation: # get app info - assert isinstance(current_user, Account) + current_user, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -63,12 +62,12 @@ class AppAnnotationService: db.session.commit() # if annotation reply is enabled , add annotation to index annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() - assert current_user.current_tenant_id is not None + assert current_tenant_id is not None if annotation_setting: add_annotation_to_index_task.delay( annotation.id, args["question"], - current_user.current_tenant_id, + current_tenant_id, app_id, annotation_setting.collection_binding_id, ) @@ -86,13 +85,12 @@ class AppAnnotationService: enable_app_annotation_job_key = f"enable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(enable_app_annotation_job_key, "waiting") - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() enable_annotation_reply_task.delay( str(job_id), app_id, current_user.id, - current_user.current_tenant_id, + current_tenant_id, args["score_threshold"], args["embedding_provider_name"], args["embedding_model_name"], @@ -101,8 +99,7 @@ class AppAnnotationService: @classmethod def disable_app_annotation(cls, app_id: str): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() disable_app_annotation_key = f"disable_app_annotation_{str(app_id)}" cache_result = redis_client.get(disable_app_annotation_key) if cache_result is not None: @@ -113,17 +110,16 @@ class AppAnnotationService: disable_app_annotation_job_key = f"disable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(disable_app_annotation_job_key, "waiting") - disable_annotation_reply_task.delay(str(job_id), app_id, current_user.current_tenant_id) + disable_annotation_reply_task.delay(str(job_id), app_id, current_tenant_id) return {"job_id": job_id, "job_status": "waiting"} @classmethod def get_annotation_list_by_app_id(cls, app_id: str, page: int, limit: int, keyword: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -153,11 +149,10 @@ class AppAnnotationService: @classmethod def export_annotation_list_by_app_id(cls, app_id: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == 
@@ -153,11 +149,10 @@ class AppAnnotationService:
     @classmethod
     def export_annotation_list_by_app_id(cls, app_id: str):
         # get app info
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        _, current_tenant_id = current_account_with_tenant()
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -174,11 +169,10 @@ class AppAnnotationService:
     @classmethod
     def insert_app_annotation_directly(cls, args: dict, app_id: str) -> MessageAnnotation:
         # get app info
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        current_user, current_tenant_id = current_account_with_tenant()
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -196,7 +190,7 @@ class AppAnnotationService:
             add_annotation_to_index_task.delay(
                 annotation.id,
                 args["question"],
-                current_user.current_tenant_id,
+                current_tenant_id,
                 app_id,
                 annotation_setting.collection_binding_id,
             )
@@ -205,11 +199,10 @@ class AppAnnotationService:
     @classmethod
     def update_app_annotation_directly(cls, args: dict, app_id: str, annotation_id: str):
         # get app info
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        _, current_tenant_id = current_account_with_tenant()
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -234,7 +227,7 @@ class AppAnnotationService:
                 update_annotation_to_index_task.delay(
                     annotation.id,
                     annotation.question,
-                    current_user.current_tenant_id,
+                    current_tenant_id,
                     app_id,
                     app_annotation_setting.collection_binding_id,
                 )
@@ -244,11 +237,10 @@ class AppAnnotationService:
     @classmethod
     def delete_app_annotation(cls, app_id: str, annotation_id: str):
         # get app info
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        _, current_tenant_id = current_account_with_tenant()
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -277,17 +269,16 @@ class AppAnnotationService:
         if app_annotation_setting:
             delete_annotation_index_task.delay(
-                annotation.id, app_id, current_user.current_tenant_id, app_annotation_setting.collection_binding_id
+                annotation.id, app_id, current_tenant_id, app_annotation_setting.collection_binding_id
             )

     @classmethod
     def delete_app_annotations_in_batch(cls, app_id: str, annotation_ids: list[str]):
         # get app info
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        _, current_tenant_id = current_account_with_tenant()
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -317,7 +308,7 @@ class AppAnnotationService:
         for annotation, annotation_setting in annotations_to_delete:
             if annotation_setting:
                 delete_annotation_index_task.delay(
-                    annotation.id, app_id, current_user.current_tenant_id, annotation_setting.collection_binding_id
+                    annotation.id, app_id, current_tenant_id, annotation_setting.collection_binding_id
                 )

         # Step 4: Bulk delete annotations in a single query
@@ -333,11 +324,10 @@ class AppAnnotationService:
     @classmethod
     def batch_import_app_annotations(cls, app_id, file: FileStorage):
         # get app info
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        current_user, current_tenant_id = current_account_with_tenant()
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -354,7 +344,7 @@ class AppAnnotationService:
             if len(result) == 0:
                 raise ValueError("The CSV file is empty.")
             # check annotation limit
-            features = FeatureService.get_features(current_user.current_tenant_id)
+            features = FeatureService.get_features(current_tenant_id)
             if features.billing.enabled:
                 annotation_quota_limit = features.annotation_quota_limit
                 if annotation_quota_limit.limit < len(result) + annotation_quota_limit.size:
@@ -364,21 +354,18 @@ class AppAnnotationService:
             indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
             # send batch add segments task
             redis_client.setnx(indexing_cache_key, "waiting")
-            batch_import_annotations_task.delay(
-                str(job_id), result, app_id, current_user.current_tenant_id, current_user.id
-            )
+            batch_import_annotations_task.delay(str(job_id), result, app_id, current_tenant_id, current_user.id)
         except Exception as e:
             return {"error_msg": str(e)}
         return {"job_id": job_id, "job_status": "waiting"}

     @classmethod
     def get_annotation_hit_histories(cls, app_id: str, annotation_id: str, page, limit):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        _, current_tenant_id = current_account_with_tenant()
         # get app info
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -445,12 +432,11 @@ class AppAnnotationService:
     @classmethod
     def get_app_annotation_setting_by_app_id(cls, app_id: str):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        _, current_tenant_id = current_account_with_tenant()
         # get app info
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -481,12 +467,11 @@ class AppAnnotationService:
     @classmethod
     def update_app_annotation_setting(cls, app_id: str, annotation_setting_id: str, args: dict):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        current_user, current_tenant_id = current_account_with_tenant()
         # get app info
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -531,11 +516,10 @@ class AppAnnotationService:
     @classmethod
     def clear_all_annotations(cls, app_id: str):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
+        _, current_tenant_id = current_account_with_tenant()
         app = (
             db.session.query(App)
-            .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+            .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
             .first()
         )
@@ -558,7 +542,7 @@ class AppAnnotationService:
             # if annotation reply is enabled, delete annotation index
             if app_annotation_setting:
                 delete_annotation_index_task.delay(
-                    annotation.id, app_id, current_user.current_tenant_id, app_annotation_setting.collection_binding_id
+                    annotation.id, app_id, current_tenant_id, app_annotation_setting.collection_binding_id
                 )

             db.session.delete(annotation)
diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py
index 8701fe4f4e..edb18a845a 100644
--- a/api/services/app_dsl_service.py
+++ b/api/services/app_dsl_service.py
@@ -7,7 +7,7 @@ from enum import StrEnum
 from urllib.parse import urlparse
 from uuid import uuid4

-import yaml  # type: ignore
+import yaml
 from Crypto.Cipher import AES
 from Crypto.Util.Padding import pad, unpad
 from packaging import version
@@ -29,6 +29,7 @@ from core.workflow.nodes.tool.entities import ToolNodeData
 from events.app_event import app_model_config_was_updated, app_was_created
 from extensions.ext_redis import redis_client
 from factories import variable_factory
+from libs.datetime_utils import naive_utc_now
 from models import Account, App, AppMode
 from models.model import AppModelConfig
 from models.workflow import Workflow
@@ -439,6 +440,7 @@ class AppDslService:
             app.icon = icon
             app.icon_background = icon_background or app_data.get("icon_background", app.icon_background)
             app.updated_by = account.id
+            app.updated_at = naive_utc_now()
         else:
             if account.current_tenant_id is None:
                 raise ValueError("Current tenant is not set")
@@ -494,7 +496,7 @@ class AppDslService:
         unique_hash = None
         graph = workflow_data.get("graph", {})
         for node in graph.get("nodes", []):
-            if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL.value:
+            if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL:
                 dataset_ids = node["data"].get("dataset_ids", [])
                 node["data"]["dataset_ids"] = [
                     decrypted_id
@@ -561,7 +563,7 @@ class AppDslService:
         else:
             cls._append_model_config_export_data(export_data, app_model)

-        return yaml.dump(export_data, allow_unicode=True)  # type: ignore
+        return yaml.dump(export_data, allow_unicode=True)

     @classmethod
     def _append_workflow_export_data(
@@ -584,17 +586,17 @@ class AppDslService:
             if not node_data:
                 continue
             data_type = node_data.get("type", "")
-            if data_type == NodeType.KNOWLEDGE_RETRIEVAL.value:
+            if data_type == NodeType.KNOWLEDGE_RETRIEVAL:
                 dataset_ids = node_data.get("dataset_ids", [])
                 node_data["dataset_ids"] = [
                     cls.encrypt_dataset_id(dataset_id=dataset_id, tenant_id=app_model.tenant_id)
                     for dataset_id in dataset_ids
                 ]
             # filter credential id from tool node
-            if not include_secret and data_type == NodeType.TOOL.value:
+            if not include_secret and data_type == NodeType.TOOL:
                 node_data.pop("credential_id", None)
             # filter credential id from agent node
-            if not include_secret and data_type == NodeType.AGENT.value:
+            if not include_secret and data_type == NodeType.AGENT:
                 for tool in node_data.get("agent_parameters", {}).get("tools", {}).get("value", []):
                     tool.pop("credential_id", None)
@@ -658,32 +660,32 @@ class AppDslService:
             try:
                 typ = node.get("data", {}).get("type")
                 match typ:
-                    case NodeType.TOOL.value:
-                        tool_entity = ToolNodeData(**node["data"])
+                    case NodeType.TOOL:
+                        tool_entity = ToolNodeData.model_validate(node["data"])
                         dependencies.append(
                             DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id),
                         )
-                    case NodeType.LLM.value:
-                        llm_entity = LLMNodeData(**node["data"])
+                    case NodeType.LLM:
+                        llm_entity = LLMNodeData.model_validate(node["data"])
                         dependencies.append(
                             DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider),
                         )
-                    case NodeType.QUESTION_CLASSIFIER.value:
-                        question_classifier_entity = QuestionClassifierNodeData(**node["data"])
+                    case NodeType.QUESTION_CLASSIFIER:
+                        question_classifier_entity = QuestionClassifierNodeData.model_validate(node["data"])
                         dependencies.append(
                             DependenciesAnalysisService.analyze_model_provider_dependency(
                                 question_classifier_entity.model.provider
                             ),
                         )
-                    case NodeType.PARAMETER_EXTRACTOR.value:
-                        parameter_extractor_entity = ParameterExtractorNodeData(**node["data"])
+                    case NodeType.PARAMETER_EXTRACTOR:
+                        parameter_extractor_entity = ParameterExtractorNodeData.model_validate(node["data"])
                         dependencies.append(
                             DependenciesAnalysisService.analyze_model_provider_dependency(
                                 parameter_extractor_entity.model.provider
                             ),
                         )
-                    case NodeType.KNOWLEDGE_RETRIEVAL.value:
-                        knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"])
+                    case NodeType.KNOWLEDGE_RETRIEVAL:
+                        knowledge_retrieval_entity = KnowledgeRetrievalNodeData.model_validate(node["data"])
                         if knowledge_retrieval_entity.retrieval_mode == "multiple":
                             if knowledge_retrieval_entity.multiple_retrieval_config:
                                 if (
@@ -773,7 +775,7 @@ class AppDslService:
         """
         Returns the leaked dependencies in current workspace
         """
-        dependencies = [PluginDependency(**dep) for dep in dsl_dependencies]
+        dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies]
         if not dependencies:
             return []
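A pattern worth calling out in this file: `Model(**data)` becomes `Model.model_validate(data)`. `model_validate` is Pydantic v2's validation entry point for untrusted mappings: it takes the mapping directly (no `**` unpacking) and reports problems as a structured `ValidationError`. Sketch with a hypothetical stand-in model:

    from pydantic import BaseModel, ValidationError

    class ToolNode(BaseModel):  # stand-in for ToolNodeData, illustration only
        provider_id: str

    node = ToolNode.model_validate({"provider_id": "p1"})  # same result as ToolNode(provider_id="p1")

    try:
        ToolNode.model_validate({"provider_id": 123})      # wrong type -> ValidationError, not TypeError
    except ValidationError as err:
        print(err.errors()[0]["loc"])                      # ('provider_id',)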
diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py
index 8911da4728..b462ddf236 100644
--- a/api/services/app_generate_service.py
+++ b/api/services/app_generate_service.py
@@ -2,8 +2,6 @@ import uuid
 from collections.abc import Generator, Mapping
 from typing import Any, Union

-from openai._exceptions import RateLimitError
-
 from configs import dify_config
 from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator
 from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator
@@ -122,8 +120,6 @@ class AppGenerateService:
                 )
             else:
                 raise ValueError(f"Invalid app mode {app_model.mode}")
-        except RateLimitError as e:
-            raise InvokeRateLimitError(str(e))
         except Exception:
             rate_limit.exit(request_id)
             raise
diff --git a/api/services/app_service.py b/api/services/app_service.py
index 4fc6cf2494..5f8c5089c9 100644
--- a/api/services/app_service.py
+++ b/api/services/app_service.py
@@ -18,7 +18,7 @@ from events.app_event import app_was_created
 from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
 from libs.login import current_user
-from models.account import Account
+from models import Account
 from models.model import App, AppMode, AppModelConfig, Site
 from models.tools import ApiToolProvider
 from services.billing_service import BillingService
diff --git a/api/services/audio_service.py b/api/services/audio_service.py
index 1158fc5197..41ee9c88aa 100644
--- a/api/services/audio_service.py
+++ b/api/services/audio_service.py
@@ -82,54 +82,51 @@ class AudioService:
         message_id: str | None = None,
         is_draft: bool = False,
     ):
-        from app import app
-
         def invoke_tts(text_content: str, app_model: App, voice: str | None = None, is_draft: bool = False):
-            with app.app_context():
-                if voice is None:
-                    if app_model.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
-                        if is_draft:
-                            workflow = WorkflowService().get_draft_workflow(app_model=app_model)
-                        else:
-                            workflow = app_model.workflow
-                        if (
-                            workflow is None
-                            or "text_to_speech" not in workflow.features_dict
-                            or not workflow.features_dict["text_to_speech"].get("enabled")
-                        ):
+            if voice is None:
+                if app_model.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
+                    if is_draft:
+                        workflow = WorkflowService().get_draft_workflow(app_model=app_model)
+                    else:
+                        workflow = app_model.workflow
+                    if (
+                        workflow is None
+                        or "text_to_speech" not in workflow.features_dict
+                        or not workflow.features_dict["text_to_speech"].get("enabled")
+                    ):
+                        raise ValueError("TTS is not enabled")
+
+                    voice = workflow.features_dict["text_to_speech"].get("voice")
+                else:
+                    if not is_draft:
+                        if app_model.app_model_config is None:
+                            raise ValueError("AppModelConfig not found")
+                        text_to_speech_dict = app_model.app_model_config.text_to_speech_dict
+
+                        if not text_to_speech_dict.get("enabled"):
                             raise ValueError("TTS is not enabled")
-                        voice = workflow.features_dict["text_to_speech"].get("voice")
-                    else:
-                        if not is_draft:
-                            if app_model.app_model_config is None:
-                                raise ValueError("AppModelConfig not found")
-                            text_to_speech_dict = app_model.app_model_config.text_to_speech_dict
+                        voice = text_to_speech_dict.get("voice")
-                            if not text_to_speech_dict.get("enabled"):
-                                raise ValueError("TTS is not enabled")
-
-                            voice = text_to_speech_dict.get("voice")
-
-                model_manager = ModelManager()
-                model_instance = model_manager.get_default_model_instance(
-                    tenant_id=app_model.tenant_id, model_type=ModelType.TTS
-                )
-                try:
-                    if not voice:
-                        voices = model_instance.get_tts_voices()
-                        if voices:
-                            voice = voices[0].get("value")
-                            if not voice:
-                                raise ValueError("Sorry, no voice available.")
-                        else:
+            model_manager = ModelManager()
+            model_instance = model_manager.get_default_model_instance(
+                tenant_id=app_model.tenant_id, model_type=ModelType.TTS
+            )
+            try:
+                if not voice:
+                    voices = model_instance.get_tts_voices()
+                    if voices:
+                        voice = voices[0].get("value")
+                        if not voice:
                             raise ValueError("Sorry, no voice available.")
+                    else:
+                        raise ValueError("Sorry, no voice available.")
-                    return model_instance.invoke_tts(
-                        content_text=text_content.strip(), user=end_user, tenant_id=app_model.tenant_id, voice=voice
-                    )
-                except Exception as e:
-                    raise e
+                return model_instance.invoke_tts(
+                    content_text=text_content.strip(), user=end_user, tenant_id=app_model.tenant_id, voice=voice
+                )
+            except Exception as e:
+                raise e

         if message_id:
             try:
diff --git a/api/services/auth/api_key_auth_service.py b/api/services/auth/api_key_auth_service.py
index 055cf65816..56aaf407ee 100644
--- a/api/services/auth/api_key_auth_service.py
+++ b/api/services/auth/api_key_auth_service.py
@@ -26,10 +26,9 @@ class ApiKeyAuthService:
         api_key = encrypter.encrypt_token(tenant_id, args["credentials"]["config"]["api_key"])
         args["credentials"]["config"]["api_key"] = api_key

-        data_source_api_key_binding = DataSourceApiKeyAuthBinding()
-        data_source_api_key_binding.tenant_id = tenant_id
-        data_source_api_key_binding.category = args["category"]
-        data_source_api_key_binding.provider = args["provider"]
+        data_source_api_key_binding = DataSourceApiKeyAuthBinding(
+            tenant_id=tenant_id, category=args["category"], provider=args["provider"]
+        )
         data_source_api_key_binding.credentials = json.dumps(args["credentials"], ensure_ascii=False)
         db.session.add(data_source_api_key_binding)
         db.session.commit()
@@ -48,6 +47,8 @@ class ApiKeyAuthService:
         )
         if not data_source_api_key_bindings:
             return None
+        if not data_source_api_key_bindings.credentials:
+            return None
         credentials = json.loads(data_source_api_key_bindings.credentials)
         return credentials
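The `DataSourceApiKeyAuthBinding` change above swaps attribute-by-attribute mutation for constructor keyword arguments; SQLAlchemy's declarative constructor accepts any mapped column as a keyword, so the two are equivalent at runtime, while the constructor form keeps the required columns in one visible place. A minimal sketch (simplified, assumed columns):

    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Binding(Base):  # simplified stand-in for DataSourceApiKeyAuthBinding
        __tablename__ = "data_source_api_key_auth_binding"
        id: Mapped[int] = mapped_column(primary_key=True)
        tenant_id: Mapped[str]
        category: Mapped[str]
        provider: Mapped[str]

    binding = Binding(tenant_id="t-1", category="search", provider="example_provider")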
diff --git a/api/services/billing_service.py b/api/services/billing_service.py
index 9d6c5b4b31..a6851d2638 100644
--- a/api/services/billing_service.py
+++ b/api/services/billing_service.py
@@ -7,7 +7,7 @@ from tenacity import retry, retry_if_exception_type, stop_before_delay, wait_fix
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from libs.helper import RateLimiter
-from models.account import Account, TenantAccountJoin, TenantAccountRole
+from models import Account, TenantAccountJoin, TenantAccountRole


 class BillingService:
diff --git a/api/services/conversation_service.py b/api/services/conversation_service.py
index a8e51a426d..39d6c81621 100644
--- a/api/services/conversation_service.py
+++ b/api/services/conversation_service.py
@@ -14,8 +14,7 @@ from extensions.ext_database import db
 from factories import variable_factory
 from libs.datetime_utils import naive_utc_now
 from libs.infinite_scroll_pagination import InfiniteScrollPagination
-from models import ConversationVariable
-from models.account import Account
+from models import Account, ConversationVariable
 from models.model import App, Conversation, EndUser, Message
 from services.errors.conversation import (
     ConversationNotExistsError,
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index c9dd78ddd1..1e040abe3e 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -29,7 +29,7 @@ from extensions.ext_redis import redis_client
 from libs import helper
 from libs.datetime_utils import naive_utc_now
 from libs.login import current_user
-from models.account import Account, TenantAccountRole
+from models import Account, TenantAccountRole
 from models.dataset import (
     AppDatasetJoin,
     ChildChunk,
@@ -93,7 +93,7 @@ logger = logging.getLogger(__name__)


 class DatasetService:
     @staticmethod
     def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None, include_all=False):
-        query = select(Dataset).where(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc())
+        query = select(Dataset).where(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc(), Dataset.id)

         if user:
             # get permitted dataset ids
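The extra `Dataset.id` in the `order_by` above makes the sort key unique: rows sharing a `created_at` timestamp otherwise come back in database-dependent order, so offset pagination can skip or duplicate rows across pages. Shape of the resulting query (minimal stand-in model):

    from datetime import datetime

    from sqlalchemy import select
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Dataset(Base):  # minimal stand-in for models.dataset.Dataset
        __tablename__ = "datasets"
        id: Mapped[str] = mapped_column(primary_key=True)
        tenant_id: Mapped[str]
        created_at: Mapped[datetime]

    stmt = (
        select(Dataset)
        .where(Dataset.tenant_id == "tenant-1")
        .order_by(Dataset.created_at.desc(), Dataset.id)  # unique tiebreaker -> stable pages
    )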
@@ -241,9 +241,9 @@ class DatasetService:
         dataset.created_by = account.id
         dataset.updated_by = account.id
         dataset.tenant_id = tenant_id
-        dataset.embedding_model_provider = embedding_model.provider if embedding_model else None  # type: ignore
-        dataset.embedding_model = embedding_model.model if embedding_model else None  # type: ignore
-        dataset.retrieval_model = retrieval_model.model_dump() if retrieval_model else None  # type: ignore
+        dataset.embedding_model_provider = embedding_model.provider if embedding_model else None
+        dataset.embedding_model = embedding_model.model if embedding_model else None
+        dataset.retrieval_model = retrieval_model.model_dump() if retrieval_model else None
         dataset.permission = permission or DatasetPermissionEnum.ONLY_ME
         dataset.provider = provider
         db.session.add(dataset)
@@ -1416,6 +1416,8 @@ class DocumentService:
         # check document limit
         assert isinstance(current_user, Account)
         assert current_user.current_tenant_id is not None
+        assert knowledge_config.data_source
+        assert knowledge_config.data_source.info_list

         features = FeatureService.get_features(current_user.current_tenant_id)
@@ -1424,15 +1426,18 @@ class DocumentService:
             count = 0
             if knowledge_config.data_source:
                 if knowledge_config.data_source.info_list.data_source_type == "upload_file":
-                    upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids  # type: ignore
+                    if not knowledge_config.data_source.info_list.file_info_list:
+                        raise ValueError("File source info is required")
+                    upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids
                     count = len(upload_file_list)
                 elif knowledge_config.data_source.info_list.data_source_type == "notion_import":
-                    notion_info_list = knowledge_config.data_source.info_list.notion_info_list
-                    for notion_info in notion_info_list:  # type: ignore
+                    notion_info_list = knowledge_config.data_source.info_list.notion_info_list or []
+                    for notion_info in notion_info_list:
                         count = count + len(notion_info.pages)
                 elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":
                     website_info = knowledge_config.data_source.info_list.website_info_list
-                    count = len(website_info.urls)  # type: ignore
+                    assert website_info
+                    count = len(website_info.urls)

             batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT)
             if features.billing.subscription.plan == "sandbox" and count > 1:
@@ -1444,7 +1449,7 @@ class DocumentService:
         # if dataset is empty, update dataset data_source_type
         if not dataset.data_source_type:
-            dataset.data_source_type = knowledge_config.data_source.info_list.data_source_type  # type: ignore
+            dataset.data_source_type = knowledge_config.data_source.info_list.data_source_type

         if not dataset.indexing_technique:
             if knowledge_config.indexing_technique not in Dataset.INDEXING_TECHNIQUE_LIST:
@@ -1470,7 +1475,7 @@ class DocumentService:
                 dataset.collection_binding_id = dataset_collection_binding.id
             if not dataset.retrieval_model:
                 default_retrieval_model = {
-                    "search_method": RetrievalMethod.SEMANTIC_SEARCH.value,
+                    "search_method": RetrievalMethod.SEMANTIC_SEARCH,
                     "reranking_enable": False,
                     "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""},
                     "top_k": 4,
@@ -1481,7 +1486,7 @@ class DocumentService:
                 knowledge_config.retrieval_model.model_dump()
                 if knowledge_config.retrieval_model
                 else default_retrieval_model
-            )  # type: ignore
+            )

         documents = []
         if knowledge_config.original_document_id:
@@ -1523,11 +1528,14 @@ class DocumentService:
             db.session.flush()
         lock_name = f"add_document_lock_dataset_id_{dataset.id}"
         with redis_client.lock(lock_name, timeout=600):
+            assert dataset_process_rule
             position = DocumentService.get_documents_position(dataset.id)
             document_ids = []
             duplicate_document_ids = []
-            if knowledge_config.data_source.info_list.data_source_type == "upload_file":  # type: ignore
-                upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids  # type: ignore
+            if knowledge_config.data_source.info_list.data_source_type == "upload_file":
+                if not knowledge_config.data_source.info_list.file_info_list:
+                    raise ValueError("File source info is required")
+                upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids
                 for file_id in upload_file_list:
                     file = (
                         db.session.query(UploadFile)
@@ -1540,7 +1548,7 @@ class DocumentService:
                         raise FileNotExistsError()
                     file_name = file.name
-                    data_source_info = {
+                    data_source_info: dict[str, str | bool] = {
                         "upload_file_id": file_id,
                     }
                     # check duplicate
@@ -1557,7 +1565,7 @@ class DocumentService:
                             .first()
                         )
                         if document:
-                            document.dataset_process_rule_id = dataset_process_rule.id  # type: ignore
+                            document.dataset_process_rule_id = dataset_process_rule.id
                             document.updated_at = naive_utc_now()
                             document.created_from = created_from
                             document.doc_form = knowledge_config.doc_form
@@ -1571,8 +1579,8 @@ class DocumentService:
                             continue
                     document = DocumentService.build_document(
                         dataset,
-                        dataset_process_rule.id,  # type: ignore
-                        knowledge_config.data_source.info_list.data_source_type,  # type: ignore
+                        dataset_process_rule.id,
+                        knowledge_config.data_source.info_list.data_source_type,
                         knowledge_config.doc_form,
                         knowledge_config.doc_language,
                         data_source_info,
@@ -1587,7 +1595,7 @@ class DocumentService:
                     document_ids.append(document.id)
                     documents.append(document)
                     position += 1
-            elif knowledge_config.data_source.info_list.data_source_type == "notion_import":  # type: ignore
+            elif knowledge_config.data_source.info_list.data_source_type == "notion_import":
                 notion_info_list = knowledge_config.data_source.info_list.notion_info_list  # type: ignore
                 if not notion_info_list:
                     raise ValueError("No notion info list found.")
@@ -1616,15 +1624,15 @@ class DocumentService:
                             "credential_id": notion_info.credential_id,
                             "notion_workspace_id": workspace_id,
                             "notion_page_id": page.page_id,
-                            "notion_page_icon": page.page_icon.model_dump() if page.page_icon else None,
+                            "notion_page_icon": page.page_icon.model_dump() if page.page_icon else None,  # type: ignore
                             "type": page.type,
                         }
                         # Truncate page name to 255 characters to prevent DB field length errors
                         truncated_page_name = page.page_name[:255] if page.page_name else "nopagename"
                         document = DocumentService.build_document(
                             dataset,
-                            dataset_process_rule.id,  # type: ignore
-                            knowledge_config.data_source.info_list.data_source_type,  # type: ignore
+                            dataset_process_rule.id,
+                            knowledge_config.data_source.info_list.data_source_type,
                             knowledge_config.doc_form,
                             knowledge_config.doc_language,
                             data_source_info,
@@ -1644,8 +1652,8 @@ class DocumentService:
                 # delete not selected documents
                 if len(exist_document) > 0:
                     clean_notion_document_task.delay(list(exist_document.values()), dataset.id)
-            elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":  # type: ignore
-                website_info = knowledge_config.data_source.info_list.website_info_list  # type: ignore
+            elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":
+                website_info = knowledge_config.data_source.info_list.website_info_list
                 if not website_info:
                     raise ValueError("No website info list found.")
                 urls = website_info.urls
@@ -1663,8 +1671,8 @@ class DocumentService:
                         document_name = url
                     document = DocumentService.build_document(
                         dataset,
-                        dataset_process_rule.id,  # type: ignore
-                        knowledge_config.data_source.info_list.data_source_type,  # type: ignore
+                        dataset_process_rule.id,
+                        knowledge_config.data_source.info_list.data_source_type,
                         knowledge_config.doc_form,
                         knowledge_config.doc_language,
                         data_source_info,
@@ -1752,7 +1760,7 @@ class DocumentService:
         #     dataset.collection_binding_id = dataset_collection_binding.id
         # if not dataset.retrieval_model:
         #     default_retrieval_model = {
-        #         "search_method": RetrievalMethod.SEMANTIC_SEARCH.value,
+        #         "search_method": RetrievalMethod.SEMANTIC_SEARCH,
         #         "reranking_enable": False,
         #         "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""},
        #         "top_k": 2,
@@ -2071,7 +2079,7 @@ class DocumentService:
         # update document data source
         if document_data.data_source:
             file_name = ""
-            data_source_info = {}
+            data_source_info: dict[str, str | bool] = {}
             if document_data.data_source.info_list.data_source_type == "upload_file":
                 if not document_data.data_source.info_list.file_info_list:
                     raise ValueError("No file info list found.")
@@ -2128,7 +2136,7 @@ class DocumentService:
                         "url": url,
                         "provider": website_info.provider,
                        "job_id": website_info.job_id,
-                        "only_main_content": website_info.only_main_content,  # type: ignore
+                        "only_main_content": website_info.only_main_content,
                         "mode": "crawl",
                     }
                 document.data_source_type = document_data.data_source.info_list.data_source_type
@@ -2154,7 +2162,7 @@ class DocumentService:

             db.session.query(DocumentSegment).filter_by(document_id=document.id).update(
                 {DocumentSegment.status: "re_segment"}
-            )  # type: ignore
+            )
             db.session.commit()
             # trigger async task
             document_indexing_update_task.delay(document.dataset_id, document.id)
@@ -2164,25 +2172,26 @@ class DocumentService:
     def save_document_without_dataset_id(tenant_id: str, knowledge_config: KnowledgeConfig, account: Account):
         assert isinstance(current_user, Account)
         assert current_user.current_tenant_id is not None
+        assert knowledge_config.data_source

         features = FeatureService.get_features(current_user.current_tenant_id)

         if features.billing.enabled:
             count = 0
-            if knowledge_config.data_source.info_list.data_source_type == "upload_file":  # type: ignore
+            if knowledge_config.data_source.info_list.data_source_type == "upload_file":
                 upload_file_list = (
-                    knowledge_config.data_source.info_list.file_info_list.file_ids  # type: ignore
-                    if knowledge_config.data_source.info_list.file_info_list  # type: ignore
+                    knowledge_config.data_source.info_list.file_info_list.file_ids
+                    if knowledge_config.data_source.info_list.file_info_list
                     else []
                 )
                 count = len(upload_file_list)
-            elif knowledge_config.data_source.info_list.data_source_type == "notion_import":  # type: ignore
-                notion_info_list = knowledge_config.data_source.info_list.notion_info_list  # type: ignore
+            elif knowledge_config.data_source.info_list.data_source_type == "notion_import":
+                notion_info_list = knowledge_config.data_source.info_list.notion_info_list
                 if notion_info_list:
                     for notion_info in notion_info_list:
                         count = count + len(notion_info.pages)
-            elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":  # type: ignore
-                website_info = knowledge_config.data_source.info_list.website_info_list  # type: ignore
+            elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":
+                website_info = knowledge_config.data_source.info_list.website_info_list
                 if website_info:
                     count = len(website_info.urls)
             if features.billing.subscription.plan == "sandbox" and count > 1:
@@ -2196,16 +2205,18 @@ class DocumentService:
         dataset_collection_binding_id = None
         retrieval_model = None
         if knowledge_config.indexing_technique == "high_quality":
+            assert knowledge_config.embedding_model_provider
+            assert knowledge_config.embedding_model
             dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
-                knowledge_config.embedding_model_provider,  # type: ignore
-                knowledge_config.embedding_model,  # type: ignore
+                knowledge_config.embedding_model_provider,
+                knowledge_config.embedding_model,
             )
             dataset_collection_binding_id = dataset_collection_binding.id
         if knowledge_config.retrieval_model:
             retrieval_model = knowledge_config.retrieval_model
         else:
             retrieval_model = RetrievalModel(
-                search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
+                search_method=RetrievalMethod.SEMANTIC_SEARCH,
                 reranking_enable=False,
                 reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
                 top_k=4,
@@ -2215,7 +2226,7 @@ class DocumentService:
         dataset = Dataset(
             tenant_id=tenant_id,
             name="",
-            data_source_type=knowledge_config.data_source.info_list.data_source_type,  # type: ignore
+            data_source_type=knowledge_config.data_source.info_list.data_source_type,
             indexing_technique=knowledge_config.indexing_technique,
             created_by=account.id,
             embedding_model=knowledge_config.embedding_model,
@@ -2224,7 +2235,7 @@
             retrieval_model=retrieval_model.model_dump() if retrieval_model else None,
         )

-        db.session.add(dataset)  # type: ignore
+        db.session.add(dataset)
         db.session.flush()

         documents, batch = DocumentService.save_document_with_dataset_id(dataset, knowledge_config, account)
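The recurring edit in this file replaces `# type: ignore` comments with runtime `assert`s on optional fields. The assert both narrows the type for the checker (`X | None` becomes `X` past the assert) and turns a silently-propagated `None` into an immediate failure. Illustration with hypothetical shapes:

    from dataclasses import dataclass

    @dataclass
    class WebsiteInfo:  # hypothetical shapes, for illustration only
        urls: list[str]

    @dataclass
    class InfoList:
        website_info_list: WebsiteInfo | None = None

    def count_urls(info: InfoList) -> int:
        website_info = info.website_info_list
        assert website_info              # narrows WebsiteInfo | None to WebsiteInfo
        return len(website_info.urls)    # no "# type: ignore" needed after the assert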
""" + with Session(db.engine) as session: datasource_provider = ( session.query(DatasourceProvider) @@ -936,6 +950,7 @@ class DatasourceProviderService: for key, value in credentials.items() } try: + current_user = get_current_user() self.provider_manager.validate_provider_credentials( tenant_id=tenant_id, user_id=current_user.id, diff --git a/api/services/enterprise/base.py b/api/services/enterprise/base.py index edb76408e8..bdc960aa2d 100644 --- a/api/services/enterprise/base.py +++ b/api/services/enterprise/base.py @@ -1,10 +1,12 @@ import os +from collections.abc import Mapping +from typing import Any -import requests +import httpx class BaseRequest: - proxies = { + proxies: Mapping[str, str] | None = { "http": "", "https": "", } @@ -13,10 +15,31 @@ class BaseRequest: secret_key_header = "" @classmethod - def send_request(cls, method, endpoint, json=None, params=None): + def _build_mounts(cls) -> dict[str, httpx.BaseTransport] | None: + if not cls.proxies: + return None + + mounts: dict[str, httpx.BaseTransport] = {} + for scheme, value in cls.proxies.items(): + if not value: + continue + key = f"{scheme}://" if not scheme.endswith("://") else scheme + mounts[key] = httpx.HTTPTransport(proxy=value) + return mounts or None + + @classmethod + def send_request( + cls, + method: str, + endpoint: str, + json: Any | None = None, + params: Mapping[str, Any] | None = None, + ) -> Any: headers = {"Content-Type": "application/json", cls.secret_key_header: cls.secret_key} url = f"{cls.base_url}{endpoint}" - response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies) + mounts = cls._build_mounts() + with httpx.Client(mounts=mounts) as client: + response = client.request(method, url, json=json, params=params, headers=headers) return response.json() diff --git a/api/services/enterprise/enterprise_service.py b/api/services/enterprise/enterprise_service.py index f8612456d6..974aa849db 100644 --- a/api/services/enterprise/enterprise_service.py +++ b/api/services/enterprise/enterprise_service.py @@ -46,17 +46,17 @@ class EnterpriseService: class WebAppAuth: @classmethod - def is_user_allowed_to_access_webapp(cls, user_id: str, app_code: str): - params = {"userId": user_id, "appCode": app_code} + def is_user_allowed_to_access_webapp(cls, user_id: str, app_id: str): + params = {"userId": user_id, "appId": app_id} data = EnterpriseRequest.send_request("GET", "/webapp/permission", params=params) return data.get("result", False) @classmethod - def batch_is_user_allowed_to_access_webapps(cls, user_id: str, app_codes: list[str]): - if not app_codes: + def batch_is_user_allowed_to_access_webapps(cls, user_id: str, app_ids: list[str]): + if not app_ids: return {} - body = {"userId": user_id, "appCodes": app_codes} + body = {"userId": user_id, "appIds": app_ids} data = EnterpriseRequest.send_request("POST", "/webapp/permission/batch", json=body) if not data: raise ValueError("No data found.") @@ -70,7 +70,7 @@ class EnterpriseService: data = EnterpriseRequest.send_request("GET", "/webapp/access-mode/id", params=params) if not data: raise ValueError("No data found.") - return WebAppSettings(**data) + return WebAppSettings.model_validate(data) @classmethod def batch_get_app_access_mode_by_id(cls, app_ids: list[str]) -> dict[str, WebAppSettings]: @@ -100,7 +100,7 @@ class EnterpriseService: data = EnterpriseRequest.send_request("GET", "/webapp/access-mode/code", params=params) if not data: raise ValueError("No data found.") - return WebAppSettings(**data) + return 
diff --git a/api/services/enterprise/enterprise_service.py b/api/services/enterprise/enterprise_service.py
index f8612456d6..974aa849db 100644
--- a/api/services/enterprise/enterprise_service.py
+++ b/api/services/enterprise/enterprise_service.py
@@ -46,17 +46,17 @@ class EnterpriseService:

     class WebAppAuth:
         @classmethod
-        def is_user_allowed_to_access_webapp(cls, user_id: str, app_code: str):
-            params = {"userId": user_id, "appCode": app_code}
+        def is_user_allowed_to_access_webapp(cls, user_id: str, app_id: str):
+            params = {"userId": user_id, "appId": app_id}
             data = EnterpriseRequest.send_request("GET", "/webapp/permission", params=params)

             return data.get("result", False)

         @classmethod
-        def batch_is_user_allowed_to_access_webapps(cls, user_id: str, app_codes: list[str]):
-            if not app_codes:
+        def batch_is_user_allowed_to_access_webapps(cls, user_id: str, app_ids: list[str]):
+            if not app_ids:
                 return {}
-            body = {"userId": user_id, "appCodes": app_codes}
+            body = {"userId": user_id, "appIds": app_ids}
             data = EnterpriseRequest.send_request("POST", "/webapp/permission/batch", json=body)
             if not data:
                 raise ValueError("No data found.")
@@ -70,7 +70,7 @@ class EnterpriseService:
             data = EnterpriseRequest.send_request("GET", "/webapp/access-mode/id", params=params)
             if not data:
                 raise ValueError("No data found.")
-            return WebAppSettings(**data)
+            return WebAppSettings.model_validate(data)

         @classmethod
         def batch_get_app_access_mode_by_id(cls, app_ids: list[str]) -> dict[str, WebAppSettings]:
@@ -100,7 +100,7 @@ class EnterpriseService:
             data = EnterpriseRequest.send_request("GET", "/webapp/access-mode/code", params=params)
             if not data:
                 raise ValueError("No data found.")
-            return WebAppSettings(**data)
+            return WebAppSettings.model_validate(data)

         @classmethod
         def update_app_access_mode(cls, app_id: str, access_mode: str):
diff --git a/api/services/entities/knowledge_entities/knowledge_entities.py b/api/services/entities/knowledge_entities/knowledge_entities.py
index 33f65bde58..b9a210740d 100644
--- a/api/services/entities/knowledge_entities/knowledge_entities.py
+++ b/api/services/entities/knowledge_entities/knowledge_entities.py
@@ -3,6 +3,8 @@ from typing import Literal

 from pydantic import BaseModel

+from core.rag.retrieval.retrieval_methods import RetrievalMethod
+

 class ParentMode(StrEnum):
     FULL_DOC = "full-doc"
@@ -95,7 +97,7 @@ class WeightModel(BaseModel):


 class RetrievalModel(BaseModel):
-    search_method: Literal["hybrid_search", "semantic_search", "full_text_search", "keyword_search"]
+    search_method: RetrievalMethod
     reranking_enable: bool
     reranking_model: RerankingModel | None = None
     reranking_mode: str | None = None
diff --git a/api/services/entities/knowledge_entities/rag_pipeline_entities.py b/api/services/entities/knowledge_entities/rag_pipeline_entities.py
index 860bfde401..a97ccab914 100644
--- a/api/services/entities/knowledge_entities/rag_pipeline_entities.py
+++ b/api/services/entities/knowledge_entities/rag_pipeline_entities.py
@@ -2,6 +2,8 @@ from typing import Literal

 from pydantic import BaseModel, field_validator

+from core.rag.retrieval.retrieval_methods import RetrievalMethod
+

 class IconInfo(BaseModel):
     icon: str
@@ -83,7 +85,7 @@ class RetrievalSetting(BaseModel):
     """
     Retrieval Setting.
     """

-    search_method: Literal["semantic_search", "full_text_search", "keyword_search", "hybrid_search"]
+    search_method: RetrievalMethod
     top_k: int
     score_threshold: float | None = 0.5
     score_threshold_enabled: bool = False
diff --git a/api/services/entities/model_provider_entities.py b/api/services/entities/model_provider_entities.py
index 49d48f044c..d07badefa7 100644
--- a/api/services/entities/model_provider_entities.py
+++ b/api/services/entities/model_provider_entities.py
@@ -1,6 +1,7 @@
-from enum import Enum
+from collections.abc import Sequence
+from enum import StrEnum

-from pydantic import BaseModel, ConfigDict
+from pydantic import BaseModel, ConfigDict, model_validator

 from configs import dify_config
 from core.entities.model_entities import (
@@ -26,7 +27,7 @@ from core.model_runtime.entities.provider_entities import (
 from models.provider import ProviderType


-class CustomConfigurationStatus(Enum):
+class CustomConfigurationStatus(StrEnum):
     """
     Enum class for custom configuration status.
""" @@ -71,7 +72,7 @@ class ProviderResponse(BaseModel): icon_large: I18nObject | None = None background: str | None = None help: ProviderHelpEntity | None = None - supported_model_types: list[ModelType] + supported_model_types: Sequence[ModelType] configurate_methods: list[ConfigurateMethod] provider_credential_schema: ProviderCredentialSchema | None = None model_credential_schema: ModelCredentialSchema | None = None @@ -82,9 +83,8 @@ class ProviderResponse(BaseModel): # pydantic configs model_config = ConfigDict(protected_namespaces=()) - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): url_prefix = ( dify_config.CONSOLE_API_URL + f"/console/api/workspaces/{self.tenant_id}/model-providers/{self.provider}" ) @@ -97,6 +97,7 @@ class ProviderResponse(BaseModel): self.icon_large = I18nObject( en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" ) + return self class ProviderWithModelsResponse(BaseModel): @@ -112,9 +113,8 @@ class ProviderWithModelsResponse(BaseModel): status: CustomConfigurationStatus models: list[ProviderModelWithStatusEntity] - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): url_prefix = ( dify_config.CONSOLE_API_URL + f"/console/api/workspaces/{self.tenant_id}/model-providers/{self.provider}" ) @@ -127,6 +127,7 @@ class ProviderWithModelsResponse(BaseModel): self.icon_large = I18nObject( en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" ) + return self class SimpleProviderEntityResponse(SimpleProviderEntity): @@ -136,9 +137,8 @@ class SimpleProviderEntityResponse(SimpleProviderEntity): tenant_id: str - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): url_prefix = ( dify_config.CONSOLE_API_URL + f"/console/api/workspaces/{self.tenant_id}/model-providers/{self.provider}" ) @@ -151,6 +151,7 @@ class SimpleProviderEntityResponse(SimpleProviderEntity): self.icon_large = I18nObject( en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" ) + return self class DefaultModelResponse(BaseModel): diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index b6ba3bafea..5cd3b471f9 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -88,9 +88,9 @@ class ExternalDatasetService: else: raise ValueError(f"invalid endpoint: {endpoint}") try: - response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"}) - except Exception: - raise ValueError(f"failed to connect to the endpoint: {endpoint}") + response = ssrf_proxy.post(endpoint, headers={"Authorization": f"Bearer {api_key}"}) + except Exception as e: + raise ValueError(f"failed to connect to the endpoint: {endpoint}") from e if response.status_code == 502: raise ValueError(f"Bad Gateway: failed to connect to the endpoint: {endpoint}") if response.status_code == 404: diff --git a/api/services/feature_service.py b/api/services/feature_service.py index 19d96cb972..148442f76e 100644 --- a/api/services/feature_service.py +++ b/api/services/feature_service.py @@ -174,6 +174,7 @@ class FeatureService: if dify_config.ENTERPRISE_ENABLED: features.webapp_copyright_enabled = True + features.knowledge_pipeline.publish_enabled = True cls._fulfill_params_from_workspace_info(features, tenant_id) return features diff --git a/api/services/file_service.py b/api/services/file_service.py 
diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py
index b6ba3bafea..5cd3b471f9 100644
--- a/api/services/external_knowledge_service.py
+++ b/api/services/external_knowledge_service.py
@@ -88,9 +88,9 @@ class ExternalDatasetService:
         else:
             raise ValueError(f"invalid endpoint: {endpoint}")
         try:
-            response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"})
-        except Exception:
-            raise ValueError(f"failed to connect to the endpoint: {endpoint}")
+            response = ssrf_proxy.post(endpoint, headers={"Authorization": f"Bearer {api_key}"})
+        except Exception as e:
+            raise ValueError(f"failed to connect to the endpoint: {endpoint}") from e
         if response.status_code == 502:
             raise ValueError(f"Bad Gateway: failed to connect to the endpoint: {endpoint}")
         if response.status_code == 404:
diff --git a/api/services/feature_service.py b/api/services/feature_service.py
index 19d96cb972..148442f76e 100644
--- a/api/services/feature_service.py
+++ b/api/services/feature_service.py
@@ -174,6 +174,7 @@ class FeatureService:

         if dify_config.ENTERPRISE_ENABLED:
             features.webapp_copyright_enabled = True
+            features.knowledge_pipeline.publish_enabled = True

         cls._fulfill_params_from_workspace_info(features, tenant_id)
         return features
diff --git a/api/services/file_service.py b/api/services/file_service.py
index f0bb68766d..dd6a829ea2 100644
--- a/api/services/file_service.py
+++ b/api/services/file_service.py
@@ -19,7 +19,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
 from extensions.ext_storage import storage
 from libs.datetime_utils import naive_utc_now
 from libs.helper import extract_tenant_id
-from models.account import Account
+from models import Account
 from models.enums import CreatorUserRole
 from models.model import EndUser, UploadFile
diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py
index 00ec3babf3..337181728c 100644
--- a/api/services/hit_testing_service.py
+++ b/api/services/hit_testing_service.py
@@ -9,13 +9,13 @@ from core.rag.models.document import Document
 from core.rag.retrieval.dataset_retrieval import DatasetRetrieval
 from core.rag.retrieval.retrieval_methods import RetrievalMethod
 from extensions.ext_database import db
-from models.account import Account
+from models import Account
 from models.dataset import Dataset, DatasetQuery

 logger = logging.getLogger(__name__)

 default_retrieval_model = {
-    "search_method": RetrievalMethod.SEMANTIC_SEARCH.value,
+    "search_method": RetrievalMethod.SEMANTIC_SEARCH,
     "reranking_enable": False,
     "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""},
     "top_k": 4,
@@ -46,7 +46,7 @@ class HitTestingService:

         from core.app.app_config.entities import MetadataFilteringCondition

-        metadata_filtering_conditions = MetadataFilteringCondition(**metadata_filtering_conditions)
+        metadata_filtering_conditions = MetadataFilteringCondition.model_validate(metadata_filtering_conditions)

         metadata_filter_document_ids, metadata_condition = dataset_retrieval.get_metadata_filter_condition(
             dataset_ids=[dataset.id],
@@ -63,7 +63,7 @@ class HitTestingService:
         if metadata_condition and not document_ids_filter:
             return cls.compact_retrieve_response(query, [])
         all_documents = RetrievalService.retrieve(
-            retrieval_method=retrieval_model.get("search_method", "semantic_search"),
+            retrieval_method=RetrievalMethod(retrieval_model.get("search_method", RetrievalMethod.SEMANTIC_SEARCH)),
             dataset_id=dataset.id,
             query=query,
             top_k=retrieval_model.get("top_k", 4),
@@ -88,7 +88,7 @@ class HitTestingService:
         db.session.add(dataset_query)
         db.session.commit()

-        return cls.compact_retrieve_response(query, all_documents)  # type: ignore
+        return cls.compact_retrieve_response(query, all_documents)

     @classmethod
     def external_retrieve(
diff --git a/api/services/knowledge_service.py b/api/services/knowledge_service.py
index 8df1a6ba14..02fe1d19bc 100644
--- a/api/services/knowledge_service.py
+++ b/api/services/knowledge_service.py
@@ -1,4 +1,4 @@
-import boto3  # type: ignore
+import boto3

 from configs import dify_config
diff --git a/api/services/message_service.py b/api/services/message_service.py
index 5e356bf925..7ed56d80f2 100644
--- a/api/services/message_service.py
+++ b/api/services/message_service.py
@@ -12,7 +12,7 @@ from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
 from core.ops.utils import measure_time
 from extensions.ext_database import db
 from libs.infinite_scroll_pagination import InfiniteScrollPagination
-from models.account import Account
+from models import Account
 from models.model import App, AppMode, AppModelConfig, EndUser, Message, MessageFeedback
 from services.conversation_service import ConversationService
 from services.errors.message import (
@@ -288,9 +288,10 @@ class MessageService:
         )

         with measure_time() as timer:
-            questions: list[str] = LLMGenerator.generate_suggested_questions_after_answer(
+            questions_sequence = LLMGenerator.generate_suggested_questions_after_answer(
                 tenant_id=app_model.tenant_id, histories=histories
             )
+            questions: list[str] = list(questions_sequence)

         # get tracing instance
         trace_manager = TraceQueueManager(app_id=app_model.id)
diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py
index 6add830813..b369994d2d 100644
--- a/api/services/metadata_service.py
+++ b/api/services/metadata_service.py
@@ -1,12 +1,11 @@
 import copy
 import logging

-from flask_login import current_user
-
 from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from libs.datetime_utils import naive_utc_now
+from libs.login import current_account_with_tenant
 from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding
 from services.dataset_service import DocumentService
 from services.entities.knowledge_entities.knowledge_entities import (
@@ -23,11 +22,11 @@ class MetadataService:
         # check if metadata name is too long
         if len(metadata_args.name) > 255:
             raise ValueError("Metadata name cannot exceed 255 characters.")
-
+        current_user, current_tenant_id = current_account_with_tenant()
         # check if metadata name already exists
         if (
             db.session.query(DatasetMetadata)
-            .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=metadata_args.name)
+            .filter_by(tenant_id=current_tenant_id, dataset_id=dataset_id, name=metadata_args.name)
             .first()
         ):
             raise ValueError("Metadata name already exists.")
@@ -35,7 +34,7 @@ class MetadataService:
             if field.value == metadata_args.name:
                 raise ValueError("Metadata name already exists in Built-in fields.")
         metadata = DatasetMetadata(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             dataset_id=dataset_id,
             type=metadata_args.type,
             name=metadata_args.name,
@@ -53,9 +52,10 @@ class MetadataService:
         lock_key = f"dataset_metadata_lock_{dataset_id}"

         # check if metadata name already exists
+        current_user, current_tenant_id = current_account_with_tenant()
         if (
             db.session.query(DatasetMetadata)
-            .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=name)
+            .filter_by(tenant_id=current_tenant_id, dataset_id=dataset_id, name=name)
             .first()
         ):
             raise ValueError("Metadata name already exists.")
@@ -89,7 +89,7 @@ class MetadataService:
                 document.doc_metadata = doc_metadata
                 db.session.add(document)
             db.session.commit()
-            return metadata  # type: ignore
+            return metadata
         except Exception:
             logger.exception("Update metadata name failed")
         finally:
@@ -220,9 +220,10 @@ class MetadataService:
             db.session.commit()
             # deal metadata binding
             db.session.query(DatasetMetadataBinding).filter_by(document_id=operation.document_id).delete()
+            current_user, current_tenant_id = current_account_with_tenant()
             for metadata_value in operation.metadata_list:
                 dataset_metadata_binding = DatasetMetadataBinding(
-                    tenant_id=current_user.current_tenant_id,
+                    tenant_id=current_tenant_id,
                     dataset_id=dataset.id,
                     document_id=operation.document_id,
                     metadata_id=metadata_value.id,
diff --git a/api/services/model_provider_service.py b/api/services/model_provider_service.py
index 2901a0d273..50ddbbf681 100644
--- a/api/services/model_provider_service.py
+++ b/api/services/model_provider_service.py
@@ -137,7 +137,7 @@ class ModelProviderService:
         :return:
         """
         provider_configuration = self._get_provider_configuration(tenant_id, provider)
-        return provider_configuration.get_provider_credential(credential_id=credential_id)  # type: ignore
+        return provider_configuration.get_provider_credential(credential_id=credential_id)

     def validate_provider_credentials(self, tenant_id: str, provider: str, credentials: dict):
         """
@@ -225,7 +225,7 @@ class ModelProviderService:
         :return:
         """
         provider_configuration = self._get_provider_configuration(tenant_id, provider)
-        return provider_configuration.get_custom_model_credential(  # type: ignore
+        return provider_configuration.get_custom_model_credential(
             model_type=ModelType.value_of(model_type), model=model, credential_id=credential_id
         )

diff --git a/api/services/oauth_server.py b/api/services/oauth_server.py
index b722dbee22..b05b43d76e 100644
--- a/api/services/oauth_server.py
+++ b/api/services/oauth_server.py
@@ -7,7 +7,7 @@ from werkzeug.exceptions import BadRequest

 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
-from models.account import Account
+from models import Account
 from models.model import OAuthProviderApp
 from services.account_service import AccountService
diff --git a/api/services/ops_service.py b/api/services/ops_service.py
index c214640653..e490b7ed3c 100644
--- a/api/services/ops_service.py
+++ b/api/services/ops_service.py
@@ -102,6 +102,15 @@ class OpsService:
             except Exception:
                 new_decrypt_tracing_config.update({"project_url": "https://arms.console.aliyun.com/"})

+        if tracing_provider == "tencent" and (
+            "project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
+        ):
+            try:
+                project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
+                new_decrypt_tracing_config.update({"project_url": project_url})
+            except Exception:
+                new_decrypt_tracing_config.update({"project_url": "https://console.cloud.tencent.com/apm"})
+
         trace_config_data.tracing_config = new_decrypt_tracing_config
         return trace_config_data.to_dict()
@@ -123,7 +132,7 @@ class OpsService:
         config_class: type[BaseTracingConfig] = provider_config["config_class"]
         other_keys: list[str] = provider_config["other_keys"]
-        default_config_instance: BaseTracingConfig = config_class(**tracing_config)
+        default_config_instance = config_class.model_validate(tracing_config)
         for key in other_keys:
             if key in tracing_config and tracing_config[key] == "":
                 tracing_config[key] = getattr(default_config_instance, key, None)
@@ -144,7 +153,7 @@ class OpsService:
                 project_url = f"{tracing_config.get('host')}/project/{project_key}"
             except Exception:
                 project_url = None
-        elif tracing_provider in ("langsmith", "opik"):
+        elif tracing_provider in ("langsmith", "opik", "tencent"):
             try:
                 project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider)
             except Exception:
diff --git a/api/services/plugin/plugin_migration.py b/api/services/plugin/plugin_migration.py
index 99946d8fa9..df5fa3e233 100644
--- a/api/services/plugin/plugin_migration.py
+++ b/api/services/plugin/plugin_migration.py
@@ -146,7 +146,7 @@ class PluginMigration:
                     futures.append(
                         thread_pool.submit(
                             process_tenant,
-                            current_app._get_current_object(),  # type: ignore[attr-defined]
+                            current_app._get_current_object(),  # type: ignore
                             tenant_id,
                         )
                     )
@@ -242,7 +242,7 @@ class PluginMigration:
                 if data.get("type") == "tool":
                     provider_name = data.get("provider_name")
                     provider_type = data.get("provider_type")
-                    if provider_name not in excluded_providers and provider_type == ToolProviderType.BUILT_IN.value:
+                    if provider_name not in excluded_providers and provider_type == ToolProviderType.BUILT_IN:
                         result.append(ToolProviderID(provider_name).plugin_id)

         return result
@@ -269,9 +269,9 @@ class PluginMigration:
             for tool in agent_config["tools"]:
                 if isinstance(tool, dict):
                     try:
-                        tool_entity = AgentToolEntity(**tool)
+                        tool_entity = AgentToolEntity.model_validate(tool)
                         if (
-                            tool_entity.provider_type == ToolProviderType.BUILT_IN.value
+                            tool_entity.provider_type == ToolProviderType.BUILT_IN
                             and tool_entity.provider_id not in excluded_providers
                         ):
                             result.append(ToolProviderID(tool_entity.provider_id).plugin_id)
diff --git a/api/services/plugin/plugin_service.py b/api/services/plugin/plugin_service.py
index 604adeb7b5..525ccc9417 100644
--- a/api/services/plugin/plugin_service.py
+++ b/api/services/plugin/plugin_service.py
@@ -336,6 +336,8 @@ class PluginService:
             pkg,
             verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
         )
+        PluginService._check_plugin_installation_scope(response.verification)
+
         return response

     @staticmethod
@@ -358,6 +360,8 @@ class PluginService:
             pkg,
             verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
         )
+        PluginService._check_plugin_installation_scope(response.verification)
+
         return response

     @staticmethod
@@ -377,6 +381,10 @@ class PluginService:

         manager = PluginInstaller()

+        for plugin_unique_identifier in plugin_unique_identifiers:
+            resp = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier)
+            PluginService._check_plugin_installation_scope(resp.verification)
+
         return manager.install_from_identifiers(
             tenant_id,
             plugin_unique_identifiers,
@@ -393,6 +401,9 @@ class PluginService:
         PluginService._check_marketplace_only_permission()
         manager = PluginInstaller()

+        plugin_decode_response = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier)
+        PluginService._check_plugin_installation_scope(plugin_decode_response.verification)
+
         return manager.install_from_identifiers(
             tenant_id,
             [plugin_unique_identifier],
diff --git a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py
index ca871bcaa1..4ac2e0792b 100644
--- a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py
+++ b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py
@@ -1,7 +1,7 @@
 import yaml
-from flask_login import current_user

 from extensions.ext_database import db
+from libs.login import current_account_with_tenant
 from models.dataset import PipelineCustomizedTemplate
 from services.rag_pipeline.pipeline_template.pipeline_template_base import PipelineTemplateRetrievalBase
 from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType
@@ -13,9 +13,8 @@ class CustomizedPipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
     """

     def get_pipeline_templates(self, language: str) -> dict:
-        result = self.fetch_pipeline_templates_from_customized(
-            tenant_id=current_user.current_tenant_id, language=language
-        )
+        _, current_tenant_id = current_account_with_tenant()
+        result = self.fetch_pipeline_templates_from_customized(tenant_id=current_tenant_id, language=language)
         return result

     def get_pipeline_template_detail(self, template_id: str):
diff --git a/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py b/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py
index ec91f79606..908f9a2684 100644
--- a/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py
+++ b/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py
@@ -74,5 +74,4 @@ class DatabasePipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
             "chunk_structure": pipeline_template.chunk_structure,
             "export_data": pipeline_template.yaml_content,
             "graph": graph_data,
-            "created_by": pipeline_template.created_user_name,
         }
diff --git a/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py b/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py
index 8f96842337..571ca6c7a6 100644
--- a/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py
+++ b/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py
@@ -1,6 +1,6 @@
 import logging

-import requests
+import httpx

 from configs import dify_config
 from services.rag_pipeline.pipeline_template.database.database_retrieval import DatabasePipelineTemplateRetrieval
@@ -43,7 +43,7 @@ class RemotePipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
         """
         domain = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN
         url = f"{domain}/pipeline-templates/{template_id}"
-        response = requests.get(url, timeout=(3, 10))
+        response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0))
         if response.status_code != 200:
             return None
         data: dict = response.json()
@@ -58,7 +58,7 @@ class RemotePipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
         """
         domain = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN
         url = f"{domain}/pipeline-templates?language={language}"
-        response = requests.get(url, timeout=(3, 10))
+        response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0))
         if response.status_code != 200:
             raise ValueError(f"fetch pipeline templates failed, status code: {response.status_code}")
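Migration note for the two timeout changes above: `requests` interprets a tuple as `(connect, read)`, so `timeout=(3, 10)` meant a 3s connect / 10s read budget, while `httpx.Timeout(10.0, connect=3.0)` sets 10s as the default for read/write/pool and overrides connect to 3s, an equivalent configuration. Sketch:

    import httpx

    # requests equivalent: requests.get(url, timeout=(3, 10))
    timeout = httpx.Timeout(10.0, connect=3.0)  # connect=3s; read/write/pool default to 10s
    response = httpx.get("https://example.com/pipeline-templates", timeout=timeout)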
a/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py +++ b/api/services/rag_pipeline/pipeline_template/database/database_retrieval.py @@ -74,5 +74,4 @@ class DatabasePipelineTemplateRetrieval(PipelineTemplateRetrievalBase): "chunk_structure": pipeline_template.chunk_structure, "export_data": pipeline_template.yaml_content, "graph": graph_data, - "created_by": pipeline_template.created_user_name, } diff --git a/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py b/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py index 8f96842337..571ca6c7a6 100644 --- a/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py +++ b/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py @@ -1,6 +1,6 @@ import logging -import requests +import httpx from configs import dify_config from services.rag_pipeline.pipeline_template.database.database_retrieval import DatabasePipelineTemplateRetrieval @@ -43,7 +43,7 @@ class RemotePipelineTemplateRetrieval(PipelineTemplateRetrievalBase): """ domain = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/pipeline-templates/{template_id}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: return None data: dict = response.json() @@ -58,7 +58,7 @@ class RemotePipelineTemplateRetrieval(PipelineTemplateRetrievalBase): """ domain = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/pipeline-templates?language={language}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: raise ValueError(f"fetch pipeline templates failed, status code: {response.status_code}") diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py index fdaaa73bcc..50dec458a9 100644 --- a/api/services/rag_pipeline/rag_pipeline.py +++ b/api/services/rag_pipeline/rag_pipeline.py @@ -9,7 +9,7 @@ from typing import Any, Union, cast from uuid import uuid4 from flask_login import current_user -from sqlalchemy import func, or_, select +from sqlalchemy import func, select from sqlalchemy.orm import Session, sessionmaker import contexts @@ -37,7 +37,6 @@ from core.rag.entities.event import ( from core.repositories.factory import DifyCoreRepositoryFactory from core.repositories.sqlalchemy_workflow_node_execution_repository import SQLAlchemyWorkflowNodeExecutionRepository from core.variables.variables import Variable -from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import ( WorkflowNodeExecution, WorkflowNodeExecutionStatus, @@ -50,11 +49,12 @@ from core.workflow.node_events.base import NodeRunResult from core.workflow.nodes.base.node import Node from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING from core.workflow.repositories.workflow_node_execution_repository import OrderConfig +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.dataset import ( # type: ignore Dataset, Document, @@ -94,6 +94,7 @@ class RagPipelineService: self._node_execution_service_repo 
= DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository( session_maker ) + self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) @classmethod def get_pipeline_templates(cls, type: str = "built-in", language: str = "en-US") -> dict: @@ -358,7 +359,7 @@ class RagPipelineService: for node in nodes: if node.get("data", {}).get("type") == "knowledge-index": knowledge_configuration = node.get("data", {}) - knowledge_configuration = KnowledgeConfiguration(**knowledge_configuration) + knowledge_configuration = KnowledgeConfiguration.model_validate(knowledge_configuration) # update dataset dataset = pipeline.retrieve_dataset(session=session) @@ -873,7 +874,7 @@ class RagPipelineService: variable_pool = node_instance.graph_runtime_state.variable_pool invoke_from = variable_pool.get(["sys", SystemVariableKey.INVOKE_FROM]) if invoke_from: - if invoke_from.value == InvokeFrom.PUBLISHED.value: + if invoke_from.value == InvokeFrom.PUBLISHED: document_id = variable_pool.get(["sys", SystemVariableKey.DOCUMENT_ID]) if document_id: document = db.session.query(Document).where(Document.id == document_id.value).first() @@ -1015,48 +1016,21 @@ class RagPipelineService: :param args: request args """ limit = int(args.get("limit", 20)) + last_id = args.get("last_id") - base_query = db.session.query(WorkflowRun).where( - WorkflowRun.tenant_id == pipeline.tenant_id, - WorkflowRun.app_id == pipeline.id, - or_( - WorkflowRun.triggered_from == WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN.value, - WorkflowRun.triggered_from == WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING.value, - ), + triggered_from_values = [ + WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN, + WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING, + ] + + return self._workflow_run_repo.get_paginated_workflow_runs( + tenant_id=pipeline.tenant_id, + app_id=pipeline.id, + triggered_from=triggered_from_values, + limit=limit, + last_id=last_id, ) - if args.get("last_id"): - last_workflow_run = base_query.where( - WorkflowRun.id == args.get("last_id"), - ).first() - - if not last_workflow_run: - raise ValueError("Last workflow run not exists") - - workflow_runs = ( - base_query.where( - WorkflowRun.created_at < last_workflow_run.created_at, WorkflowRun.id != last_workflow_run.id - ) - .order_by(WorkflowRun.created_at.desc()) - .limit(limit) - .all() - ) - else: - workflow_runs = base_query.order_by(WorkflowRun.created_at.desc()).limit(limit).all() - - has_more = False - if len(workflow_runs) == limit: - current_page_first_workflow_run = workflow_runs[-1] - rest_count = base_query.where( - WorkflowRun.created_at < current_page_first_workflow_run.created_at, - WorkflowRun.id != current_page_first_workflow_run.id, - ).count() - - if rest_count > 0: - has_more = True - - return InfiniteScrollPagination(data=workflow_runs, limit=limit, has_more=has_more) - def get_rag_pipeline_workflow_run(self, pipeline: Pipeline, run_id: str) -> WorkflowRun | None: """ Get workflow run detail @@ -1064,18 +1038,12 @@ class RagPipelineService: :param app_model: app model :param run_id: workflow run id """ - workflow_run = ( - db.session.query(WorkflowRun) - .where( - WorkflowRun.tenant_id == pipeline.tenant_id, - WorkflowRun.app_id == pipeline.id, - WorkflowRun.id == run_id, - ) - .first() + return self._workflow_run_repo.get_workflow_run_by_id( + tenant_id=pipeline.tenant_id, + app_id=pipeline.id, + run_id=run_id, ) - return workflow_run - def get_rag_pipeline_workflow_run_node_executions( self, pipeline: Pipeline, diff --git 
a/api/services/rag_pipeline/rag_pipeline_dsl_service.py b/api/services/rag_pipeline/rag_pipeline_dsl_service.py index f74de1bcab..c02fad4dc6 100644 --- a/api/services/rag_pipeline/rag_pipeline_dsl_service.py +++ b/api/services/rag_pipeline/rag_pipeline_dsl_service.py @@ -288,7 +288,7 @@ class RagPipelineDslService: dataset_id = None for node in nodes: if node.get("data", {}).get("type") == "knowledge-index": - knowledge_configuration = KnowledgeConfiguration(**node.get("data", {})) + knowledge_configuration = KnowledgeConfiguration.model_validate(node.get("data", {})) if ( dataset and pipeline.is_published @@ -426,7 +426,7 @@ class RagPipelineDslService: dataset_id = None for node in nodes: if node.get("data", {}).get("type") == "knowledge-index": - knowledge_configuration = KnowledgeConfiguration(**node.get("data", {})) + knowledge_configuration = KnowledgeConfiguration.model_validate(node.get("data", {})) if not dataset: dataset = Dataset( tenant_id=account.current_tenant_id, @@ -556,7 +556,7 @@ class RagPipelineDslService: graph = workflow_data.get("graph", {}) for node in graph.get("nodes", []): - if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL.value: + if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL: dataset_ids = node["data"].get("dataset_ids", []) node["data"]["dataset_ids"] = [ decrypted_id @@ -613,7 +613,7 @@ class RagPipelineDslService: tenant_id=pipeline.tenant_id, app_id=pipeline.id, features="{}", - type=WorkflowType.RAG_PIPELINE.value, + type=WorkflowType.RAG_PIPELINE, version="draft", graph=json.dumps(graph), created_by=account.id, @@ -689,17 +689,17 @@ class RagPipelineDslService: if not node_data: continue data_type = node_data.get("type", "") - if data_type == NodeType.KNOWLEDGE_RETRIEVAL.value: + if data_type == NodeType.KNOWLEDGE_RETRIEVAL: dataset_ids = node_data.get("dataset_ids", []) node["data"]["dataset_ids"] = [ self.encrypt_dataset_id(dataset_id=dataset_id, tenant_id=pipeline.tenant_id) for dataset_id in dataset_ids ] # filter credential id from tool node - if not include_secret and data_type == NodeType.TOOL.value: + if not include_secret and data_type == NodeType.TOOL: node_data.pop("credential_id", None) # filter credential id from agent node - if not include_secret and data_type == NodeType.AGENT.value: + if not include_secret and data_type == NodeType.AGENT: for tool in node_data.get("agent_parameters", {}).get("tools", {}).get("value", []): tool.pop("credential_id", None) @@ -733,36 +733,36 @@ class RagPipelineDslService: try: typ = node.get("data", {}).get("type") match typ: - case NodeType.TOOL.value: - tool_entity = ToolNodeData(**node["data"]) + case NodeType.TOOL: + tool_entity = ToolNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id), ) - case NodeType.DATASOURCE.value: - datasource_entity = DatasourceNodeData(**node["data"]) + case NodeType.DATASOURCE: + datasource_entity = DatasourceNodeData.model_validate(node["data"]) if datasource_entity.provider_type != "local_file": dependencies.append(datasource_entity.plugin_id) - case NodeType.LLM.value: - llm_entity = LLMNodeData(**node["data"]) + case NodeType.LLM: + llm_entity = LLMNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider), ) - case NodeType.QUESTION_CLASSIFIER.value: - question_classifier_entity = QuestionClassifierNodeData(**node["data"]) + case 
NodeType.QUESTION_CLASSIFIER: + question_classifier_entity = QuestionClassifierNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency( question_classifier_entity.model.provider ), ) - case NodeType.PARAMETER_EXTRACTOR.value: - parameter_extractor_entity = ParameterExtractorNodeData(**node["data"]) + case NodeType.PARAMETER_EXTRACTOR: + parameter_extractor_entity = ParameterExtractorNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency( parameter_extractor_entity.model.provider ), ) - case NodeType.KNOWLEDGE_INDEX.value: - knowledge_index_entity = KnowledgeConfiguration(**node["data"]) + case NodeType.KNOWLEDGE_INDEX: + knowledge_index_entity = KnowledgeConfiguration.model_validate(node["data"]) if knowledge_index_entity.indexing_technique == "high_quality": if knowledge_index_entity.embedding_model_provider: dependencies.append( @@ -782,8 +782,8 @@ class RagPipelineDslService: knowledge_index_entity.retrieval_model.reranking_model.reranking_provider_name ), ) - case NodeType.KNOWLEDGE_RETRIEVAL.value: - knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"]) + case NodeType.KNOWLEDGE_RETRIEVAL: + knowledge_retrieval_entity = KnowledgeRetrievalNodeData.model_validate(node["data"]) if knowledge_retrieval_entity.retrieval_mode == "multiple": if knowledge_retrieval_entity.multiple_retrieval_config: if ( @@ -873,7 +873,7 @@ class RagPipelineDslService: """ Returns the leaked dependencies in current workspace """ - dependencies = [PluginDependency(**dep) for dep in dsl_dependencies] + dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies] if not dependencies: return [] @@ -927,7 +927,7 @@ class RagPipelineDslService: account = cast(Account, current_user) rag_pipeline_import_info: RagPipelineImportInfo = self.import_rag_pipeline( account=account, - import_mode=ImportMode.YAML_CONTENT.value, + import_mode=ImportMode.YAML_CONTENT, yaml_content=rag_pipeline_dataset_create_entity.yaml_content, dataset=None, dataset_name=rag_pipeline_dataset_create_entity.name, diff --git a/api/services/rag_pipeline/rag_pipeline_transform_service.py b/api/services/rag_pipeline/rag_pipeline_transform_service.py index db9508824b..d79ab71668 100644 --- a/api/services/rag_pipeline/rag_pipeline_transform_service.py +++ b/api/services/rag_pipeline/rag_pipeline_transform_service.py @@ -9,6 +9,7 @@ from flask_login import current_user from constants import DOCUMENT_EXTENSIONS from core.plugin.impl.plugin import PluginInstaller +from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from factories import variable_factory from models.dataset import Dataset, Document, DocumentPipelineExecutionLog, Pipeline @@ -149,23 +150,22 @@ class RagPipelineTransformService: file_extensions = node.get("data", {}).get("fileExtensions", []) if not file_extensions: return node - file_extensions = [file_extension.lower() for file_extension in file_extensions] - node["data"]["fileExtensions"] = DOCUMENT_EXTENSIONS + node["data"]["fileExtensions"] = [ext.lower() for ext in file_extensions if ext in DOCUMENT_EXTENSIONS] return node def _deal_knowledge_index( self, dataset: Dataset, doc_form: str, indexing_technique: str | None, retrieval_model: dict, node: dict ): knowledge_configuration_dict = node.get("data", {}) - knowledge_configuration = KnowledgeConfiguration(**knowledge_configuration_dict) + knowledge_configuration = 
KnowledgeConfiguration.model_validate(knowledge_configuration_dict) if indexing_technique == "high_quality": knowledge_configuration.embedding_model = dataset.embedding_model knowledge_configuration.embedding_model_provider = dataset.embedding_model_provider if retrieval_model: - retrieval_setting = RetrievalSetting(**retrieval_model) + retrieval_setting = RetrievalSetting.model_validate(retrieval_model) if indexing_technique == "economy": - retrieval_setting.search_method = "keyword_search" + retrieval_setting.search_method = RetrievalMethod.KEYWORD_SEARCH knowledge_configuration.retrieval_model = retrieval_setting else: dataset.retrieval_model = knowledge_configuration.retrieval_model.model_dump() @@ -215,7 +215,7 @@ class RagPipelineTransformService: tenant_id=pipeline.tenant_id, app_id=pipeline.id, features="{}", - type=WorkflowType.RAG_PIPELINE.value, + type=WorkflowType.RAG_PIPELINE, version="draft", graph=json.dumps(graph), created_by=current_user.id, @@ -227,7 +227,7 @@ class RagPipelineTransformService: tenant_id=pipeline.tenant_id, app_id=pipeline.id, features="{}", - type=WorkflowType.RAG_PIPELINE.value, + type=WorkflowType.RAG_PIPELINE, version=str(datetime.now(UTC).replace(tzinfo=None)), graph=json.dumps(graph), created_by=current_user.id, diff --git a/api/services/recommend_app/remote/remote_retrieval.py b/api/services/recommend_app/remote/remote_retrieval.py index 2d57769f63..b217c9026a 100644 --- a/api/services/recommend_app/remote/remote_retrieval.py +++ b/api/services/recommend_app/remote/remote_retrieval.py @@ -1,6 +1,6 @@ import logging -import requests +import httpx from configs import dify_config from services.recommend_app.buildin.buildin_retrieval import BuildInRecommendAppRetrieval @@ -43,7 +43,7 @@ class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase): """ domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/apps/{app_id}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: return None data: dict = response.json() @@ -58,7 +58,7 @@ class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase): """ domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/apps?language={language}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: raise ValueError(f"fetch recommended apps failed, status code: {response.status_code}") diff --git a/api/services/saved_message_service.py b/api/services/saved_message_service.py index 67a0106bbd..4dd6c8107b 100644 --- a/api/services/saved_message_service.py +++ b/api/services/saved_message_service.py @@ -2,7 +2,7 @@ from typing import Union from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, EndUser from models.web import SavedMessage from services.message_service import MessageService diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index f86d7e51bf..bb024cc846 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -148,7 +148,7 @@ class ApiToolManageService: description=extra_info.get("description", ""), schema_type_str=schema_type, tools_str=json.dumps(jsonable_encoder(tool_bundles)), - credentials_str={}, + 
credentials_str="{}", privacy_policy=privacy_policy, custom_disclaimer=custom_disclaimer, ) @@ -277,7 +277,7 @@ class ApiToolManageService: provider.icon = json.dumps(icon) provider.schema = schema provider.description = extra_info.get("description", "") - provider.schema_type_str = ApiProviderSchemaType.OPENAPI.value + provider.schema_type_str = ApiProviderSchemaType.OPENAPI provider.tools_str = json.dumps(jsonable_encoder(tool_bundles)) provider.privacy_policy = privacy_policy provider.custom_disclaimer = custom_disclaimer @@ -393,7 +393,7 @@ class ApiToolManageService: icon="", schema=schema, description="", - schema_type_str=ApiProviderSchemaType.OPENAPI.value, + schema_type_str=ApiProviderSchemaType.OPENAPI, tools_str=json.dumps(jsonable_encoder(tool_bundles)), credentials_str=json.dumps(credentials), ) diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index 6b0b6b0f0e..0628c8f22e 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -349,14 +349,10 @@ class BuiltinToolManageService: provider_controller = ToolManager.get_builtin_provider(default_provider.provider, tenant_id) credentials: list[ToolProviderCredentialApiEntity] = [] - encrypters = {} for provider in providers: - credential_type = provider.credential_type - if credential_type not in encrypters: - encrypters[credential_type] = BuiltinToolManageService.create_tool_encrypter( - tenant_id, provider, provider.provider, provider_controller - )[0] - encrypter = encrypters[credential_type] + encrypter, _ = BuiltinToolManageService.create_tool_encrypter( + tenant_id, provider, provider.provider, provider_controller + ) decrypt_credential = encrypter.mask_tool_credentials(encrypter.decrypt(provider.credentials)) credential_entity = ToolTransformService.convert_builtin_provider_to_credential_entity( provider=provider, @@ -548,8 +544,8 @@ class BuiltinToolManageService: try: # handle include, exclude if is_filtered( - include_set=dify_config.POSITION_TOOL_INCLUDES_SET, # type: ignore - exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, # type: ignore + include_set=dify_config.POSITION_TOOL_INCLUDES_SET, + exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, data=provider_controller, name_func=lambda x: x.entity.identity.name, ): @@ -687,7 +683,7 @@ class BuiltinToolManageService: cache=NoOpProviderCredentialCache(), ) original_params = encrypter.decrypt(custom_client_params.oauth_params) - new_params: dict = { + new_params = { key: value if value != HIDDEN_VALUE else original_params.get(key, UNKNOWN_VALUE) for key, value in client_params.items() } diff --git a/api/services/tools/mcp_tools_manage_service.py b/api/services/tools/mcp_tools_manage_service.py index dd626dd615..e219bd4ce9 100644 --- a/api/services/tools/mcp_tools_manage_service.py +++ b/api/services/tools/mcp_tools_manage_service.py @@ -1,86 +1,118 @@ import hashlib import json +import logging from datetime import datetime -from typing import Any, cast +from enum import StrEnum +from typing import Any +from urllib.parse import urlparse -from sqlalchemy import or_ +from pydantic import BaseModel, Field +from sqlalchemy import or_, select from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session +from core.entities.mcp_provider import MCPAuthentication, MCPConfiguration, MCPProviderEntity from core.helper import encrypter from core.helper.provider_cache import NoOpProviderCredentialCache +from core.mcp.auth.auth_flow 
import auth +from core.mcp.auth_client import MCPClientWithAuthRetry from core.mcp.error import MCPAuthError, MCPError -from core.mcp.mcp_client import MCPClient from core.tools.entities.api_entities import ToolProviderApiEntity -from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolProviderType -from core.tools.mcp_tool.provider import MCPToolProviderController from core.tools.utils.encryption import ProviderConfigEncrypter -from extensions.ext_database import db from models.tools import MCPToolProvider from services.tools.tools_transform_service import ToolTransformService +logger = logging.getLogger(__name__) + +# Constants UNCHANGED_SERVER_URL_PLACEHOLDER = "[__HIDDEN__]" +CLIENT_NAME = "Dify" +EMPTY_TOOLS_JSON = "[]" +EMPTY_CREDENTIALS_JSON = "{}" + + +class OAuthDataType(StrEnum): + """Types of OAuth data that can be saved.""" + + TOKENS = "tokens" + CLIENT_INFO = "client_info" + CODE_VERIFIER = "code_verifier" + MIXED = "mixed" + + +class ReconnectResult(BaseModel): + """Result of reconnecting to an MCP provider""" + + authed: bool = Field(description="Whether the provider is authenticated") + tools: str = Field(description="JSON string of tool list") + encrypted_credentials: str = Field(description="JSON string of encrypted credentials") + + +class ServerUrlValidationResult(BaseModel): + """Result of server URL validation check""" + + needs_validation: bool + validation_passed: bool = False + reconnect_result: ReconnectResult | None = None + encrypted_server_url: str | None = None + server_url_hash: str | None = None + + @property + def should_update_server_url(self) -> bool: + """Check if server URL should be updated based on validation result""" + return self.needs_validation and self.validation_passed and self.reconnect_result is not None class MCPToolManageService: - """ - Service class for managing mcp tools. - """ + """Service class for managing MCP tools and providers.""" - @staticmethod - def _encrypt_headers(headers: dict[str, str], tenant_id: str) -> dict[str, str]: + def __init__(self, session: Session): + self._session = session + + # ========== Provider CRUD Operations ========== + + def get_provider( + self, *, provider_id: str | None = None, server_identifier: str | None = None, tenant_id: str + ) -> MCPToolProvider: """ - Encrypt headers using ProviderConfigEncrypter with all headers as SECRET_INPUT. + Get MCP provider by ID or server identifier. 
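+        Exactly one of provider_id or server_identifier is expected; when both
+        are supplied, server_identifier takes precedence.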
Args: - headers: Dictionary of headers to encrypt - tenant_id: Tenant ID for encryption + provider_id: Provider ID (UUID) + server_identifier: Server identifier + tenant_id: Tenant ID Returns: - Dictionary with all headers encrypted + MCPToolProvider instance + + Raises: + ValueError: If provider not found """ - if not headers: - return {} + if server_identifier: + stmt = select(MCPToolProvider).where( + MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == server_identifier + ) + else: + stmt = select(MCPToolProvider).where( + MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.id == provider_id + ) - from core.entities.provider_entities import BasicProviderConfig - from core.helper.provider_cache import NoOpProviderCredentialCache - from core.tools.utils.encryption import create_provider_encrypter - - # Create dynamic config for all headers as SECRET_INPUT - config = [BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=key) for key in headers] - - encrypter_instance, _ = create_provider_encrypter( - tenant_id=tenant_id, - config=config, - cache=NoOpProviderCredentialCache(), - ) - - return cast(dict[str, str], encrypter_instance.encrypt(headers)) - - @staticmethod - def get_mcp_provider_by_provider_id(provider_id: str, tenant_id: str) -> MCPToolProvider: - res = ( - db.session.query(MCPToolProvider) - .where(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.id == provider_id) - .first() - ) - if not res: + provider = self._session.scalar(stmt) + if not provider: raise ValueError("MCP tool not found") - return res + return provider - @staticmethod - def get_mcp_provider_by_server_identifier(server_identifier: str, tenant_id: str) -> MCPToolProvider: - res = ( - db.session.query(MCPToolProvider) - .where(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == server_identifier) - .first() - ) - if not res: - raise ValueError("MCP tool not found") - return res + def get_provider_entity(self, provider_id: str, tenant_id: str, by_server_id: bool = False) -> MCPProviderEntity: + """Get provider entity by ID or server identifier.""" + if by_server_id: + db_provider = self.get_provider(server_identifier=provider_id, tenant_id=tenant_id) + else: + db_provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) + return db_provider.to_entity() - @staticmethod - def create_mcp_provider( + def create_provider( + self, + *, tenant_id: str, name: str, server_url: str, @@ -89,37 +121,30 @@ class MCPToolManageService: icon_type: str, icon_background: str, server_identifier: str, - timeout: float, - sse_read_timeout: float, + configuration: MCPConfiguration, + authentication: MCPAuthentication | None = None, headers: dict[str, str] | None = None, ) -> ToolProviderApiEntity: - server_url_hash = hashlib.sha256(server_url.encode()).hexdigest() - existing_provider = ( - db.session.query(MCPToolProvider) - .where( - MCPToolProvider.tenant_id == tenant_id, - or_( - MCPToolProvider.name == name, - MCPToolProvider.server_url_hash == server_url_hash, - MCPToolProvider.server_identifier == server_identifier, - ), - ) - .first() - ) - if existing_provider: - if existing_provider.name == name: - raise ValueError(f"MCP tool {name} already exists") - if existing_provider.server_url_hash == server_url_hash: - raise ValueError(f"MCP tool {server_url} already exists") - if existing_provider.server_identifier == server_identifier: - raise ValueError(f"MCP tool {server_identifier} already exists") - encrypted_server_url = 
encrypter.encrypt_token(tenant_id, server_url) - # Encrypt headers - encrypted_headers = None - if headers: - encrypted_headers_dict = MCPToolManageService._encrypt_headers(headers, tenant_id) - encrypted_headers = json.dumps(encrypted_headers_dict) + """Create a new MCP provider.""" + # Validate URL format + if not self._is_valid_url(server_url): + raise ValueError("Server URL is not valid.") + server_url_hash = hashlib.sha256(server_url.encode()).hexdigest() + + # Check for existing provider + self._check_provider_exists(tenant_id, name, server_url_hash, server_identifier) + + # Encrypt sensitive data + encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url) + encrypted_headers = self._prepare_encrypted_dict(headers, tenant_id) if headers else None + encrypted_credentials = None + if authentication is not None and authentication.client_id: + encrypted_credentials = self._build_and_encrypt_credentials( + authentication.client_id, authentication.client_secret, tenant_id + ) + + # Create provider mcp_tool = MCPToolProvider( tenant_id=tenant_id, name=name, @@ -127,91 +152,23 @@ class MCPToolManageService: server_url_hash=server_url_hash, user_id=user_id, authed=False, - tools="[]", - icon=json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon, + tools=EMPTY_TOOLS_JSON, + icon=self._prepare_icon(icon, icon_type, icon_background), server_identifier=server_identifier, - timeout=timeout, - sse_read_timeout=sse_read_timeout, + timeout=configuration.timeout, + sse_read_timeout=configuration.sse_read_timeout, encrypted_headers=encrypted_headers, - ) - db.session.add(mcp_tool) - db.session.commit() - return ToolTransformService.mcp_provider_to_user_provider(mcp_tool, for_list=True) - - @staticmethod - def retrieve_mcp_tools(tenant_id: str, for_list: bool = False) -> list[ToolProviderApiEntity]: - mcp_providers = ( - db.session.query(MCPToolProvider) - .where(MCPToolProvider.tenant_id == tenant_id) - .order_by(MCPToolProvider.name) - .all() - ) - return [ - ToolTransformService.mcp_provider_to_user_provider(mcp_provider, for_list=for_list) - for mcp_provider in mcp_providers - ] - - @classmethod - def list_mcp_tool_from_remote_server(cls, tenant_id: str, provider_id: str) -> ToolProviderApiEntity: - mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) - server_url = mcp_provider.decrypted_server_url - authed = mcp_provider.authed - headers = mcp_provider.decrypted_headers - timeout = mcp_provider.timeout - sse_read_timeout = mcp_provider.sse_read_timeout - - try: - with MCPClient( - server_url, - provider_id, - tenant_id, - authed=authed, - for_list=True, - headers=headers, - timeout=timeout, - sse_read_timeout=sse_read_timeout, - ) as mcp_client: - tools = mcp_client.list_tools() - except MCPAuthError: - raise ValueError("Please auth the tool first") - except MCPError as e: - raise ValueError(f"Failed to connect to MCP server: {e}") - - try: - mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) - mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools]) - mcp_provider.authed = True - mcp_provider.updated_at = datetime.now() - db.session.commit() - except Exception: - db.session.rollback() - raise - - user = mcp_provider.load_user() - return ToolProviderApiEntity( - id=mcp_provider.id, - name=mcp_provider.name, - tools=ToolTransformService.mcp_tool_to_user_tool(mcp_provider, tools), - type=ToolProviderType.MCP, - icon=mcp_provider.icon, - author=user.name if user else "Anonymous", - 
server_url=mcp_provider.masked_server_url, - updated_at=int(mcp_provider.updated_at.timestamp()), - description=I18nObject(en_US="", zh_Hans=""), - label=I18nObject(en_US=mcp_provider.name, zh_Hans=mcp_provider.name), - plugin_unique_identifier=mcp_provider.server_identifier, + encrypted_credentials=encrypted_credentials, ) - @classmethod - def delete_mcp_tool(cls, tenant_id: str, provider_id: str): - mcp_tool = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) + self._session.add(mcp_tool) + self._session.flush() + mcp_providers = ToolTransformService.mcp_provider_to_user_provider(mcp_tool, for_list=True) + return mcp_providers - db.session.delete(mcp_tool) - db.session.commit() - - @classmethod - def update_mcp_provider( - cls, + def update_provider( + self, + *, tenant_id: str, provider_id: str, name: str, @@ -220,129 +177,546 @@ class MCPToolManageService: icon_type: str, icon_background: str, server_identifier: str, - timeout: float | None = None, - sse_read_timeout: float | None = None, headers: dict[str, str] | None = None, - ): - mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) + configuration: MCPConfiguration, + authentication: MCPAuthentication | None = None, + validation_result: ServerUrlValidationResult | None = None, + ) -> None: + """ + Update an MCP provider. - reconnect_result = None + Args: + validation_result: Pre-validation result from validate_server_url_change. + If provided and contains reconnect_result, it will be used + instead of performing network operations. + """ + mcp_provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) + + # Check for duplicate name (excluding current provider) + if name != mcp_provider.name: + stmt = select(MCPToolProvider).where( + MCPToolProvider.tenant_id == tenant_id, + MCPToolProvider.name == name, + MCPToolProvider.id != provider_id, + ) + existing_provider = self._session.scalar(stmt) + if existing_provider: + raise ValueError(f"MCP tool {name} already exists") + + # Get URL update data from validation result encrypted_server_url = None server_url_hash = None + reconnect_result = None - if UNCHANGED_SERVER_URL_PLACEHOLDER not in server_url: - encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url) - server_url_hash = hashlib.sha256(server_url.encode()).hexdigest() - - if server_url_hash != mcp_provider.server_url_hash: - reconnect_result = cls._re_connect_mcp_provider(server_url, provider_id, tenant_id) + if validation_result and validation_result.encrypted_server_url: + # Use all data from validation result + encrypted_server_url = validation_result.encrypted_server_url + server_url_hash = validation_result.server_url_hash + reconnect_result = validation_result.reconnect_result try: + # Update basic fields mcp_provider.updated_at = datetime.now() mcp_provider.name = name - mcp_provider.icon = ( - json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon - ) + mcp_provider.icon = self._prepare_icon(icon, icon_type, icon_background) mcp_provider.server_identifier = server_identifier - if encrypted_server_url is not None and server_url_hash is not None: + # Update server URL if changed + if encrypted_server_url and server_url_hash: mcp_provider.server_url = encrypted_server_url mcp_provider.server_url_hash = server_url_hash if reconnect_result: - mcp_provider.authed = reconnect_result["authed"] - mcp_provider.tools = reconnect_result["tools"] - mcp_provider.encrypted_credentials = reconnect_result["encrypted_credentials"] + 
mcp_provider.authed = reconnect_result.authed + mcp_provider.tools = reconnect_result.tools + mcp_provider.encrypted_credentials = reconnect_result.encrypted_credentials - if timeout is not None: - mcp_provider.timeout = timeout - if sse_read_timeout is not None: - mcp_provider.sse_read_timeout = sse_read_timeout + # Update optional configuration fields + self._update_optional_fields(mcp_provider, configuration) + + # Update headers if provided if headers is not None: - # Merge masked headers from frontend with existing real values - if headers: - # existing decrypted and masked headers - existing_decrypted = mcp_provider.decrypted_headers - existing_masked = mcp_provider.masked_headers + mcp_provider.encrypted_headers = self._process_headers(headers, mcp_provider, tenant_id) - # Build final headers: if value equals masked existing, keep original decrypted value - final_headers: dict[str, str] = {} - for key, incoming_value in headers.items(): - if ( - key in existing_masked - and key in existing_decrypted - and isinstance(incoming_value, str) - and incoming_value == existing_masked.get(key) - ): - # unchanged, use original decrypted value - final_headers[key] = str(existing_decrypted[key]) - else: - final_headers[key] = incoming_value + # Update credentials if provided + if authentication and authentication.client_id: + mcp_provider.encrypted_credentials = self._process_credentials(authentication, mcp_provider, tenant_id) - encrypted_headers_dict = MCPToolManageService._encrypt_headers(final_headers, tenant_id) - mcp_provider.encrypted_headers = json.dumps(encrypted_headers_dict) - else: - # Explicitly clear headers if empty dict passed - mcp_provider.encrypted_headers = None - db.session.commit() + # Flush changes to database + self._session.flush() except IntegrityError as e: - db.session.rollback() - error_msg = str(e.orig) - if "unique_mcp_provider_name" in error_msg: - raise ValueError(f"MCP tool {name} already exists") - if "unique_mcp_provider_server_url" in error_msg: - raise ValueError(f"MCP tool {server_url} already exists") - if "unique_mcp_provider_server_identifier" in error_msg: - raise ValueError(f"MCP tool {server_identifier} already exists") - raise - except Exception: - db.session.rollback() - raise + self._handle_integrity_error(e, name, server_url, server_identifier) - @classmethod - def update_mcp_provider_credentials( - cls, mcp_provider: MCPToolProvider, credentials: dict[str, Any], authed: bool = False - ): - provider_controller = MCPToolProviderController.from_db(mcp_provider) + def delete_provider(self, *, tenant_id: str, provider_id: str) -> None: + """Delete an MCP provider.""" + mcp_tool = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) + self._session.delete(mcp_tool) + + def list_providers( + self, *, tenant_id: str, for_list: bool = False, include_sensitive: bool = True + ) -> list[ToolProviderApiEntity]: + """List all MCP providers for a tenant. 
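+        Providers are returned ordered by name, and user display names are
+        resolved with a single batch query to avoid the N+1 pattern.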
+ + Args: + tenant_id: Tenant ID + for_list: If True, return provider ID; if False, return server identifier + include_sensitive: If False, skip expensive decryption operations (default: True for backward compatibility) + """ + from models.account import Account + + stmt = select(MCPToolProvider).where(MCPToolProvider.tenant_id == tenant_id).order_by(MCPToolProvider.name) + mcp_providers = self._session.scalars(stmt).all() + + if not mcp_providers: + return [] + + # Batch query all users to avoid N+1 problem + user_ids = {provider.user_id for provider in mcp_providers} + users = self._session.query(Account).where(Account.id.in_(user_ids)).all() + user_name_map = {user.id: user.name for user in users} + + return [ + ToolTransformService.mcp_provider_to_user_provider( + provider, + for_list=for_list, + user_name=user_name_map.get(provider.user_id), + include_sensitive=include_sensitive, + ) + for provider in mcp_providers + ] + + # ========== Tool Operations ========== + + def list_provider_tools(self, *, tenant_id: str, provider_id: str) -> ToolProviderApiEntity: + """List tools from remote MCP server.""" + # Load provider and convert to entity + db_provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) + provider_entity = db_provider.to_entity() + + # Verify authentication + if not provider_entity.authed: + raise ValueError("Please auth the tool first") + + # Prepare headers with auth token + headers = self._prepare_auth_headers(provider_entity) + + # Retrieve tools from remote server + server_url = provider_entity.decrypt_server_url() + try: + tools = self._retrieve_remote_mcp_tools(server_url, headers, provider_entity) + except MCPError as e: + raise ValueError(f"Failed to connect to MCP server: {e}") + + # Update database with retrieved tools + db_provider.tools = json.dumps([tool.model_dump() for tool in tools]) + db_provider.authed = True + db_provider.updated_at = datetime.now() + self._session.flush() + + # Build API response + return self._build_tool_provider_response(db_provider, provider_entity, tools) + + # ========== OAuth and Credentials Operations ========== + + def update_provider_credentials( + self, *, provider_id: str, tenant_id: str, credentials: dict[str, Any], authed: bool | None = None + ) -> None: + """ + Update provider credentials with encryption. 
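+        Newly supplied values are merged over the existing encrypted credential
+        set; passing authed=False also clears the cached tool list.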
+ + Args: + provider_id: Provider ID + tenant_id: Tenant ID + credentials: Credentials to save + authed: Whether provider is authenticated (None means keep current state) + """ + from core.tools.mcp_tool.provider import MCPToolProviderController + + # Get provider from current session + provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) + + # Encrypt new credentials + provider_controller = MCPToolProviderController.from_db(provider) tool_configuration = ProviderConfigEncrypter( - tenant_id=mcp_provider.tenant_id, - config=list(provider_controller.get_credentials_schema()), # ty: ignore [invalid-argument-type] + tenant_id=provider.tenant_id, + config=list(provider_controller.get_credentials_schema()), provider_config_cache=NoOpProviderCredentialCache(), ) - credentials = tool_configuration.encrypt(credentials) - mcp_provider.updated_at = datetime.now() - mcp_provider.encrypted_credentials = json.dumps({**mcp_provider.credentials, **credentials}) - mcp_provider.authed = authed - if not authed: - mcp_provider.tools = "[]" - db.session.commit() + encrypted_credentials = tool_configuration.encrypt(credentials) - @classmethod - def _re_connect_mcp_provider(cls, server_url: str, provider_id: str, tenant_id: str): - # Get the existing provider to access headers and timeout settings - mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) - headers = mcp_provider.decrypted_headers - timeout = mcp_provider.timeout - sse_read_timeout = mcp_provider.sse_read_timeout + # Update provider + provider.updated_at = datetime.now() + provider.encrypted_credentials = json.dumps({**provider.credentials, **encrypted_credentials}) + + if authed is not None: + provider.authed = authed + if not authed: + provider.tools = EMPTY_TOOLS_JSON + + # Flush changes to database + self._session.flush() + + def save_oauth_data( + self, provider_id: str, tenant_id: str, data: dict[str, Any], data_type: OAuthDataType = OAuthDataType.MIXED + ) -> None: + """ + Save OAuth-related data (tokens, client info, code verifier). + + Args: + provider_id: Provider ID + tenant_id: Tenant ID + data: Data to save (tokens, client info, or code verifier) + data_type: Type of OAuth data to save + """ + # Determine if this makes the provider authenticated + authed = ( + data_type == OAuthDataType.TOKENS or (data_type == OAuthDataType.MIXED and "access_token" in data) or None + ) + + # update_provider_credentials will validate provider existence + self.update_provider_credentials(provider_id=provider_id, tenant_id=tenant_id, credentials=data, authed=authed) + + def clear_provider_credentials(self, *, provider_id: str, tenant_id: str) -> None: + """ + Clear all credentials for a provider. 
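+        Empties the stored tool list, wipes the encrypted credentials, and
+        marks the provider as unauthenticated.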
+ + Args: + provider_id: Provider ID + tenant_id: Tenant ID + """ + # Get provider from current session + provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) + + provider.tools = EMPTY_TOOLS_JSON + provider.encrypted_credentials = EMPTY_CREDENTIALS_JSON + provider.updated_at = datetime.now() + provider.authed = False + + # ========== Private Helper Methods ========== + + def _check_provider_exists(self, tenant_id: str, name: str, server_url_hash: str, server_identifier: str) -> None: + """Check if provider with same attributes already exists.""" + stmt = select(MCPToolProvider).where( + MCPToolProvider.tenant_id == tenant_id, + or_( + MCPToolProvider.name == name, + MCPToolProvider.server_url_hash == server_url_hash, + MCPToolProvider.server_identifier == server_identifier, + ), + ) + existing_provider = self._session.scalar(stmt) + + if existing_provider: + if existing_provider.name == name: + raise ValueError(f"MCP tool {name} already exists") + if existing_provider.server_url_hash == server_url_hash: + raise ValueError("MCP tool with this server URL already exists") + if existing_provider.server_identifier == server_identifier: + raise ValueError(f"MCP tool {server_identifier} already exists") + + def _prepare_icon(self, icon: str, icon_type: str, icon_background: str) -> str: + """Prepare icon data for storage.""" + if icon_type == "emoji": + return json.dumps({"content": icon, "background": icon_background}) + return icon + + def _encrypt_dict_fields(self, data: dict[str, Any], secret_fields: list[str], tenant_id: str) -> dict[str, str]: + """Encrypt specified fields in a dictionary. + + Args: + data: Dictionary containing data to encrypt + secret_fields: List of field names to encrypt + tenant_id: Tenant ID for encryption + + Returns: + JSON string of encrypted data + """ + from core.entities.provider_entities import BasicProviderConfig + from core.tools.utils.encryption import create_provider_encrypter + + # Create config for secret fields + config = [ + BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=field) for field in secret_fields + ] + + encrypter_instance, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=config, + cache=NoOpProviderCredentialCache(), + ) + + encrypted_data = encrypter_instance.encrypt(data) + return encrypted_data + + def _prepare_encrypted_dict(self, headers: dict[str, str], tenant_id: str) -> str: + """Encrypt headers and prepare for storage.""" + # All headers are treated as secret + return json.dumps(self._encrypt_dict_fields(headers, list(headers.keys()), tenant_id)) + + def _prepare_auth_headers(self, provider_entity: MCPProviderEntity) -> dict[str, str]: + """Prepare headers with OAuth token if available.""" + headers = provider_entity.decrypt_headers() + tokens = provider_entity.retrieve_tokens() + if tokens: + headers["Authorization"] = f"{tokens.token_type.capitalize()} {tokens.access_token}" + return headers + + def _retrieve_remote_mcp_tools( + self, + server_url: str, + headers: dict[str, str], + provider_entity: MCPProviderEntity, + ): + """Retrieve tools from remote MCP server.""" + with MCPClientWithAuthRetry( + server_url=server_url, + headers=headers, + timeout=provider_entity.timeout, + sse_read_timeout=provider_entity.sse_read_timeout, + provider_entity=provider_entity, + ) as mcp_client: + return mcp_client.list_tools() + + def execute_auth_actions(self, auth_result: Any) -> dict[str, str]: + """ + Execute the actions returned by the auth function. 
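+        Actions that arrive without a provider_id or tenant_id are skipped.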
+ + This method processes the AuthResult and performs the necessary database operations. + + Args: + auth_result: The result from the auth function + + Returns: + The response from the auth result + """ + from core.mcp.entities import AuthAction, AuthActionType + + action: AuthAction + for action in auth_result.actions: + if action.provider_id is None or action.tenant_id is None: + continue + + if action.action_type == AuthActionType.SAVE_CLIENT_INFO: + self.save_oauth_data(action.provider_id, action.tenant_id, action.data, OAuthDataType.CLIENT_INFO) + elif action.action_type == AuthActionType.SAVE_TOKENS: + self.save_oauth_data(action.provider_id, action.tenant_id, action.data, OAuthDataType.TOKENS) + elif action.action_type == AuthActionType.SAVE_CODE_VERIFIER: + self.save_oauth_data(action.provider_id, action.tenant_id, action.data, OAuthDataType.CODE_VERIFIER) + + return auth_result.response + + def auth_with_actions( + self, provider_entity: MCPProviderEntity, authorization_code: str | None = None + ) -> dict[str, str]: + """ + Perform authentication and execute all resulting actions. + + This method is used by MCPClientWithAuthRetry for automatic re-authentication. + + Args: + provider_entity: The MCP provider entity + authorization_code: Optional authorization code + + Returns: + Response dictionary from auth result + """ + auth_result = auth(provider_entity, authorization_code) + return self.execute_auth_actions(auth_result) + + def _reconnect_provider(self, *, server_url: str, provider: MCPToolProvider) -> ReconnectResult: + """Attempt to reconnect to MCP provider with new server URL.""" + provider_entity = provider.to_entity() + headers = provider_entity.headers try: - with MCPClient( - server_url, - provider_id, - tenant_id, - authed=False, - for_list=True, - headers=headers, - timeout=timeout, - sse_read_timeout=sse_read_timeout, - ) as mcp_client: - tools = mcp_client.list_tools() - return { - "authed": True, - "tools": json.dumps([tool.model_dump() for tool in tools]), - "encrypted_credentials": "{}", - } + tools = self._retrieve_remote_mcp_tools(server_url, headers, provider_entity) + return ReconnectResult( + authed=True, + tools=json.dumps([tool.model_dump() for tool in tools]), + encrypted_credentials=EMPTY_CREDENTIALS_JSON, + ) except MCPAuthError: - return {"authed": False, "tools": "[]", "encrypted_credentials": "{}"} + return ReconnectResult(authed=False, tools=EMPTY_TOOLS_JSON, encrypted_credentials=EMPTY_CREDENTIALS_JSON) except MCPError as e: raise ValueError(f"Failed to re-connect MCP server: {e}") from e + + def validate_server_url_change( + self, *, tenant_id: str, provider_id: str, new_server_url: str + ) -> ServerUrlValidationResult: + """ + Validate server URL change by attempting to connect to the new server. + This method should be called BEFORE update_provider to perform network operations + outside of the database transaction. 
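+
+        Example (sketch; variable names are illustrative):
+            result = service.validate_server_url_change(
+                tenant_id=tenant_id, provider_id=provider_id, new_server_url=url
+            )
+            service.update_provider(..., validation_result=result)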
+ + Returns: + ServerUrlValidationResult: Validation result with connection status and tools if successful + """ + # Handle hidden/unchanged URL + if UNCHANGED_SERVER_URL_PLACEHOLDER in new_server_url: + return ServerUrlValidationResult(needs_validation=False) + + # Validate URL format + if not self._is_valid_url(new_server_url): + raise ValueError("Server URL is not valid.") + + # Always encrypt and hash the URL + encrypted_server_url = encrypter.encrypt_token(tenant_id, new_server_url) + new_server_url_hash = hashlib.sha256(new_server_url.encode()).hexdigest() + + # Get current provider + provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) + + # Check if URL is actually different + if new_server_url_hash == provider.server_url_hash: + # URL hasn't changed, but still return the encrypted data + return ServerUrlValidationResult( + needs_validation=False, encrypted_server_url=encrypted_server_url, server_url_hash=new_server_url_hash + ) + + # Perform validation by attempting to connect + reconnect_result = self._reconnect_provider(server_url=new_server_url, provider=provider) + return ServerUrlValidationResult( + needs_validation=True, + validation_passed=True, + reconnect_result=reconnect_result, + encrypted_server_url=encrypted_server_url, + server_url_hash=new_server_url_hash, + ) + + def _build_tool_provider_response( + self, db_provider: MCPToolProvider, provider_entity: MCPProviderEntity, tools: list + ) -> ToolProviderApiEntity: + """Build API response for tool provider.""" + user = db_provider.load_user() + response = provider_entity.to_api_response( + user_name=user.name if user else None, + ) + response["tools"] = ToolTransformService.mcp_tool_to_user_tool(db_provider, tools) + response["plugin_unique_identifier"] = provider_entity.provider_id + return ToolProviderApiEntity(**response) + + def _handle_integrity_error( + self, error: IntegrityError, name: str, server_url: str, server_identifier: str + ) -> None: + """Handle database integrity errors with user-friendly messages.""" + error_msg = str(error.orig) + if "unique_mcp_provider_name" in error_msg: + raise ValueError(f"MCP tool {name} already exists") + if "unique_mcp_provider_server_url" in error_msg: + raise ValueError(f"MCP tool {server_url} already exists") + if "unique_mcp_provider_server_identifier" in error_msg: + raise ValueError(f"MCP tool {server_identifier} already exists") + raise + + def _is_valid_url(self, url: str) -> bool: + """Validate URL format.""" + if not url: + return False + try: + parsed = urlparse(url) + return all([parsed.scheme, parsed.netloc]) and parsed.scheme in ["http", "https"] + except (ValueError, TypeError): + return False + + def _update_optional_fields(self, mcp_provider: MCPToolProvider, configuration: MCPConfiguration) -> None: + """Update optional configuration fields using setattr for cleaner code.""" + field_mapping = {"timeout": configuration.timeout, "sse_read_timeout": configuration.sse_read_timeout} + + for field, value in field_mapping.items(): + if value is not None: + setattr(mcp_provider, field, value) + + def _process_headers(self, headers: dict[str, str], mcp_provider: MCPToolProvider, tenant_id: str) -> str | None: + """Process headers update, handling empty dict to clear headers.""" + if not headers: + return None + + # Merge with existing headers to preserve masked values + final_headers = self._merge_headers_with_masked(incoming_headers=headers, mcp_provider=mcp_provider) + return self._prepare_encrypted_dict(final_headers, tenant_id) + + def 
_process_credentials( + self, authentication: MCPAuthentication, mcp_provider: MCPToolProvider, tenant_id: str + ) -> str: + """Process credentials update, handling masked values.""" + # Merge with existing credentials + final_client_id, final_client_secret = self._merge_credentials_with_masked( + authentication.client_id, authentication.client_secret, mcp_provider + ) + + # Build and encrypt + return self._build_and_encrypt_credentials(final_client_id, final_client_secret, tenant_id) + + def _merge_headers_with_masked( + self, incoming_headers: dict[str, str], mcp_provider: MCPToolProvider + ) -> dict[str, str]: + """Merge incoming headers with existing ones, preserving unchanged masked values. + + Args: + incoming_headers: Headers from frontend (may contain masked values) + mcp_provider: The MCP provider instance + + Returns: + Final headers dict with proper values (original for unchanged masked, new for changed) + """ + mcp_provider_entity = mcp_provider.to_entity() + existing_decrypted = mcp_provider_entity.decrypt_headers() + existing_masked = mcp_provider_entity.masked_headers() + + return { + key: (str(existing_decrypted[key]) if key in existing_masked and value == existing_masked[key] else value) + for key, value in incoming_headers.items() + if key in existing_decrypted or value != existing_masked.get(key) + } + + def _merge_credentials_with_masked( + self, + client_id: str, + client_secret: str | None, + mcp_provider: MCPToolProvider, + ) -> tuple[ + str, + str | None, + ]: + """Merge incoming credentials with existing ones, preserving unchanged masked values. + + Args: + client_id: Client ID from frontend (may be masked) + client_secret: Client secret from frontend (may be masked) + mcp_provider: The MCP provider instance + + Returns: + Tuple of (final_client_id, final_client_secret) + """ + mcp_provider_entity = mcp_provider.to_entity() + existing_decrypted = mcp_provider_entity.decrypt_credentials() + existing_masked = mcp_provider_entity.masked_credentials() + + # Check if client_id is masked and unchanged + final_client_id = client_id + if existing_masked.get("client_id") and client_id == existing_masked["client_id"]: + # Use existing decrypted value + final_client_id = existing_decrypted.get("client_id", client_id) + + # Check if client_secret is masked and unchanged + final_client_secret = client_secret + if existing_masked.get("client_secret") and client_secret == existing_masked["client_secret"]: + # Use existing decrypted value + final_client_secret = existing_decrypted.get("client_secret", client_secret) + + return final_client_id, final_client_secret + + def _build_and_encrypt_credentials(self, client_id: str, client_secret: str | None, tenant_id: str) -> str: + """Build credentials and encrypt sensitive fields.""" + # Create a flat structure with all credential data + credentials_data = { + "client_id": client_id, + "client_name": CLIENT_NAME, + "is_dynamic_registration": False, + } + secret_fields = [] + if client_secret is not None: + credentials_data["encrypted_client_secret"] = client_secret + secret_fields = ["encrypted_client_secret"] + client_info = self._encrypt_dict_fields(credentials_data, secret_fields, tenant_id) + return json.dumps({"client_information": client_info}) diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 6b36ed0eb7..6e95513318 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -3,9 +3,11 @@ import logging from 
collections.abc import Mapping from typing import Any, Union +from pydantic import ValidationError from yarl import URL from configs import dify_config +from core.entities.mcp_provider import MCPConfiguration from core.helper.provider_cache import ToolProviderCredentialsCache from core.mcp.types import Tool as MCPTool from core.plugin.entities.plugin_daemon import PluginDatasourceProviderEntity @@ -50,16 +52,16 @@ class ToolTransformService: URL(dify_config.CONSOLE_API_URL or "/") / "console" / "api" / "workspaces" / "current" / "tool-provider" ) - if provider_type == ToolProviderType.BUILT_IN.value: + if provider_type == ToolProviderType.BUILT_IN: return str(url_prefix / "builtin" / provider_name / "icon") - elif provider_type in {ToolProviderType.API.value, ToolProviderType.WORKFLOW.value}: + elif provider_type in {ToolProviderType.API, ToolProviderType.WORKFLOW}: try: if isinstance(icon, str): return json.loads(icon) return icon except Exception: return {"background": "#252525", "content": "\ud83d\ude01"} - elif provider_type == ToolProviderType.MCP.value: + elif provider_type == ToolProviderType.MCP: return icon return "" @@ -152,7 +154,8 @@ class ToolTransformService: if decrypt_credentials: credentials = db_provider.credentials - + if not db_provider.tenant_id: + raise ValueError(f"Required tenant_id is missing for BuiltinToolProvider with id {db_provider.id}") # init tool configuration encrypter, _ = create_provider_encrypter( tenant_id=db_provider.tenant_id, @@ -231,40 +234,57 @@ class ToolTransformService: ) @staticmethod - def mcp_provider_to_user_provider(db_provider: MCPToolProvider, for_list: bool = False) -> ToolProviderApiEntity: - user = db_provider.load_user() - return ToolProviderApiEntity( - id=db_provider.server_identifier if not for_list else db_provider.id, - author=user.name if user else "Anonymous", - name=db_provider.name, - icon=db_provider.provider_icon, - type=ToolProviderType.MCP, - is_team_authorization=db_provider.authed, - server_url=db_provider.masked_server_url, - tools=ToolTransformService.mcp_tool_to_user_tool( - db_provider, [MCPTool(**tool) for tool in json.loads(db_provider.tools)] - ), - updated_at=int(db_provider.updated_at.timestamp()), - label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name), - description=I18nObject(en_US="", zh_Hans=""), - server_identifier=db_provider.server_identifier, - timeout=db_provider.timeout, - sse_read_timeout=db_provider.sse_read_timeout, - masked_headers=db_provider.masked_headers, - original_headers=db_provider.decrypted_headers, - ) + def mcp_provider_to_user_provider( + db_provider: MCPToolProvider, + for_list: bool = False, + user_name: str | None = None, + include_sensitive: bool = True, + ) -> ToolProviderApiEntity: + # Use provided user_name to avoid N+1 query, fallback to load_user() if not provided + if user_name is None: + user = db_provider.load_user() + user_name = user.name if user else None + + # Convert to entity and use its API response method + provider_entity = db_provider.to_entity() + + response = provider_entity.to_api_response(user_name=user_name, include_sensitive=include_sensitive) + try: + mcp_tools = [MCPTool(**tool) for tool in json.loads(db_provider.tools)] + except (ValidationError, json.JSONDecodeError): + mcp_tools = [] + # Add additional fields specific to the transform + response["id"] = db_provider.server_identifier if not for_list else db_provider.id + response["tools"] = ToolTransformService.mcp_tool_to_user_tool(db_provider, mcp_tools, user_name=user_name) + 
response["server_identifier"] = db_provider.server_identifier + + # Convert configuration dict to MCPConfiguration object + if "configuration" in response and isinstance(response["configuration"], dict): + response["configuration"] = MCPConfiguration( + timeout=float(response["configuration"]["timeout"]), + sse_read_timeout=float(response["configuration"]["sse_read_timeout"]), + ) + + return ToolProviderApiEntity(**response) @staticmethod - def mcp_tool_to_user_tool(mcp_provider: MCPToolProvider, tools: list[MCPTool]) -> list[ToolApiEntity]: - user = mcp_provider.load_user() + def mcp_tool_to_user_tool( + mcp_provider: MCPToolProvider, tools: list[MCPTool], user_name: str | None = None + ) -> list[ToolApiEntity]: + # Use provided user_name to avoid N+1 query, fallback to load_user() if not provided + if user_name is None: + user = mcp_provider.load_user() + user_name = user.name if user else "Anonymous" + return [ ToolApiEntity( - author=user.name if user else "Anonymous", + author=user_name or "Anonymous", name=tool.name, label=I18nObject(en_US=tool.name, zh_Hans=tool.name), description=I18nObject(en_US=tool.description or "", zh_Hans=tool.description or ""), parameters=ToolTransformService.convert_mcp_schema_to_parameter(tool.inputSchema), labels=[], + output_schema=tool.outputSchema or {}, ) for tool in tools ] @@ -387,6 +407,7 @@ class ToolTransformService: labels=labels or [], ) else: + assert tool.operation_id return ToolApiEntity( author=tool.author, name=tool.operation_id or "", @@ -410,7 +431,7 @@ class ToolTransformService: ) @staticmethod - def convert_mcp_schema_to_parameter(schema: dict) -> list["ToolParameter"]: + def convert_mcp_schema_to_parameter(schema: dict[str, Any]) -> list["ToolParameter"]: """ Convert MCP JSON schema to tool parameters @@ -419,7 +440,7 @@ class ToolTransformService: """ def create_parameter( - name: str, description: str, param_type: str, required: bool, input_schema: dict | None = None + name: str, description: str, param_type: str, required: bool, input_schema: dict[str, Any] | None = None ) -> ToolParameter: """Create a ToolParameter instance with given attributes""" input_schema_dict: dict[str, Any] = {"input_schema": input_schema} if input_schema else {} @@ -434,7 +455,9 @@ class ToolTransformService: **input_schema_dict, ) - def process_properties(props: dict, required: list, prefix: str = "") -> list[ToolParameter]: + def process_properties( + props: dict[str, dict[str, Any]], required: list[str], prefix: str = "" + ) -> list[ToolParameter]: """Process properties recursively""" TYPE_MAPPING = {"integer": "number", "float": "number"} COMPLEX_TYPES = ["array", "object"] diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 2449536d5c..b1cc963681 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -4,6 +4,7 @@ from datetime import datetime from typing import Any from sqlalchemy import or_, select +from sqlalchemy.orm import Session from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.__base.tool_provider import ToolProviderController @@ -13,6 +14,7 @@ from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurati from core.tools.workflow_as_tool.provider import WorkflowToolProviderController from core.tools.workflow_as_tool.tool import WorkflowTool from extensions.ext_database import db +from libs.uuid_utils import uuidv7 from models.model import App from 
models.tools import WorkflowToolProvider from models.workflow import Workflow @@ -63,27 +65,27 @@ class WorkflowToolManageService: if workflow is None: raise ValueError(f"Workflow not found for app {workflow_app_id}") - workflow_tool_provider = WorkflowToolProvider( - tenant_id=tenant_id, - user_id=user_id, - app_id=workflow_app_id, - name=name, - label=label, - icon=json.dumps(icon), - description=description, - parameter_configuration=json.dumps(parameters), - privacy_policy=privacy_policy, - version=workflow.version, - ) + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + workflow_tool_provider = WorkflowToolProvider( + id=str(uuidv7()), + tenant_id=tenant_id, + user_id=user_id, + app_id=workflow_app_id, + name=name, + label=label, + icon=json.dumps(icon), + description=description, + parameter_configuration=json.dumps(parameters), + privacy_policy=privacy_policy, + version=workflow.version, + ) + session.add(workflow_tool_provider) try: WorkflowToolProviderController.from_db(workflow_tool_provider) except Exception as e: raise ValueError(str(e)) - db.session.add(workflow_tool_provider) - db.session.commit() - if labels is not None: ToolLabelManager.update_tool_labels( ToolTransformService.workflow_provider_to_controller(workflow_tool_provider), labels @@ -168,7 +170,6 @@ class WorkflowToolManageService: except Exception as e: raise ValueError(str(e)) - db.session.add(workflow_tool_provider) db.session.commit() if labels is not None: diff --git a/api/services/variable_truncator.py b/api/services/variable_truncator.py index d02508e4f3..6eb8d0031d 100644 --- a/api/services/variable_truncator.py +++ b/api/services/variable_truncator.py @@ -1,4 +1,5 @@ import dataclasses +from abc import ABC, abstractmethod from collections.abc import Mapping from typing import Any, Generic, TypeAlias, TypeVar, overload @@ -17,6 +18,7 @@ from core.variables.segments import ( StringSegment, ) from core.variables.utils import dumps_with_segments +from core.workflow.nodes.variable_assigner.common.helpers import UpdatedVariable _MAX_DEPTH = 100 @@ -56,7 +58,7 @@ class UnknownTypeError(Exception): pass -JSONTypes: TypeAlias = int | float | str | list | dict | None | bool +JSONTypes: TypeAlias = int | float | str | list[object] | dict[str, object] | None | bool @dataclasses.dataclass(frozen=True) @@ -65,7 +67,17 @@ class TruncationResult: truncated: bool -class VariableTruncator: +class BaseTruncator(ABC): + @abstractmethod + def truncate(self, segment: Segment) -> TruncationResult: + pass + + @abstractmethod + def truncate_variable_mapping(self, v: Mapping[str, Any]) -> tuple[Mapping[str, Any], bool]: + pass + + +class VariableTruncator(BaseTruncator): """ Handles variable truncation with structure-preserving strategies. @@ -79,7 +91,7 @@ class VariableTruncator: self, string_length_limit=5000, array_element_limit: int = 20, - max_size_bytes: int = 1024_000, # 100KB + max_size_bytes: int = 1024_000, # 1000 KiB ): if string_length_limit <= 3: raise ValueError("string_length_limit should be greater than 3.") @@ -202,6 +214,9 @@ class VariableTruncator: """Recursively calculate JSON size without serialization.""" if isinstance(value, Segment): return VariableTruncator.calculate_json_size(value.value) + if isinstance(value, UpdatedVariable): + # TODO(Workflow): migrate UpdatedVariable serialization upstream and drop this fallback. 
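+                # UpdatedVariable exposes model_dump(), so its plain-dict form is sized like any other mapping.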
+ return VariableTruncator.calculate_json_size(value.model_dump(), depth=depth + 1) if depth > _MAX_DEPTH: raise MaxDepthExceededError() if isinstance(value, str): @@ -248,14 +263,14 @@ class VariableTruncator: truncated_value = value[:truncated_size] + "..." return _PartResult(truncated_value, self.calculate_json_size(truncated_value), True) - def _truncate_array(self, value: list, target_size: int) -> _PartResult[list]: + def _truncate_array(self, value: list[object], target_size: int) -> _PartResult[list[object]]: """ Truncate array with correct strategy: 1. First limit to 20 items 2. If still too large, truncate individual items """ - truncated_value: list[Any] = [] + truncated_value: list[object] = [] truncated = False used_size = self.calculate_json_size([]) @@ -278,7 +293,11 @@ class VariableTruncator: if used_size > target_size: break - part_result = self._truncate_json_primitives(item, target_size - used_size) + remaining_budget = target_size - used_size + if item is None or isinstance(item, (str, list, dict, bool, int, float, UpdatedVariable)): + part_result = self._truncate_json_primitives(item, remaining_budget) + else: + raise UnknownTypeError(f"got unknown type {type(item)} in array truncation") truncated_value.append(part_result.value) used_size += part_result.value_size truncated = part_result.truncated @@ -365,14 +384,19 @@ class VariableTruncator: return _PartResult(truncated_obj, used_size, truncated) + @overload + def _truncate_json_primitives( + self, val: UpdatedVariable, target_size: int + ) -> _PartResult[Mapping[str, object]]: ... + @overload def _truncate_json_primitives(self, val: str, target_size: int) -> _PartResult[str]: ... @overload - def _truncate_json_primitives(self, val: list, target_size: int) -> _PartResult[list]: ... + def _truncate_json_primitives(self, val: list[object], target_size: int) -> _PartResult[list[object]]: ... @overload - def _truncate_json_primitives(self, val: dict, target_size: int) -> _PartResult[dict]: ... + def _truncate_json_primitives(self, val: dict[str, object], target_size: int) -> _PartResult[dict[str, object]]: ... @overload def _truncate_json_primitives(self, val: bool, target_size: int) -> _PartResult[bool]: ... # type: ignore @@ -387,10 +411,15 @@ class VariableTruncator: def _truncate_json_primitives(self, val: None, target_size: int) -> _PartResult[None]: ... def _truncate_json_primitives( - self, val: str | list | dict | bool | int | float | None, target_size: int + self, + val: UpdatedVariable | str | list[object] | dict[str, object] | bool | int | float | None, + target_size: int, ) -> _PartResult[Any]: """Truncate a value within an object to fit within budget.""" - if isinstance(val, str): + if isinstance(val, UpdatedVariable): + # TODO(Workflow): push UpdatedVariable normalization closer to its producer. + return self._truncate_object(val.model_dump(), target_size) + elif isinstance(val, str): return self._truncate_string(val, target_size) elif isinstance(val, list): return self._truncate_array(val, target_size) @@ -400,3 +429,38 @@ class VariableTruncator: return _PartResult(val, self.calculate_json_size(val), False) else: raise AssertionError("this statement should be unreachable.") + + +class DummyVariableTruncator(BaseTruncator): + """ + A no-op variable truncator that doesn't truncate any data. + + This is used for Service API calls where truncation should be disabled + to maintain backward compatibility and provide complete data. 
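+    Both methods hand back their inputs unchanged and always report truncated=False.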
+ """ + + def truncate_variable_mapping(self, v: Mapping[str, Any]) -> tuple[Mapping[str, Any], bool]: + """ + Return original mapping without truncation. + + Args: + v: The variable mapping to process + + Returns: + Tuple of (original_mapping, False) where False indicates no truncation occurred + """ + return v, False + + def truncate(self, segment: Segment) -> TruncationResult: + """ + Return original segment without truncation. + + Args: + segment: The segment to process + + Returns: + The original segment unchanged + """ + # For Service API, we want to preserve the original segment + # without any truncation, so just return it as-is + return TruncationResult(result=segment, truncated=False) diff --git a/api/services/vector_service.py b/api/services/vector_service.py index 1c559f2c2b..abc92a0181 100644 --- a/api/services/vector_service.py +++ b/api/services/vector_service.py @@ -134,7 +134,7 @@ class VectorService: ) # use full doc mode to generate segment's child chunk processing_rule_dict = processing_rule.to_dict() - processing_rule_dict["rules"]["parent_mode"] = ParentMode.FULL_DOC.value + processing_rule_dict["rules"]["parent_mode"] = ParentMode.FULL_DOC documents = index_processor.transform( [document], embedding_model_instance=embedding_model_instance, diff --git a/api/services/web_conversation_service.py b/api/services/web_conversation_service.py index 0f54e838f3..560aec2330 100644 --- a/api/services/web_conversation_service.py +++ b/api/services/web_conversation_service.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, EndUser from models.web import PinnedConversation from services.conversation_service import ConversationService diff --git a/api/services/webapp_auth_service.py b/api/services/webapp_auth_service.py index 066dc9d741..9bd797a45f 100644 --- a/api/services/webapp_auth_service.py +++ b/api/services/webapp_auth_service.py @@ -10,7 +10,7 @@ from extensions.ext_database import db from libs.helper import TokenManager from libs.passport import PassportService from libs.password import compare_password -from models.account import Account, AccountStatus +from models import Account, AccountStatus from models.model import App, EndUser, Site from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -36,7 +36,7 @@ class WebAppAuthService: if not account: raise AccountNotFoundError() - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise AccountLoginError("Account is banned.") if account.password is None or not compare_password(password, account.password, account.password_salt): @@ -56,7 +56,7 @@ class WebAppAuthService: if not account: return None - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise Unauthorized("Account is banned.") return account @@ -172,7 +172,8 @@ class WebAppAuthService: return WebAppAuthType.EXTERNAL if app_code: - webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code) + app_id = AppService.get_app_id_by_code(app_code) + webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id) return cls.get_app_auth_type(access_mode=webapp_settings.access_mode) raise ValueError("Could not determine 
app authentication type.") diff --git a/api/services/website_service.py b/api/services/website_service.py index 37588d6ba5..a23f01ec71 100644 --- a/api/services/website_service.py +++ b/api/services/website_service.py @@ -23,6 +23,7 @@ class CrawlOptions: only_main_content: bool = False includes: str | None = None excludes: str | None = None + prompt: str | None = None max_depth: int | None = None use_sitemap: bool = True @@ -70,6 +71,7 @@ class WebsiteCrawlApiRequest: only_main_content=self.options.get("only_main_content", False), includes=self.options.get("includes"), excludes=self.options.get("excludes"), + prompt=self.options.get("prompt"), max_depth=self.options.get("max_depth"), use_sitemap=self.options.get("use_sitemap", True), ) @@ -174,6 +176,7 @@ class WebsiteService: def _crawl_with_firecrawl(cls, request: CrawlRequest, api_key: str, config: dict) -> dict[str, Any]: firecrawl_app = FirecrawlApp(api_key=api_key, base_url=config.get("base_url")) + params: dict[str, Any] if not request.options.crawl_sub_pages: params = { "includePaths": [], @@ -188,8 +191,10 @@ class WebsiteService: "limit": request.options.limit, "scrapeOptions": {"onlyMainContent": request.options.only_main_content}, } - if request.options.max_depth: - params["maxDepth"] = request.options.max_depth + + # Add optional prompt for Firecrawl v2 crawl-params compatibility + if request.options.prompt: + params["prompt"] = request.options.prompt job_id = firecrawl_app.crawl_url(request.url, params) website_crawl_time_cache_key = f"website_crawl_{job_id}" diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py index dccd891981..e70b2b5c95 100644 --- a/api/services/workflow/workflow_converter.py +++ b/api/services/workflow/workflow_converter.py @@ -22,7 +22,7 @@ from core.prompt.utils.prompt_template_parser import PromptTemplateParser from core.workflow.nodes import NodeType from events.app_event import app_was_created from extensions.ext_database import db -from models.account import Account +from models import Account from models.api_based_extension import APIBasedExtension, APIBasedExtensionPoint from models.model import App, AppMode, AppModelConfig from models.workflow import Workflow, WorkflowType @@ -79,7 +79,6 @@ class WorkflowConverter: new_app.updated_by = account.id db.session.add(new_app) db.session.flush() - db.session.commit() workflow.app_id = new_app.id db.session.commit() @@ -229,7 +228,7 @@ class WorkflowConverter: "position": None, "data": { "title": "START", - "type": NodeType.START.value, + "type": NodeType.START, "variables": [jsonable_encoder(v) for v in variables], }, } @@ -274,7 +273,7 @@ class WorkflowConverter: inputs[v.variable] = "{{#start." 
+ v.variable + "#}}" request_body = { - "point": APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY.value, + "point": APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY, "params": { "app_id": app_model.id, "tool_variable": tool_variable, @@ -291,7 +290,7 @@ class WorkflowConverter: "position": None, "data": { "title": f"HTTP REQUEST {api_based_extension.name}", - "type": NodeType.HTTP_REQUEST.value, + "type": NodeType.HTTP_REQUEST, "method": "post", "url": api_based_extension.api_endpoint, "authorization": {"type": "api-key", "config": {"type": "bearer", "api_key": api_key}}, @@ -309,7 +308,7 @@ class WorkflowConverter: "position": None, "data": { "title": f"Parse {api_based_extension.name} Response", - "type": NodeType.CODE.value, + "type": NodeType.CODE, "variables": [{"variable": "response_json", "value_selector": [http_request_node["id"], "body"]}], "code_language": "python3", "code": "import json\n\ndef main(response_json: str) -> str:\n response_body = json.loads(" @@ -349,7 +348,7 @@ class WorkflowConverter: "position": None, "data": { "title": "KNOWLEDGE RETRIEVAL", - "type": NodeType.KNOWLEDGE_RETRIEVAL.value, + "type": NodeType.KNOWLEDGE_RETRIEVAL, "query_variable_selector": query_variable_selector, "dataset_ids": dataset_config.dataset_ids, "retrieval_mode": retrieve_config.retrieve_strategy.value, @@ -397,16 +396,16 @@ class WorkflowConverter: :param external_data_variable_node_mapping: external data variable node mapping """ # fetch start and knowledge retrieval node - start_node = next(filter(lambda n: n["data"]["type"] == NodeType.START.value, graph["nodes"])) + start_node = next(filter(lambda n: n["data"]["type"] == NodeType.START, graph["nodes"])) knowledge_retrieval_node = next( - filter(lambda n: n["data"]["type"] == NodeType.KNOWLEDGE_RETRIEVAL.value, graph["nodes"]), None + filter(lambda n: n["data"]["type"] == NodeType.KNOWLEDGE_RETRIEVAL, graph["nodes"]), None ) role_prefix = None prompts: Any | None = None # Chat Model - if model_config.mode == LLMMode.CHAT.value: + if model_config.mode == LLMMode.CHAT: if prompt_template.prompt_type == PromptTemplateEntity.PromptType.SIMPLE: if not prompt_template.simple_prompt_template: raise ValueError("Simple prompt template is required") @@ -518,7 +517,7 @@ class WorkflowConverter: "position": None, "data": { "title": "LLM", - "type": NodeType.LLM.value, + "type": NodeType.LLM, "model": { "provider": model_config.provider, "name": model_config.model, @@ -573,7 +572,7 @@ class WorkflowConverter: "position": None, "data": { "title": "END", - "type": NodeType.END.value, + "type": NodeType.END, "outputs": [{"variable": "result", "value_selector": ["llm", "text"]}], }, } @@ -587,7 +586,7 @@ class WorkflowConverter: return { "id": "answer", "position": None, - "data": {"title": "ANSWER", "type": NodeType.ANSWER.value, "answer": "{{#llm.text#}}"}, + "data": {"title": "ANSWER", "type": NodeType.ANSWER, "answer": "{{#llm.text#}}"}, } def _create_edge(self, source: str, target: str): diff --git a/api/services/workflow_app_service.py b/api/services/workflow_app_service.py index ced6dca324..23dd436675 100644 --- a/api/services/workflow_app_service.py +++ b/api/services/workflow_app_service.py @@ -86,12 +86,16 @@ class WorkflowAppService: ), ) if created_by_account: + account = session.scalar(select(Account).where(Account.email == created_by_account)) + if not account: + raise ValueError(f"Account not found: {created_by_account}") + stmt = stmt.join( Account, and_( WorkflowAppLog.created_by == Account.id, WorkflowAppLog.created_by_role == 
CreatorUserRole.ACCOUNT, - Account.email == created_by_account, + Account.id == account.id, ), ) diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index 1378c20128..5e63a83bb1 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -32,8 +32,7 @@ from factories.file_factory import StorageKeyLoader from factories.variable_factory import build_segment, segment_to_variable from libs.datetime_utils import naive_utc_now from libs.uuid_utils import uuidv7 -from models import App, Conversation -from models.account import Account +from models import Account, App, Conversation from models.enums import DraftVariableType from models.workflow import Workflow, WorkflowDraftVariable, WorkflowDraftVariableFile, is_system_variable_editable from repositories.factory import DifyAPIRepositoryFactory @@ -569,7 +568,7 @@ class WorkflowDraftVariableService: system_instruction="", system_instruction_tokens=0, status="normal", - invoke_from=InvokeFrom.DEBUGGER.value, + invoke_from=InvokeFrom.DEBUGGER, from_source="console", from_end_user_id=None, from_account_id=account_id, diff --git a/api/services/workflow_run_service.py b/api/services/workflow_run_service.py index 79d91cab4c..5c8719b499 100644 --- a/api/services/workflow_run_service.py +++ b/api/services/workflow_run_service.py @@ -26,13 +26,15 @@ class WorkflowRunService: ) self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) - def get_paginate_advanced_chat_workflow_runs(self, app_model: App, args: dict) -> InfiniteScrollPagination: + def get_paginate_advanced_chat_workflow_runs( + self, app_model: App, args: dict, triggered_from: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.DEBUGGING + ) -> InfiniteScrollPagination: """ Get advanced chat app workflow run list - Only return triggered_from == advanced_chat :param app_model: app model :param args: request args + :param triggered_from: workflow run triggered from (default: DEBUGGING for preview runs) """ class WorkflowWithMessage: @@ -45,7 +47,7 @@ class WorkflowRunService: def __getattr__(self, item): return getattr(self._workflow_run, item) - pagination = self.get_paginate_workflow_runs(app_model, args) + pagination = self.get_paginate_workflow_runs(app_model, args, triggered_from) with_message_workflow_runs = [] for workflow_run in pagination.data: @@ -60,23 +62,27 @@ class WorkflowRunService: pagination.data = with_message_workflow_runs return pagination - def get_paginate_workflow_runs(self, app_model: App, args: dict) -> InfiniteScrollPagination: + def get_paginate_workflow_runs( + self, app_model: App, args: dict, triggered_from: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.DEBUGGING + ) -> InfiniteScrollPagination: """ - Get debug workflow run list - Only return triggered_from == debugging + Get workflow run list :param app_model: app model :param args: request args + :param triggered_from: workflow run triggered from (default: DEBUGGING) """ limit = int(args.get("limit", 20)) last_id = args.get("last_id") + status = args.get("status") return self._workflow_run_repo.get_paginated_workflow_runs( tenant_id=app_model.tenant_id, app_id=app_model.id, - triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value, + triggered_from=triggered_from, limit=limit, last_id=last_id, + status=status, ) def get_workflow_run(self, app_model: App, run_id: str) -> WorkflowRun | None: @@ -92,6 +98,30 @@ class WorkflowRunService: run_id=run_id, ) 
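The hunk that follows adds `get_workflow_runs_count`, whose `time_range` filter accepts suffix-style strings such as "7d", "4h", "30m", or "30s". The parsing itself happens in the repository layer, which this diff does not show; as a minimal sketch of that grammar (the helper name and its placement are assumptions for illustration, not part of the patch):

```python
from datetime import datetime, timedelta

# Unit suffixes taken from the docstring examples: "7d", "4h", "30m", "30s".
_UNITS = {"d": "days", "h": "hours", "m": "minutes", "s": "seconds"}

def time_range_to_cutoff(time_range: str, now: datetime) -> datetime:
    """Translate a suffix-style range into the earliest timestamp to keep."""
    value, unit = time_range[:-1], time_range[-1]
    if unit not in _UNITS or not value.isdigit():
        raise ValueError(f"unsupported time_range: {time_range!r}")
    return now - timedelta(**{_UNITS[unit]: int(value)})

# Example: count only runs created during the last 7 days.
assert time_range_to_cutoff("7d", now=datetime(2024, 1, 8)) == datetime(2024, 1, 1)
```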
+ def get_workflow_runs_count( + self, + app_model: App, + status: str | None = None, + time_range: str | None = None, + triggered_from: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.DEBUGGING, + ) -> dict[str, int]: + """ + Get workflow runs count statistics + + :param app_model: app model + :param status: optional status filter + :param time_range: optional time range filter (e.g., "7d", "4h", "30m", "30s") + :param triggered_from: workflow run triggered from (default: DEBUGGING) + :return: dict with total and status counts + """ + return self._workflow_run_repo.get_workflow_runs_count( + tenant_id=app_model.tenant_id, + app_id=app_model.id, + triggered_from=triggered_from, + status=status, + time_range=time_range, + ) + def get_workflow_run_node_executions( self, app_model: App, diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 359fdb85fd..2f69e46074 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -14,7 +14,7 @@ from core.file import File from core.repositories import DifyCoreRepositoryFactory from core.variables import Variable from core.variables.variables import VariableUnion -from core.workflow.entities import VariablePool, WorkflowNodeExecution +from core.workflow.entities import WorkflowNodeExecution from core.workflow.enums import ErrorStrategy, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent, NodeRunSucceededEvent @@ -23,6 +23,7 @@ from core.workflow.nodes import NodeType from core.workflow.nodes.base.node import Node from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated @@ -30,7 +31,7 @@ from extensions.ext_database import db from extensions.ext_storage import storage from factories.file_factory import build_from_mapping, build_from_mappings from libs.datetime_utils import naive_utc_now -from models.account import Account +from models import Account from models.model import App, AppMode from models.tools import WorkflowToolProvider from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType @@ -1006,7 +1007,7 @@ def _setup_variable_pool( ) # Only add chatflow-specific variables for non-workflow types - if workflow.type != WorkflowType.WORKFLOW.value: + if workflow.type != WorkflowType.WORKFLOW: system_variable.query = query system_variable.conversation_id = conversation_id system_variable.dialogue_count = 1 diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index 951b9e5653..bd95af2614 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -8,7 +8,6 @@ import click import pandas as pd from celery import shared_task from sqlalchemy import func -from sqlalchemy.orm import Session from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType @@ -50,54 +49,48 @@ def batch_create_segment_to_index_task( indexing_cache_key = f"segment_batch_import_{job_id}" try: - with Session(db.engine) as 
session: - dataset = session.get(Dataset, dataset_id) - if not dataset: - raise ValueError("Dataset not exist.") + dataset = db.session.get(Dataset, dataset_id) + if not dataset: + raise ValueError("Dataset does not exist.") - dataset_document = session.get(Document, document_id) - if not dataset_document: - raise ValueError("Document not exist.") + dataset_document = db.session.get(Document, document_id) + if not dataset_document: + raise ValueError("Document does not exist.") - if ( - not dataset_document.enabled - or dataset_document.archived - or dataset_document.indexing_status != "completed" - ): - raise ValueError("Document is not available.") + if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": + raise ValueError("Document is not available.") - upload_file = session.get(UploadFile, upload_file_id) - if not upload_file: - raise ValueError("UploadFile not found.") + upload_file = db.session.get(UploadFile, upload_file_id) + if not upload_file: + raise ValueError("UploadFile not found.") - with tempfile.TemporaryDirectory() as temp_dir: - suffix = Path(upload_file.key).suffix - # FIXME mypy: Cannot determine type of 'tempfile._get_candidate_names' better not use it here - file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore - storage.download(upload_file.key, file_path) + with tempfile.TemporaryDirectory() as temp_dir: + suffix = Path(upload_file.key).suffix + file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore + storage.download(upload_file.key, file_path) - # Skip the first row - df = pd.read_csv(file_path) - content = [] - for _, row in df.iterrows(): - if dataset_document.doc_form == "qa_model": - data = {"content": row.iloc[0], "answer": row.iloc[1]} - else: - data = {"content": row.iloc[0]} - content.append(data) - if len(content) == 0: - raise ValueError("The CSV file is empty.") + df = pd.read_csv(file_path) + content = [] + for _, row in df.iterrows(): + if dataset_document.doc_form == "qa_model": + data = {"content": row.iloc[0], "answer": row.iloc[1]} + else: + data = {"content": row.iloc[0]} + content.append(data) + if len(content) == 0: + raise ValueError("The CSV file is empty.") + + document_segments = [] + embedding_model = None + if dataset.indexing_technique == "high_quality": + model_manager = ModelManager() + embedding_model = model_manager.get_model_instance( + tenant_id=dataset.tenant_id, + provider=dataset.embedding_model_provider, + model_type=ModelType.TEXT_EMBEDDING, + model=dataset.embedding_model, + ) - document_segments = [] - embedding_model = None - if dataset.indexing_technique == "high_quality": - model_manager = ModelManager() - embedding_model = model_manager.get_model_instance( - tenant_id=dataset.tenant_id, - provider=dataset.embedding_model_provider, - model_type=ModelType.TEXT_EMBEDDING, - model=dataset.embedding_model, - ) word_count_change = 0 if embedding_model: tokens_list = embedding_model.get_text_embedding_num_tokens( @@ -105,10 +98,11 @@ ) else: tokens_list = [0] * len(content) + for segment, tokens in zip(content, tokens_list): content = segment["content"] doc_id = str(uuid.uuid4()) - segment_hash = helper.generate_text_hash(content) # type: ignore + segment_hash = helper.generate_text_hash(content) max_position = ( db.session.query(func.max(DocumentSegment.position)) .where(DocumentSegment.document_id == dataset_document.id) @@ -135,11 +129,11 @@ def
batch_create_segment_to_index_task( word_count_change += segment_document.word_count db.session.add(segment_document) document_segments.append(segment_document) - # update document word count + assert dataset_document.word_count is not None dataset_document.word_count += word_count_change db.session.add(dataset_document) - # add index to db + VectorService.create_segments_vector(None, document_segments, dataset, dataset_document.doc_form) db.session.commit() redis_client.setex(indexing_cache_key, 600, "completed") diff --git a/api/tasks/delete_account_task.py b/api/tasks/delete_account_task.py index 611aef86ad..fb5eb1d691 100644 --- a/api/tasks/delete_account_task.py +++ b/api/tasks/delete_account_task.py @@ -3,7 +3,7 @@ import logging from celery import shared_task from extensions.ext_database import db -from models.account import Account +from models import Account from services.billing_service import BillingService from tasks.mail_account_deletion_task import send_deletion_success_task diff --git a/api/tasks/ops_trace_task.py b/api/tasks/ops_trace_task.py index 7b254ac3b5..72e3b42ca7 100644 --- a/api/tasks/ops_trace_task.py +++ b/api/tasks/ops_trace_task.py @@ -36,7 +36,7 @@ def process_trace_tasks(file_info): if trace_info.get("workflow_data"): trace_info["workflow_data"] = WorkflowRun.from_dict(data=trace_info["workflow_data"]) if trace_info.get("documents"): - trace_info["documents"] = [Document(**doc) for doc in trace_info["documents"]] + trace_info["documents"] = [Document.model_validate(doc) for doc in trace_info["documents"]] try: if trace_instance: diff --git a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py index bae8f1c4db..124971e8e2 100644 --- a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py +++ b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py @@ -1,5 +1,5 @@ +import json import operator -import traceback import typing import click @@ -9,38 +9,106 @@ from core.helper import marketplace from core.helper.marketplace import MarketplacePluginDeclaration from core.plugin.entities.plugin import PluginInstallationSource from core.plugin.impl.plugin import PluginInstaller +from extensions.ext_redis import redis_client from models.account import TenantPluginAutoUpgradeStrategy RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3 +CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:" +CACHE_REDIS_TTL = 60 * 15 # 15 minutes -cached_plugin_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {} +def _get_redis_cache_key(plugin_id: str) -> str: + """Generate Redis cache key for plugin manifest.""" + return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}" + + +def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]: + """ + Get cached plugin manifest from Redis. + Returns: + - MarketplacePluginDeclaration: if found in cache + - None: if cached as not found (marketplace returned no result) + - False: if not in cache at all + """ + try: + key = _get_redis_cache_key(plugin_id) + cached_data = redis_client.get(key) + if cached_data is None: + return False + + cached_json = json.loads(cached_data) + if cached_json is None: + return None + + return MarketplacePluginDeclaration.model_validate(cached_json) + except Exception: + return False + + +def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None: + """ + Cache plugin manifest in Redis. 
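+    A plugin that is missing from the marketplace is cached as JSON null, so the negative result is also reused until the TTL expires.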
+ Args: + plugin_id: The plugin ID + manifest: The manifest to cache, or None if not found in marketplace + """ + try: + key = _get_redis_cache_key(plugin_id) + if manifest is None: + # Cache the fact that this plugin was not found + redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None)) + else: + # Cache the manifest data + redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json()) + except Exception: + # If Redis fails, continue without caching + # traceback.print_exc() + pass def marketplace_batch_fetch_plugin_manifests( plugin_ids_plain_list: list[str], ) -> list[MarketplacePluginDeclaration]: - global cached_plugin_manifests - # return marketplace.batch_fetch_plugin_manifests(plugin_ids_plain_list) - not_included_plugin_ids = [ - plugin_id for plugin_id in plugin_ids_plain_list if plugin_id not in cached_plugin_manifests - ] - if not_included_plugin_ids: - manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_included_plugin_ids) + """Fetch plugin manifests with Redis caching support.""" + cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {} + not_cached_plugin_ids: list[str] = [] + + # Check Redis cache for each plugin + for plugin_id in plugin_ids_plain_list: + cached_result = _get_cached_manifest(plugin_id) + if cached_result is False: + # Not in cache, need to fetch + not_cached_plugin_ids.append(plugin_id) + else: + # Either found manifest or cached as None (not found in marketplace) + # At this point, cached_result is either MarketplacePluginDeclaration or None + if isinstance(cached_result, bool): + # This should never happen due to the if condition above, but for type safety + continue + cached_manifests[plugin_id] = cached_result + + # Fetch uncached plugins from marketplace + if not_cached_plugin_ids: + manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids) + + # Cache the fetched manifests for manifest in manifests: - cached_plugin_manifests[manifest.plugin_id] = manifest + cached_manifests[manifest.plugin_id] = manifest + _set_cached_manifest(manifest.plugin_id, manifest) - if ( - len(manifests) == 0 - ): # this indicates that the plugin not found in marketplace, should set None in cache to prevent future check - for plugin_id in not_included_plugin_ids: - cached_plugin_manifests[plugin_id] = None + # Cache plugins that were not found in marketplace + fetched_plugin_ids = {manifest.plugin_id for manifest in manifests} + for plugin_id in not_cached_plugin_ids: + if plugin_id not in fetched_plugin_ids: + cached_manifests[plugin_id] = None + _set_cached_manifest(plugin_id, None) + # Build result list from cached manifests result: list[MarketplacePluginDeclaration] = [] for plugin_id in plugin_ids_plain_list: - final_manifest = cached_plugin_manifests.get(plugin_id) - if final_manifest is not None: - result.append(final_manifest) + cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id) + if cached_manifest is not None: + result.append(cached_manifest) return result @@ -157,10 +225,10 @@ def process_tenant_plugin_autoupgrade_check_task( ) except Exception as e: click.echo(click.style(f"Error when upgrading plugin: {e}", fg="red")) - traceback.print_exc() + # traceback.print_exc() break except Exception as e: click.echo(click.style(f"Error when checking upgradable plugin: {e}", fg="red")) - traceback.print_exc() + # traceback.print_exc() return diff --git 
a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py index 028f635188..6de95a3b85 100644 --- a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py @@ -16,7 +16,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom, RagPipelineGenerat from core.app.entities.rag_pipeline_invoke_entities import RagPipelineInvokeEntity from core.repositories.factory import DifyCoreRepositoryFactory from extensions.ext_database import db -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Pipeline from models.enums import WorkflowRunTriggeredFrom from models.workflow import Workflow, WorkflowNodeExecutionTriggeredFrom @@ -29,23 +29,10 @@ def priority_rag_pipeline_run_task( tenant_id: str, ): """ - Async Run rag pipeline - :param rag_pipeline_invoke_entities: Rag pipeline invoke entities - rag_pipeline_invoke_entities include: - :param pipeline_id: Pipeline ID - :param user_id: User ID - :param tenant_id: Tenant ID - :param workflow_id: Workflow ID - :param invoke_from: Invoke source (debugger, published, etc.) - :param streaming: Whether to stream results - :param datasource_type: Type of datasource - :param datasource_info: Datasource information dict - :param batch: Batch identifier - :param document_id: Document ID (optional) - :param start_node_id: Starting node ID - :param inputs: Input parameters dict - :param workflow_execution_id: Workflow execution ID - :param workflow_thread_pool_id: Thread pool ID for workflow execution + Async Run rag pipeline task using high priority queue. + + :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities + :param tenant_id: Tenant ID for the pipeline execution """ # run with threading, thread pool size is 10 @@ -92,7 +79,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], # Create Flask application context for this thread with flask_app.app_context(): try: - rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity(**rag_pipeline_invoke_entity) + rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity.model_validate(rag_pipeline_invoke_entity) user_id = rag_pipeline_invoke_entity_model.user_id tenant_id = rag_pipeline_invoke_entity_model.tenant_id pipeline_id = rag_pipeline_invoke_entity_model.pipeline_id @@ -125,7 +112,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_execution_id = str(uuid.uuid4()) # Create application generate entity from dict - entity = RagPipelineGenerateEntity(**application_generate_entity) + entity = RagPipelineGenerateEntity.model_validate(application_generate_entity) # Create workflow repositories session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) diff --git a/api/tasks/rag_pipeline/rag_pipeline_run_task.py b/api/tasks/rag_pipeline/rag_pipeline_run_task.py index ee904c4649..f4a092d97e 100644 --- a/api/tasks/rag_pipeline/rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/rag_pipeline_run_task.py @@ -17,7 +17,7 @@ from core.app.entities.rag_pipeline_invoke_entities import RagPipelineInvokeEnti from core.repositories.factory import DifyCoreRepositoryFactory from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Pipeline from models.enums import 
WorkflowRunTriggeredFrom from models.workflow import Workflow, WorkflowNodeExecutionTriggeredFrom @@ -30,23 +30,10 @@ def rag_pipeline_run_task( tenant_id: str, ): """ - Async Run rag pipeline - :param rag_pipeline_invoke_entities: Rag pipeline invoke entities - rag_pipeline_invoke_entities include: - :param pipeline_id: Pipeline ID - :param user_id: User ID - :param tenant_id: Tenant ID - :param workflow_id: Workflow ID - :param invoke_from: Invoke source (debugger, published, etc.) - :param streaming: Whether to stream results - :param datasource_type: Type of datasource - :param datasource_info: Datasource information dict - :param batch: Batch identifier - :param document_id: Document ID (optional) - :param start_node_id: Starting node ID - :param inputs: Input parameters dict - :param workflow_execution_id: Workflow execution ID - :param workflow_thread_pool_id: Thread pool ID for workflow execution + Async Run rag pipeline task using regular priority queue. + + :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities + :param tenant_id: Tenant ID for the pipeline execution """ # run with threading, thread pool size is 10 @@ -113,7 +100,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], # Create Flask application context for this thread with flask_app.app_context(): try: - rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity(**rag_pipeline_invoke_entity) + rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity.model_validate(rag_pipeline_invoke_entity) user_id = rag_pipeline_invoke_entity_model.user_id tenant_id = rag_pipeline_invoke_entity_model.tenant_id pipeline_id = rag_pipeline_invoke_entity_model.pipeline_id @@ -146,7 +133,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_execution_id = str(uuid.uuid4()) # Create application generate entity from dict - entity = RagPipelineGenerateEntity(**application_generate_entity) + entity = RagPipelineGenerateEntity.model_validate(application_generate_entity) # Create workflow repositories session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) diff --git a/api/tasks/retry_document_indexing_task.py b/api/tasks/retry_document_indexing_task.py index 9c12696824..9d208647e6 100644 --- a/api/tasks/retry_document_indexing_task.py +++ b/api/tasks/retry_document_indexing_task.py @@ -10,7 +10,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFacto from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Dataset, Document, DocumentSegment from services.feature_service import FeatureService from services.rag_pipeline.rag_pipeline import RagPipelineService diff --git a/api/tasks/workflow_draft_var_tasks.py b/api/tasks/workflow_draft_var_tasks.py index 457d46a9d8..fcb98ec39e 100644 --- a/api/tasks/workflow_draft_var_tasks.py +++ b/api/tasks/workflow_draft_var_tasks.py @@ -5,15 +5,10 @@ These tasks provide asynchronous storage capabilities for workflow execution dat improving performance by offloading storage operations to background workers. 
""" -import logging - from celery import shared_task # type: ignore[import-untyped] from sqlalchemy.orm import Session from extensions.ext_database import db - -_logger = logging.getLogger(__name__) - from services.workflow_draft_variable_service import DraftVarFileDeletion, WorkflowDraftVariableService diff --git a/api/templates/without-brand/change_mail_confirm_new_template_en-US.html b/api/templates/without-brand/change_mail_confirm_new_template_en-US.html index 69a8978f42..861b1bcdb6 100644 --- a/api/templates/without-brand/change_mail_confirm_new_template_en-US.html +++ b/api/templates/without-brand/change_mail_confirm_new_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

Confirm Your New Email Address

-

You’re updating the email address linked to your Dify account.

+

You're updating the email address linked to your account.

To confirm this action, please use the verification code below.

This code will only be valid for the next 5 minutes:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html b/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html index e3e9e7c45a..e411680e89 100644 --- a/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html +++ b/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

确认您的邮箱地址变更

-

您正在更新与您的 Dify 账户关联的邮箱地址。

+

您正在更新与您的账户关联的邮箱地址。

为了确认此操作,请使用以下验证码。

此验证码仅在接下来的5分钟内有效:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_old_template_en-US.html b/api/templates/without-brand/change_mail_confirm_old_template_en-US.html index 9d79fa7ff9..9fe52255a5 100644 --- a/api/templates/without-brand/change_mail_confirm_old_template_en-US.html +++ b/api/templates/without-brand/change_mail_confirm_old_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

Verify Your Request to Change Email

-

We received a request to change the email address associated with your Dify account.

+

We received a request to change the email address associated with your account.

To confirm this action, please use the verification code below.

This code will only be valid for the next 5 minutes:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html b/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html index 41f0839190..98cbd2f0c6 100644 --- a/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html +++ b/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

验证您的邮箱变更请求

-

我们收到了一个变更您 Dify 账户关联邮箱地址的请求。

+

我们收到了一个变更您账户关联邮箱地址的请求。

此验证码仅在接下来的5分钟内有效:

@@ -117,5 +120,4 @@
- - + \ No newline at end of file diff --git a/api/templates/without-brand/invite_member_mail_template_en-US.html b/api/templates/without-brand/invite_member_mail_template_en-US.html index fc7f3679ba..f9157284fa 100644 --- a/api/templates/without-brand/invite_member_mail_template_en-US.html +++ b/api/templates/without-brand/invite_member_mail_template_en-US.html @@ -1,5 +1,6 @@ + +
-
- - Dify Logo -
+

Dear {{ to }},

-

{{ inviter_name }} is pleased to invite you to join our workspace on {{application_title}}, a platform specifically designed for LLM application development. On {{application_title}}, you can explore, create, and collaborate to build and operate AI applications.

+

{{ inviter_name }} is pleased to invite you to join our workspace on {{application_title}}, a + platform specifically designed for LLM application development. On {{application_title}}, you can explore, + create, and collaborate to build and operate AI applications.

Click the button below to log in to {{application_title}} and join the workspace.

-

Login Here

+

Login Here

Best regards,

{{application_title}} Team

- + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html index a5758a2184..659c285324 100644 --- a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html +++ b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -80,10 +82,9 @@

You have been assigned as the new owner of the workspace "{{WorkspaceName}}".

As the new owner, you now have full administrative privileges for this workspace.

-

If you have any questions, please contact support@dify.ai.

+

If you have any questions, please contact support.

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html index 53bab92552..f710dbb289 100644 --- a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html +++ b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -80,10 +82,9 @@

您已被分配为工作空间“{{WorkspaceName}}”的新所有者。

作为新所有者,您现在对该工作空间拥有完全的管理权限。

-

如果您有任何问题,请联系support@dify.ai。

+

如果您有任何问题,请联系支持团队。

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html index 3e7faeb01e..149ec77aea 100644 --- a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html +++ b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -97,7 +99,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -108,12 +111,14 @@

Workspace ownership has been transferred

-

You have successfully transferred ownership of the workspace "{{WorkspaceName}}" to {{NewOwnerEmail}}.

-

You no longer have owner privileges for this workspace. Your access level has been changed to Admin.

-

If you did not initiate this transfer or have concerns about this change, please contact support@dify.ai immediately.

+

You have successfully transferred ownership of the workspace "{{WorkspaceName}}" to + {{NewOwnerEmail}}.

+

You no longer have owner privileges for this workspace. Your access level has been changed to + Admin.

+

If you did not initiate this transfer or have concerns about this change, please contact + support immediately.

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html index 31e3c23140..d7aed40068 100644 --- a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html +++ b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -97,7 +99,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -110,10 +113,9 @@

您已成功将工作空间“{{WorkspaceName}}”的所有权转移给{{NewOwnerEmail}}。

您不再拥有此工作空间的拥有者权限。您的访问级别已更改为管理员。

-

如果您没有发起此转移或对此变更有任何疑问,请立即联系support@dify.ai。

+

如果您没有发起此转移或对此变更有任何疑问,请立即联系支持团队。

- - + \ No newline at end of file diff --git a/api/tests/fixtures/workflow/iteration_flatten_output_disabled_workflow.yml b/api/tests/fixtures/workflow/iteration_flatten_output_disabled_workflow.yml new file mode 100644 index 0000000000..9cae6385c8 --- /dev/null +++ b/api/tests/fixtures/workflow/iteration_flatten_output_disabled_workflow.yml @@ -0,0 +1,258 @@ +app: + description: 'This workflow tests the iteration node with flatten_output=False. + + + It processes [1, 2, 3], outputs [item, item*2] for each iteration. + + + With flatten_output=False, it should output nested arrays: + + + ``` + + {"output": [[1, 2], [2, 4], [3, 6]]} + + ```' + icon: 🤖 + icon_background: '#FFEAD5' + mode: workflow + name: test_iteration_flatten_disabled + use_icon_as_answer_icon: false +dependencies: [] +kind: app +version: 0.3.1 +workflow: + conversation_variables: [] + environment_variables: [] + features: + file_upload: + enabled: false + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: code + id: start-source-code-target + source: start_node + sourceHandle: source + target: code_node + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: code + targetType: iteration + id: code-source-iteration-target + source: code_node + sourceHandle: source + target: iteration_node + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: true + isInLoop: false + iteration_id: iteration_node + sourceType: iteration-start + targetType: code + id: iteration-start-source-code-inner-target + source: iteration_nodestart + sourceHandle: source + target: code_inner_node + targetHandle: target + type: custom + zIndex: 1002 + - data: + isInIteration: false + isInLoop: false + sourceType: iteration + targetType: end + id: iteration-source-end-target + source: iteration_node + sourceHandle: source + target: end_node + targetHandle: target + type: custom + zIndex: 0 + nodes: + - data: + desc: '' + selected: false + title: Start + type: start + variables: [] + height: 54 + id: start_node + position: + x: 80 + y: 282 + positionAbsolute: + x: 80 + y: 282 + sourcePosition: right + targetPosition: left + type: custom + width: 244 + - data: + code: "\ndef main() -> dict:\n return {\n \"result\": [1, 2, 3],\n\ + \ }\n" + code_language: python3 + desc: '' + outputs: + result: + children: null + type: array[number] + selected: false + title: Generate Array + type: code + variables: [] + height: 54 + id: code_node + position: + x: 384 + y: 282 + positionAbsolute: + x: 384 + y: 282 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 244 + - data: + desc: '' + error_handle_mode: terminated + flatten_output: false + height: 178 + is_parallel: false + iterator_input_type: array[number] + iterator_selector: + - code_node + - result + output_selector: + - code_inner_node + - result + output_type: array[array[number]] + parallel_nums: 10 + selected: false + start_node_id: iteration_nodestart + title: Iteration with Flatten Disabled + type: iteration + width: 388 + height: 178 + id: iteration_node + position: + x: 684 + y: 282 + positionAbsolute: + x: 684 + y: 282 + selected: false + sourcePosition: right + targetPosition: left + 
type: custom + width: 388 + zIndex: 1 + - data: + desc: '' + isInIteration: true + selected: false + title: '' + type: iteration-start + draggable: false + height: 48 + id: iteration_nodestart + parentId: iteration_node + position: + x: 24 + y: 68 + positionAbsolute: + x: 708 + y: 350 + selectable: false + sourcePosition: right + targetPosition: left + type: custom-iteration-start + width: 44 + zIndex: 1002 + - data: + code: "\ndef main(arg1: int) -> dict:\n return {\n \"result\": [arg1,\ + \ arg1 * 2],\n }\n" + code_language: python3 + desc: '' + isInIteration: true + isInLoop: false + iteration_id: iteration_node + outputs: + result: + children: null + type: array[number] + selected: false + title: Generate Pair + type: code + variables: + - value_selector: + - iteration_node + - item + value_type: number + variable: arg1 + height: 54 + id: code_inner_node + parentId: iteration_node + position: + x: 128 + y: 68 + positionAbsolute: + x: 812 + y: 350 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 244 + zIndex: 1002 + - data: + desc: '' + outputs: + - value_selector: + - iteration_node + - output + value_type: array[array[number]] + variable: output + selected: false + title: End + type: end + height: 90 + id: end_node + position: + x: 1132 + y: 282 + positionAbsolute: + x: 1132 + y: 282 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 244 + viewport: + x: -476 + y: 3 + zoom: 1 + diff --git a/api/tests/fixtures/workflow/iteration_flatten_output_enabled_workflow.yml b/api/tests/fixtures/workflow/iteration_flatten_output_enabled_workflow.yml new file mode 100644 index 0000000000..0fc76df768 --- /dev/null +++ b/api/tests/fixtures/workflow/iteration_flatten_output_enabled_workflow.yml @@ -0,0 +1,258 @@ +app: + description: 'This workflow tests the iteration node with flatten_output=True. + + + It processes [1, 2, 3], outputs [item, item*2] for each iteration. 
+ + + With flatten_output=True (default), it should output: + + + ``` + + {"output": [1, 2, 2, 4, 3, 6]} + + ```' + icon: 🤖 + icon_background: '#FFEAD5' + mode: workflow + name: test_iteration_flatten_enabled + use_icon_as_answer_icon: false +dependencies: [] +kind: app +version: 0.3.1 +workflow: + conversation_variables: [] + environment_variables: [] + features: + file_upload: + enabled: false + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: code + id: start-source-code-target + source: start_node + sourceHandle: source + target: code_node + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: code + targetType: iteration + id: code-source-iteration-target + source: code_node + sourceHandle: source + target: iteration_node + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: true + isInLoop: false + iteration_id: iteration_node + sourceType: iteration-start + targetType: code + id: iteration-start-source-code-inner-target + source: iteration_nodestart + sourceHandle: source + target: code_inner_node + targetHandle: target + type: custom + zIndex: 1002 + - data: + isInIteration: false + isInLoop: false + sourceType: iteration + targetType: end + id: iteration-source-end-target + source: iteration_node + sourceHandle: source + target: end_node + targetHandle: target + type: custom + zIndex: 0 + nodes: + - data: + desc: '' + selected: false + title: Start + type: start + variables: [] + height: 54 + id: start_node + position: + x: 80 + y: 282 + positionAbsolute: + x: 80 + y: 282 + sourcePosition: right + targetPosition: left + type: custom + width: 244 + - data: + code: "\ndef main() -> dict:\n return {\n \"result\": [1, 2, 3],\n\ + \ }\n" + code_language: python3 + desc: '' + outputs: + result: + children: null + type: array[number] + selected: false + title: Generate Array + type: code + variables: [] + height: 54 + id: code_node + position: + x: 384 + y: 282 + positionAbsolute: + x: 384 + y: 282 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 244 + - data: + desc: '' + error_handle_mode: terminated + flatten_output: true + height: 178 + is_parallel: false + iterator_input_type: array[number] + iterator_selector: + - code_node + - result + output_selector: + - code_inner_node + - result + output_type: array[array[number]] + parallel_nums: 10 + selected: false + start_node_id: iteration_nodestart + title: Iteration with Flatten Enabled + type: iteration + width: 388 + height: 178 + id: iteration_node + position: + x: 684 + y: 282 + positionAbsolute: + x: 684 + y: 282 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 388 + zIndex: 1 + - data: + desc: '' + isInIteration: true + selected: false + title: '' + type: iteration-start + draggable: false + height: 48 + id: iteration_nodestart + parentId: iteration_node + position: + x: 24 + y: 68 + positionAbsolute: + x: 708 + y: 350 + selectable: false + sourcePosition: right + targetPosition: left + type: custom-iteration-start + width: 44 + zIndex: 1002 + - data: + code: "\ndef main(arg1: int) -> dict:\n return {\n \"result\": [arg1,\ + \ arg1 * 2],\n }\n" + code_language: python3 
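These two fixtures together pin down the iteration node's output contract: the inner "Generate Pair" code node returns `[item, item*2]` for each element of `[1, 2, 3]`, and the node is expected to emit the nested `[[1, 2], [2, 4], [3, 6]]` when `flatten_output: false`, or the one-level-flattened `[1, 2, 2, 4, 3, 6]` when `flatten_output: true` (the default). A minimal standalone sketch of that contract — plain Python with hypothetical names, not Dify's implementation:

```python
from collections.abc import Callable
from itertools import chain
from typing import Any


def run_iteration(
    items: list[Any],
    body: Callable[[Any], list[Any]],
    flatten_output: bool,
) -> dict[str, Any]:
    """Simulate the iteration node: run `body` once per item, optionally flattening one level."""
    per_item = [body(item) for item in items]  # nested: one result list per iteration
    output: list[Any] = list(chain.from_iterable(per_item)) if flatten_output else per_item
    return {"output": output}


def pair(item: int) -> list[int]:
    # Mirrors the inner "Generate Pair" code node in both fixtures.
    return [item, item * 2]


assert run_iteration([1, 2, 3], pair, flatten_output=False) == {"output": [[1, 2], [2, 4], [3, 6]]}
assert run_iteration([1, 2, 3], pair, flatten_output=True) == {"output": [1, 2, 2, 4, 3, 6]}
```

Run directly, the asserts reproduce exactly the expected outputs quoted in the two fixture descriptions.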
+ desc: '' + isInIteration: true + isInLoop: false + iteration_id: iteration_node + outputs: + result: + children: null + type: array[number] + selected: false + title: Generate Pair + type: code + variables: + - value_selector: + - iteration_node + - item + value_type: number + variable: arg1 + height: 54 + id: code_inner_node + parentId: iteration_node + position: + x: 128 + y: 68 + positionAbsolute: + x: 812 + y: 350 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 244 + zIndex: 1002 + - data: + desc: '' + outputs: + - value_selector: + - iteration_node + - output + value_type: array[number] + variable: output + selected: false + title: End + type: end + height: 90 + id: end_node + position: + x: 1132 + y: 282 + positionAbsolute: + x: 1132 + y: 282 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 244 + viewport: + x: -476 + y: 3 + zoom: 1 + diff --git a/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml b/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml new file mode 100644 index 0000000000..ffc6eb9120 --- /dev/null +++ b/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml @@ -0,0 +1,316 @@ +app: + description: 'This chatflow receives a sys.query, writes it into the `answer` variable, + and then outputs the `answer` variable. + + + `answer` is a conversation variable with a blank default value; it will be updated + in an iteration node. + + + if this chatflow works correctly, it will output the `sys.query` as the same.' + icon: 🤖 + icon_background: '#FFEAD5' + mode: advanced-chat + name: update-conversation-variable-in-iteration + use_icon_as_answer_icon: false +dependencies: [] +kind: app +version: 0.4.0 +workflow: + conversation_variables: + - description: '' + id: c30af82d-b2ec-417d-a861-4dd78584faa4 + name: answer + selector: + - conversation + - answer + value: '' + value_type: string + environment_variables: [] + features: + file_upload: + allowed_file_extensions: + - .JPG + - .JPEG + - .PNG + - .GIF + - .WEBP + - .SVG + allowed_file_types: + - image + allowed_file_upload_methods: + - local_file + - remote_url + enabled: false + fileUploadConfig: + audio_file_size_limit: 50 + batch_count_limit: 5 + file_size_limit: 15 + image_file_size_limit: 10 + video_file_size_limit: 100 + workflow_file_upload_limit: 10 + image: + enabled: false + number_limits: 3 + transfer_methods: + - local_file + - remote_url + number_limits: 3 + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + language: '' + voice: '' + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: code + id: 1759032354471-source-1759032363865-target + source: '1759032354471' + sourceHandle: source + target: '1759032363865' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: code + targetType: iteration + id: 1759032363865-source-1759032379989-target + source: '1759032363865' + sourceHandle: source + target: '1759032379989' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + sourceType: iteration-start + targetType: assigner + id: 1759032379989start-source-1759032394460-target + source: 
1759032379989start + sourceHandle: source + target: '1759032394460' + targetHandle: target + type: custom + zIndex: 1002 + - data: + isInIteration: false + isInLoop: false + sourceType: iteration + targetType: answer + id: 1759032379989-source-1759032410331-target + source: '1759032379989' + sourceHandle: source + target: '1759032410331' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + sourceType: assigner + targetType: code + id: 1759032394460-source-1759032476318-target + source: '1759032394460' + sourceHandle: source + target: '1759032476318' + targetHandle: target + type: custom + zIndex: 1002 + nodes: + - data: + selected: false + title: Start + type: start + variables: [] + height: 52 + id: '1759032354471' + position: + x: 30 + y: 302 + positionAbsolute: + x: 30 + y: 302 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + code: "\ndef main():\n return {\n \"result\": [1],\n }\n" + code_language: python3 + outputs: + result: + children: null + type: array[number] + selected: false + title: Code + type: code + variables: [] + height: 52 + id: '1759032363865' + position: + x: 332 + y: 302 + positionAbsolute: + x: 332 + y: 302 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + error_handle_mode: terminated + height: 204 + is_parallel: false + iterator_input_type: array[number] + iterator_selector: + - '1759032363865' + - result + output_selector: + - '1759032476318' + - result + output_type: array[string] + parallel_nums: 10 + selected: false + start_node_id: 1759032379989start + title: Iteration + type: iteration + width: 808 + height: 204 + id: '1759032379989' + position: + x: 634 + y: 302 + positionAbsolute: + x: 634 + y: 302 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 808 + zIndex: 1 + - data: + desc: '' + isInIteration: true + selected: false + title: '' + type: iteration-start + draggable: false + height: 48 + id: 1759032379989start + parentId: '1759032379989' + position: + x: 60 + y: 78 + positionAbsolute: + x: 694 + y: 380 + selectable: false + sourcePosition: right + targetPosition: left + type: custom-iteration-start + width: 44 + zIndex: 1002 + - data: + isInIteration: true + isInLoop: false + items: + - input_type: variable + operation: over-write + value: + - sys + - query + variable_selector: + - conversation + - answer + write_mode: over-write + iteration_id: '1759032379989' + selected: false + title: Variable Assigner + type: assigner + version: '2' + height: 84 + id: '1759032394460' + parentId: '1759032379989' + position: + x: 204 + y: 60 + positionAbsolute: + x: 838 + y: 362 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + zIndex: 1002 + - data: + answer: '{{#conversation.answer#}}' + selected: false + title: Answer + type: answer + variables: [] + height: 104 + id: '1759032410331' + position: + x: 1502 + y: 302 + positionAbsolute: + x: 1502 + y: 302 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + code: "\ndef main():\n return {\n \"result\": '',\n }\n" + code_language: python3 + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + outputs: + result: + children: null + type: string + selected: false + title: Code 2 + type: code + variables: [] + height: 52 + id: '1759032476318' + parentId: '1759032379989' + position: + x: 506 + y: 76 + positionAbsolute: 
+ x: 1140 + y: 378 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + zIndex: 1002 + viewport: + x: 120.39999999999998 + y: 85.20000000000005 + zoom: 0.7 + rag_pipeline_variables: [] diff --git a/api/tests/integration_tests/conftest.py b/api/tests/integration_tests/conftest.py index 9dc7b76e04..4395a9815a 100644 --- a/api/tests/integration_tests/conftest.py +++ b/api/tests/integration_tests/conftest.py @@ -58,6 +58,7 @@ def setup_account(request) -> Generator[Account, None, None]: name=name, password=secrets.token_hex(16), ip_address="localhost", + language="en-US", ) with _CACHED_APP.test_request_context(): diff --git a/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py b/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py index c8d353ad0a..498ac56d5d 100644 --- a/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py +++ b/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py @@ -11,8 +11,8 @@ from controllers.console.app import completion as completion_api from controllers.console.app import message as message_api from controllers.console.app import wraps from libs.datetime_utils import naive_utc_now -from models import Account, App, Tenant -from models.account import TenantAccountRole +from models import App, Tenant +from models.account import Account, TenantAccountJoin, TenantAccountRole from models.model import AppMode from services.app_generate_service import AppGenerateService @@ -25,29 +25,42 @@ class TestChatMessageApiPermissions: """Create a mock App model for testing.""" app = App() app.id = str(uuid.uuid4()) - app.mode = AppMode.CHAT.value + app.mode = AppMode.CHAT app.tenant_id = str(uuid.uuid4()) app.status = "normal" return app @pytest.fixture - def mock_account(self): + def mock_account(self, monkeypatch: pytest.MonkeyPatch): """Create a mock Account for testing.""" - account = Account() - account.id = str(uuid.uuid4()) - account.name = "Test User" - account.email = "test@example.com" + account = Account( + name="Test User", + email="test@example.com", + ) account.last_active_at = naive_utc_now() account.created_at = naive_utc_now() account.updated_at = naive_utc_now() + account.id = str(uuid.uuid4()) # Create mock tenant - tenant = Tenant() + tenant = Tenant(name="Test Tenant") tenant.id = str(uuid.uuid4()) - tenant.name = "Test Tenant" - account._current_tenant = tenant + mock_session_instance = mock.Mock() + + mock_tenant_join = TenantAccountJoin(role=TenantAccountRole.OWNER) + monkeypatch.setattr(mock_session_instance, "scalar", mock.Mock(return_value=mock_tenant_join)) + + mock_scalars_result = mock.Mock() + mock_scalars_result.one.return_value = tenant + monkeypatch.setattr(mock_session_instance, "scalars", mock.Mock(return_value=mock_scalars_result)) + + mock_session_context = mock.Mock() + mock_session_context.__enter__.return_value = mock_session_instance + monkeypatch.setattr("models.account.Session", lambda _, expire_on_commit: mock_session_context) + + account.current_tenant = tenant return account @pytest.mark.parametrize( diff --git a/api/tests/integration_tests/controllers/console/app/test_description_validation.py b/api/tests/integration_tests/controllers/console/app/test_description_validation.py index 2d0ceac760..8160807e48 100644 --- a/api/tests/integration_tests/controllers/console/app/test_description_validation.py +++ 
b/api/tests/integration_tests/controllers/console/app/test_description_validation.py @@ -18,124 +18,87 @@ class TestAppDescriptionValidationUnit: """Unit tests for description validation function""" def test_validate_description_length_function(self): - """Test the _validate_description_length function directly""" - from controllers.console.app.app import _validate_description_length + """Test the validate_description_length function directly""" + from libs.validators import validate_description_length # Test valid descriptions - assert _validate_description_length("") == "" - assert _validate_description_length("x" * 400) == "x" * 400 - assert _validate_description_length(None) is None + assert validate_description_length("") == "" + assert validate_description_length("x" * 400) == "x" * 400 + assert validate_description_length(None) is None # Test invalid descriptions with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 401) + validate_description_length("x" * 401) assert "Description cannot exceed 400 characters." in str(exc_info.value) with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 500) + validate_description_length("x" * 500) assert "Description cannot exceed 400 characters." in str(exc_info.value) with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 1000) + validate_description_length("x" * 1000) assert "Description cannot exceed 400 characters." in str(exc_info.value) - def test_validation_consistency_with_dataset(self): - """Test that App and Dataset validation functions are consistent""" - from controllers.console.app.app import _validate_description_length as app_validate - from controllers.console.datasets.datasets import _validate_description_length as dataset_validate - from controllers.service_api.dataset.dataset import _validate_description_length as service_dataset_validate - - # Test same valid inputs - valid_desc = "x" * 400 - assert app_validate(valid_desc) == dataset_validate(valid_desc) == service_dataset_validate(valid_desc) - assert app_validate("") == dataset_validate("") == service_dataset_validate("") - assert app_validate(None) == dataset_validate(None) == service_dataset_validate(None) - - # Test same invalid inputs produce same error - invalid_desc = "x" * 401 - - app_error = None - dataset_error = None - service_dataset_error = None - - try: - app_validate(invalid_desc) - except ValueError as e: - app_error = str(e) - - try: - dataset_validate(invalid_desc) - except ValueError as e: - dataset_error = str(e) - - try: - service_dataset_validate(invalid_desc) - except ValueError as e: - service_dataset_error = str(e) - - assert app_error == dataset_error == service_dataset_error - assert app_error == "Description cannot exceed 400 characters." 
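The deleted consistency test existed only because `_validate_description_length` was duplicated across three controllers; the suite now exercises a single shared helper, `libs.validators.validate_description_length`. The remaining tests fully determine its contract: `None` and `""` pass through unchanged, anything up to 400 characters (measured by `len`, so the 400-character Chinese string passes too) is returned as-is, and longer input raises `ValueError`. A minimal implementation consistent with those assertions — a sketch, not necessarily the actual helper's body:

```python
def validate_description_length(description: str | None) -> str | None:
    # None and empty strings are allowed; only over-long text is rejected.
    if description is not None and len(description) > 400:
        raise ValueError("Description cannot exceed 400 characters.")
    return description
```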
- def test_boundary_values(self): """Test boundary values for description validation""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test exact boundary exactly_400 = "x" * 400 - assert _validate_description_length(exactly_400) == exactly_400 + assert validate_description_length(exactly_400) == exactly_400 # Test just over boundary just_over_400 = "x" * 401 with pytest.raises(ValueError): - _validate_description_length(just_over_400) + validate_description_length(just_over_400) # Test just under boundary just_under_400 = "x" * 399 - assert _validate_description_length(just_under_400) == just_under_400 + assert validate_description_length(just_under_400) == just_under_400 def test_edge_cases(self): """Test edge cases for description validation""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test None input - assert _validate_description_length(None) is None + assert validate_description_length(None) is None # Test empty string - assert _validate_description_length("") == "" + assert validate_description_length("") == "" # Test single character - assert _validate_description_length("a") == "a" + assert validate_description_length("a") == "a" # Test unicode characters unicode_desc = "测试" * 200 # 400 characters in Chinese - assert _validate_description_length(unicode_desc) == unicode_desc + assert validate_description_length(unicode_desc) == unicode_desc # Test unicode over limit unicode_over = "测试" * 201 # 402 characters with pytest.raises(ValueError): - _validate_description_length(unicode_over) + validate_description_length(unicode_over) def test_whitespace_handling(self): """Test how validation handles whitespace""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test description with spaces spaces_400 = " " * 400 - assert _validate_description_length(spaces_400) == spaces_400 + assert validate_description_length(spaces_400) == spaces_400 # Test description with spaces over limit spaces_401 = " " * 401 with pytest.raises(ValueError): - _validate_description_length(spaces_401) + validate_description_length(spaces_401) # Test mixed content mixed_400 = "a" * 200 + " " * 200 - assert _validate_description_length(mixed_400) == mixed_400 + assert validate_description_length(mixed_400) == mixed_400 # Test mixed over limit mixed_401 = "a" * 200 + " " * 201 with pytest.raises(ValueError): - _validate_description_length(mixed_401) + validate_description_length(mixed_401) if __name__ == "__main__": diff --git a/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py b/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py index ca4d452963..04945e57a0 100644 --- a/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py +++ b/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py @@ -9,8 +9,8 @@ from flask.testing import FlaskClient from controllers.console.app import model_config as model_config_api from controllers.console.app import wraps from libs.datetime_utils import naive_utc_now -from models import Account, App, Tenant -from models.account import TenantAccountRole +from models import App, Tenant +from models.account import Account, TenantAccountJoin, TenantAccountRole from models.model import AppMode from 
services.app_model_config_service import AppModelConfigService @@ -23,30 +23,40 @@ class TestModelConfigResourcePermissions: """Create a mock App model for testing.""" app = App() app.id = str(uuid.uuid4()) - app.mode = AppMode.CHAT.value + app.mode = AppMode.CHAT app.tenant_id = str(uuid.uuid4()) app.status = "normal" app.app_model_config_id = str(uuid.uuid4()) return app @pytest.fixture - def mock_account(self): + def mock_account(self, monkeypatch: pytest.MonkeyPatch): """Create a mock Account for testing.""" - account = Account() + account = Account(name="Test User", email="test@example.com") account.id = str(uuid.uuid4()) - account.name = "Test User" - account.email = "test@example.com" account.last_active_at = naive_utc_now() account.created_at = naive_utc_now() account.updated_at = naive_utc_now() # Create mock tenant - tenant = Tenant() + tenant = Tenant(name="Test Tenant") tenant.id = str(uuid.uuid4()) - tenant.name = "Test Tenant" - account._current_tenant = tenant + mock_session_instance = mock.Mock() + + mock_tenant_join = TenantAccountJoin(role=TenantAccountRole.OWNER) + monkeypatch.setattr(mock_session_instance, "scalar", mock.Mock(return_value=mock_tenant_join)) + + mock_scalars_result = mock.Mock() + mock_scalars_result.one.return_value = tenant + monkeypatch.setattr(mock_session_instance, "scalars", mock.Mock(return_value=mock_scalars_result)) + + mock_session_context = mock.Mock() + mock_session_context.__enter__.return_value = mock_session_instance + monkeypatch.setattr("models.account.Session", lambda _, expire_on_commit: mock_session_context) + + account.current_tenant = tenant return account @pytest.mark.parametrize( diff --git a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py index aeee882750..f3a5ba0d11 100644 --- a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py +++ b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py @@ -542,7 +542,7 @@ class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase): index=1, node_execution_id=str(uuid.uuid4()), node_id=self._node_id, - node_type=NodeType.LLM.value, + node_type=NodeType.LLM, title="Test Node", inputs='{"input": "test input"}', process_data='{"test_var": "process_value", "other_var": "other_process"}', diff --git a/api/tests/integration_tests/tools/api_tool/test_api_tool.py b/api/tests/integration_tests/tools/api_tool/test_api_tool.py index 7c1a200c8f..e637530265 100644 --- a/api/tests/integration_tests/tools/api_tool/test_api_tool.py +++ b/api/tests/integration_tests/tools/api_tool/test_api_tool.py @@ -36,7 +36,7 @@ def test_api_tool(setup_http_mock): entity=ToolEntity( identity=ToolIdentity(provider="", author="", name="", label=I18nObject(en_US="test tool")), ), - api_bundle=ApiToolBundle(**tool_bundle), + api_bundle=ApiToolBundle.model_validate(tool_bundle), runtime=ToolRuntime(tenant_id="", credentials={"auth_type": "none"}), provider_id="test_tool", ) diff --git a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py index 6d2aff5197..3984078ee9 100644 --- a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py +++ b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py @@ -1,15 +1,15 @@ import os from collections import UserDict +from typing import Any from unittest.mock import MagicMock import pytest from _pytest.monkeypatch import MonkeyPatch -from pymochow import MochowClient 
# type: ignore -from pymochow.model.database import Database # type: ignore -from pymochow.model.enum import IndexState, IndexType, MetricType, ReadConsistency, TableState # type: ignore -from pymochow.model.schema import HNSWParams, VectorIndex # type: ignore -from pymochow.model.table import Table # type: ignore -from requests.adapters import HTTPAdapter +from pymochow import MochowClient +from pymochow.model.database import Database +from pymochow.model.enum import IndexState, IndexType, MetricType, ReadConsistency, TableState +from pymochow.model.schema import HNSWParams, VectorIndex +from pymochow.model.table import Table class AttrDict(UserDict): @@ -21,7 +21,7 @@ class MockBaiduVectorDBClass: def mock_vector_db_client( self, config=None, - adapter: HTTPAdapter | None = None, + adapter: Any | None = None, ): self.conn = MagicMock() self._config = MagicMock() diff --git a/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py b/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py index 9706c52455..9e24672317 100644 --- a/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py +++ b/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py @@ -44,25 +44,25 @@ class MockClient: "hits": [ { "_source": { - Field.CONTENT_KEY.value: "abcdef", - Field.VECTOR.value: [1, 2], - Field.METADATA_KEY.value: {}, + Field.CONTENT_KEY: "abcdef", + Field.VECTOR: [1, 2], + Field.METADATA_KEY: {}, }, "_score": 1.0, }, { "_source": { - Field.CONTENT_KEY.value: "123456", - Field.VECTOR.value: [2, 2], - Field.METADATA_KEY.value: {}, + Field.CONTENT_KEY: "123456", + Field.VECTOR: [2, 2], + Field.METADATA_KEY: {}, }, "_score": 0.9, }, { "_source": { - Field.CONTENT_KEY.value: "a1b2c3", - Field.VECTOR.value: [3, 2], - Field.METADATA_KEY.value: {}, + Field.CONTENT_KEY: "a1b2c3", + Field.VECTOR: [3, 2], + Field.METADATA_KEY: {}, }, "_score": 0.8, }, diff --git a/api/tests/integration_tests/vdb/__mock/tcvectordb.py b/api/tests/integration_tests/vdb/__mock/tcvectordb.py index e0b908cece..8f87d6a073 100644 --- a/api/tests/integration_tests/vdb/__mock/tcvectordb.py +++ b/api/tests/integration_tests/vdb/__mock/tcvectordb.py @@ -1,18 +1,17 @@ import os -from typing import Union +from typing import Any, Union import pytest from _pytest.monkeypatch import MonkeyPatch -from requests.adapters import HTTPAdapter -from tcvectordb import RPCVectorDBClient # type: ignore +from tcvectordb import RPCVectorDBClient from tcvectordb.model import enum from tcvectordb.model.collection import FilterIndexConfig -from tcvectordb.model.document import AnnSearch, Document, Filter, KeywordSearch, Rerank # type: ignore -from tcvectordb.model.enum import ReadConsistency # type: ignore -from tcvectordb.model.index import FilterIndex, HNSWParams, Index, IndexField, VectorIndex # type: ignore +from tcvectordb.model.document import AnnSearch, Document, Filter, KeywordSearch, Rerank +from tcvectordb.model.enum import ReadConsistency +from tcvectordb.model.index import FilterIndex, HNSWParams, Index, IndexField, VectorIndex from tcvectordb.rpc.model.collection import RPCCollection from tcvectordb.rpc.model.database import RPCDatabase -from xinference_client.types import Embedding # type: ignore +from xinference_client.types import Embedding class MockTcvectordbClass: @@ -23,7 +22,7 @@ class MockTcvectordbClass: key="", read_consistency: ReadConsistency = ReadConsistency.EVENTUAL_CONSISTENCY, timeout=10, - adapter: HTTPAdapter | None = None, + adapter: Any | None = None, pool_size: int = 2, proxies: dict | None = None, 
password: str | None = None, diff --git a/api/tests/integration_tests/vdb/__mock/vikingdb.py b/api/tests/integration_tests/vdb/__mock/vikingdb.py index 3ad72e5550..289c515b85 100644 --- a/api/tests/integration_tests/vdb/__mock/vikingdb.py +++ b/api/tests/integration_tests/vdb/__mock/vikingdb.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock import pytest from _pytest.monkeypatch import MonkeyPatch -from volcengine.viking_db import ( # type: ignore +from volcengine.viking_db import ( Collection, Data, DistanceType, @@ -40,13 +40,13 @@ class MockVikingDBClass: collection_name=collection_name, description="Collection For Dify", viking_db_service=self._viking_db_service, - primary_key=vdb_Field.PRIMARY_KEY.value, + primary_key=vdb_Field.PRIMARY_KEY, fields=[ - Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True), - Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text), - Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=768), + Field(field_name=vdb_Field.PRIMARY_KEY, field_type=FieldType.String, is_primary_key=True), + Field(field_name=vdb_Field.METADATA_KEY, field_type=FieldType.String), + Field(field_name=vdb_Field.GROUP_KEY, field_type=FieldType.String), + Field(field_name=vdb_Field.CONTENT_KEY, field_type=FieldType.Text), + Field(field_name=vdb_Field.VECTOR, field_type=FieldType.Vector, dim=768), ], indexes=[ Index( @@ -71,7 +71,7 @@ class MockVikingDBClass: return Collection( collection_name=collection_name, description=description, - primary_key=vdb_Field.PRIMARY_KEY.value, + primary_key=vdb_Field.PRIMARY_KEY, viking_db_service=self._viking_db_service, fields=fields, ) @@ -126,11 +126,11 @@ class MockVikingDBClass: def fetch_data(self, id: Union[str, list[str], int, list[int]]): return Data( fields={ - vdb_Field.GROUP_KEY.value: "test_group", - vdb_Field.METADATA_KEY.value: "{}", - vdb_Field.CONTENT_KEY.value: "content", - vdb_Field.PRIMARY_KEY.value: id, - vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + vdb_Field.GROUP_KEY: "test_group", + vdb_Field.METADATA_KEY: "{}", + vdb_Field.CONTENT_KEY: "content", + vdb_Field.PRIMARY_KEY: id, + vdb_Field.VECTOR: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], }, id=id, ) @@ -151,16 +151,16 @@ class MockVikingDBClass: return [ Data( fields={ - vdb_Field.GROUP_KEY.value: "test_group", - vdb_Field.METADATA_KEY.value: '\ + vdb_Field.GROUP_KEY: "test_group", + vdb_Field.METADATA_KEY: '\ {"source": "/var/folders/ml/xxx/xxx.txt", \ "document_id": "test_document_id", \ "dataset_id": "test_dataset_id", \ "doc_id": "test_id", \ "doc_hash": "test_hash"}', - vdb_Field.CONTENT_KEY.value: "content", - vdb_Field.PRIMARY_KEY.value: "test_id", - vdb_Field.VECTOR.value: vector, + vdb_Field.CONTENT_KEY: "content", + vdb_Field.PRIMARY_KEY: "test_id", + vdb_Field.VECTOR: vector, }, id="test_id", score=0.10, @@ -173,16 +173,16 @@ class MockVikingDBClass: return [ Data( fields={ - vdb_Field.GROUP_KEY.value: "test_group", - vdb_Field.METADATA_KEY.value: '\ + vdb_Field.GROUP_KEY: "test_group", + vdb_Field.METADATA_KEY: '\ {"source": "/var/folders/ml/xxx/xxx.txt", \ "document_id": "test_document_id", \ "dataset_id": "test_dataset_id", \ "doc_id": "test_id", \ "doc_hash": "test_hash"}', - vdb_Field.CONTENT_KEY.value: "content", - vdb_Field.PRIMARY_KEY.value: 
"test_id", - vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + vdb_Field.CONTENT_KEY: "content", + vdb_Field.PRIMARY_KEY: "test_id", + vdb_Field.VECTOR: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], }, id="test_id", score=0.10, diff --git a/api/tests/integration_tests/vdb/opensearch/test_opensearch.py b/api/tests/integration_tests/vdb/opensearch/test_opensearch.py index 2d44dd2924..210dee4c36 100644 --- a/api/tests/integration_tests/vdb/opensearch/test_opensearch.py +++ b/api/tests/integration_tests/vdb/opensearch/test_opensearch.py @@ -129,8 +129,8 @@ class TestOpenSearchVector: "hits": [ { "_source": { - Field.CONTENT_KEY.value: get_example_text(), - Field.METADATA_KEY.value: {"document_id": self.example_doc_id}, + Field.CONTENT_KEY: get_example_text(), + Field.METADATA_KEY: {"document_id": self.example_doc_id}, }, "_score": 1.0, } @@ -182,6 +182,28 @@ class TestOpenSearchVector: assert len(ids) == 1 assert ids[0] == "mock_id" + def test_delete_nonexistent_index(self): + """Test deleting a non-existent index.""" + # Create a vector instance with a non-existent collection name + self.vector._client.indices.exists.return_value = False + + # Should not raise an exception + self.vector.delete() + + # Verify that exists was called but delete was not + self.vector._client.indices.exists.assert_called_once_with(index=self.collection_name.lower()) + self.vector._client.indices.delete.assert_not_called() + + def test_delete_existing_index(self): + """Test deleting an existing index.""" + self.vector._client.indices.exists.return_value = True + + self.vector.delete() + + # Verify both exists and delete were called + self.vector._client.indices.exists.assert_called_once_with(index=self.collection_name.lower()) + self.vector._client.indices.delete.assert_called_once_with(index=self.collection_name.lower()) + @pytest.mark.usefixtures("setup_mock_redis") class TestOpenSearchVectorWithRedis: diff --git a/api/tests/integration_tests/workflow/nodes/test_code.py b/api/tests/integration_tests/workflow/nodes/test_code.py index e2f3a74bf9..78878cdeef 100644 --- a/api/tests/integration_tests/workflow/nodes/test_code.py +++ b/api/tests/integration_tests/workflow/nodes/test_code.py @@ -1,21 +1,22 @@ import time import uuid -from os import getenv import pytest +from configs import dify_config from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.node_events import NodeRunResult from core.workflow.nodes.code.code_node import CodeNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock -CODE_MAX_STRING_LENGTH = int(getenv("CODE_MAX_STRING_LENGTH", "10000")) +CODE_MAX_STRING_LENGTH = dify_config.CODE_MAX_STRING_LENGTH def init_code_node(code_config: dict): diff --git a/api/tests/integration_tests/workflow/nodes/test_http.py b/api/tests/integration_tests/workflow/nodes/test_http.py index ea99beacaa..2367990d3e 100644 --- a/api/tests/integration_tests/workflow/nodes/test_http.py +++ 
b/api/tests/integration_tests/workflow/nodes/test_http.py @@ -5,10 +5,11 @@ from urllib.parse import urlencode import pytest from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.nodes.http_request.node import HttpRequestNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.http import setup_http_mock @@ -174,13 +175,13 @@ def test_custom_authorization_header(setup_http_mock): @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True) def test_custom_auth_with_empty_api_key_does_not_set_header(setup_http_mock): """Test: In custom authentication mode, when the api_key is empty, no header should be set.""" - from core.workflow.entities.variable_pool import VariablePool from core.workflow.nodes.http_request.entities import ( HttpRequestNodeAuthorization, HttpRequestNodeData, HttpRequestNodeTimeout, ) from core.workflow.nodes.http_request.executor import Executor + from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable # Create variable pool diff --git a/api/tests/integration_tests/workflow/nodes/test_llm.py b/api/tests/integration_tests/workflow/nodes/test_llm.py index 31281cd8ad..3b16c3920b 100644 --- a/api/tests/integration_tests/workflow/nodes/test_llm.py +++ b/api/tests/integration_tests/workflow/nodes/test_llm.py @@ -6,12 +6,13 @@ from unittest.mock import MagicMock, patch from core.app.entities.app_invoke_entities import InvokeFrom from core.llm_generator.output_parser.structured_output import _parse_structured_output -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.node_events import StreamCompletedEvent from core.workflow.nodes.llm.node import LLMNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from extensions.ext_database import db from models.enums import UserFrom diff --git a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py index 76918f689f..9d9102cee2 100644 --- a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py +++ b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py @@ -5,11 +5,12 @@ from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom from core.model_runtime.entities import AssistantPromptMessage -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.parameter_extractor.parameter_extractor_node import ParameterExtractorNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable 
import SystemVariable from extensions.ext_database import db from models.enums import UserFrom diff --git a/api/tests/integration_tests/workflow/nodes/test_template_transform.py b/api/tests/integration_tests/workflow/nodes/test_template_transform.py index 53252c7f2e..285387b817 100644 --- a/api/tests/integration_tests/workflow/nodes/test_template_transform.py +++ b/api/tests/integration_tests/workflow/nodes/test_template_transform.py @@ -4,11 +4,12 @@ import uuid import pytest from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.template_transform.template_transform_node import TemplateTransformNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock diff --git a/api/tests/integration_tests/workflow/nodes/test_tool.py b/api/tests/integration_tests/workflow/nodes/test_tool.py index 16d44d1eaf..8dd8150b1c 100644 --- a/api/tests/integration_tests/workflow/nodes/test_tool.py +++ b/api/tests/integration_tests/workflow/nodes/test_tool.py @@ -4,12 +4,13 @@ from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom from core.tools.utils.configuration import ToolParameterConfigurationManager -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.node_events import StreamCompletedEvent from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.tool.tool_node import ToolNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom diff --git a/api/tests/test_containers_integration_tests/conftest.py b/api/tests/test_containers_integration_tests/conftest.py index 243c8d1d62..180ee1c963 100644 --- a/api/tests/test_containers_integration_tests/conftest.py +++ b/api/tests/test_containers_integration_tests/conftest.py @@ -18,6 +18,7 @@ from flask.testing import FlaskClient from sqlalchemy import Engine, text from sqlalchemy.orm import Session from testcontainers.core.container import DockerContainer +from testcontainers.core.network import Network from testcontainers.core.waiting_utils import wait_for_logs from testcontainers.postgres import PostgresContainer from testcontainers.redis import RedisContainer @@ -41,6 +42,7 @@ class DifyTestContainers: def __init__(self): """Initialize container management with default configurations.""" + self.network: Network | None = None self.postgres: PostgresContainer | None = None self.redis: RedisContainer | None = None self.dify_sandbox: DockerContainer | None = None @@ -62,12 +64,18 @@ class DifyTestContainers: logger.info("Starting test containers for Dify integration tests...") + # Create Docker network for container communication + logger.info("Creating Docker network for container communication...") + self.network = Network() + self.network.create() + logger.info("Docker network 
created successfully with name: %s", self.network.name) + # Start PostgreSQL container for main application database # PostgreSQL is used for storing user data, workflows, and application state logger.info("Initializing PostgreSQL container...") self.postgres = PostgresContainer( image="postgres:14-alpine", - ) + ).with_network(self.network) self.postgres.start() db_host = self.postgres.get_container_host_ip() db_port = self.postgres.get_exposed_port(5432) @@ -137,7 +145,7 @@ class DifyTestContainers: # Start Redis container for caching and session management # Redis is used for storing session data, cache entries, and temporary data logger.info("Initializing Redis container...") - self.redis = RedisContainer(image="redis:6-alpine", port=6379) + self.redis = RedisContainer(image="redis:6-alpine", port=6379).with_network(self.network) self.redis.start() redis_host = self.redis.get_container_host_ip() redis_port = self.redis.get_exposed_port(6379) @@ -153,7 +161,7 @@ class DifyTestContainers: # Start Dify Sandbox container for code execution environment # Dify Sandbox provides a secure environment for executing user code logger.info("Initializing Dify Sandbox container...") - self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest") + self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest").with_network(self.network) self.dify_sandbox.with_exposed_ports(8194) self.dify_sandbox.env = { "API_KEY": "test_api_key", @@ -173,22 +181,28 @@ class DifyTestContainers: # Start Dify Plugin Daemon container for plugin management # Dify Plugin Daemon provides plugin lifecycle management and execution logger.info("Initializing Dify Plugin Daemon container...") - self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local") + self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local").with_network( + self.network + ) self.dify_plugin_daemon.with_exposed_ports(5002) + # Get container internal network addresses + postgres_container_name = self.postgres.get_wrapped_container().name + redis_container_name = self.redis.get_wrapped_container().name + self.dify_plugin_daemon.env = { - "DB_HOST": db_host, - "DB_PORT": str(db_port), + "DB_HOST": postgres_container_name, # Use container name for internal network communication + "DB_PORT": "5432", # Use internal port "DB_USERNAME": self.postgres.username, "DB_PASSWORD": self.postgres.password, "DB_DATABASE": "dify_plugin", - "REDIS_HOST": redis_host, - "REDIS_PORT": str(redis_port), + "REDIS_HOST": redis_container_name, # Use container name for internal network communication + "REDIS_PORT": "6379", # Use internal port "REDIS_PASSWORD": "", "SERVER_PORT": "5002", "SERVER_KEY": "test_plugin_daemon_key", "MAX_PLUGIN_PACKAGE_SIZE": "52428800", "PPROF_ENABLED": "false", - "DIFY_INNER_API_URL": f"http://{db_host}:5001", + "DIFY_INNER_API_URL": f"http://{postgres_container_name}:5001", "DIFY_INNER_API_KEY": "test_inner_api_key", "PLUGIN_REMOTE_INSTALLING_HOST": "0.0.0.0", "PLUGIN_REMOTE_INSTALLING_PORT": "5003", @@ -253,6 +267,15 @@ class DifyTestContainers: # Log error but don't fail the test cleanup logger.warning("Failed to stop container %s: %s", container, e) + # Stop and remove the network + if self.network: + try: + logger.info("Removing Docker network...") + self.network.remove() + logger.info("Successfully removed Docker network") + except Exception as e: + logger.warning("Failed to remove Docker network: %s", e) + self._containers_started = False logger.info("All 
test containers stopped and cleaned up successfully") diff --git a/api/tests/test_containers_integration_tests/services/test_account_service.py b/api/tests/test_containers_integration_tests/services/test_account_service.py index c98406d845..4d4e77a802 100644 --- a/api/tests/test_containers_integration_tests/services/test_account_service.py +++ b/api/tests/test_containers_integration_tests/services/test_account_service.py @@ -8,7 +8,7 @@ from werkzeug.exceptions import Unauthorized from configs import dify_config from controllers.console.error import AccountNotFound, NotAllowedCreateWorkspace -from models.account import AccountStatus, TenantAccountJoin +from models import AccountStatus, TenantAccountJoin from services.account_service import AccountService, RegisterService, TenantService, TokenPair from services.errors.account import ( AccountAlreadyInTenantError, @@ -16,6 +16,7 @@ from services.errors.account import ( AccountPasswordError, AccountRegisterError, CurrentPasswordIncorrectError, + TenantNotFoundError, ) from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkspacesLimitExceededError @@ -63,7 +64,7 @@ class TestAccountService: password=password, ) assert account.email == email - assert account.status == AccountStatus.ACTIVE.value + assert account.status == AccountStatus.ACTIVE # Login with correct password logged_in = AccountService.authenticate(email, password) @@ -184,7 +185,7 @@ class TestAccountService: ) # Ban the account - account.status = AccountStatus.BANNED.value + account.status = AccountStatus.BANNED from extensions.ext_database import db db.session.commit() @@ -268,14 +269,14 @@ class TestAccountService: interface_language="en-US", password=password, ) - account.status = AccountStatus.PENDING.value + account.status = AccountStatus.PENDING from extensions.ext_database import db db.session.commit() # Authenticate should activate the account authenticated_account = AccountService.authenticate(email, password) - assert authenticated_account.status == AccountStatus.ACTIVE.value + assert authenticated_account.status == AccountStatus.ACTIVE assert authenticated_account.initialized_at is not None def test_update_account_password_success(self, db_session_with_containers, mock_external_service_dependencies): @@ -469,7 +470,7 @@ class TestAccountService: # Verify integration was created from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider="new-google").first() assert integration is not None @@ -504,7 +505,7 @@ class TestAccountService: # Verify integration was updated from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = ( db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider="exists-google").first() @@ -538,7 +539,7 @@ class TestAccountService: from extensions.ext_database import db db.session.refresh(account) - assert account.status == AccountStatus.CLOSED.value + assert account.status == AccountStatus.CLOSED def test_update_account_fields(self, db_session_with_containers, mock_external_service_dependencies): """ @@ -678,7 +679,7 @@ class TestAccountService: interface_language="en-US", password=password, ) - account.status = AccountStatus.PENDING.value + account.status = AccountStatus.PENDING from extensions.ext_database import db db.session.commit() @@ -687,7 +688,7 @@ class TestAccountService: 
token_pair = AccountService.login(account) db.session.refresh(account) - assert account.status == AccountStatus.ACTIVE.value + assert account.status == AccountStatus.ACTIVE def test_logout(self, db_session_with_containers, mock_external_service_dependencies): """ @@ -859,7 +860,7 @@ class TestAccountService: ) # Ban the account - account.status = AccountStatus.BANNED.value + account.status = AccountStatus.BANNED from extensions.ext_database import db db.session.commit() @@ -989,7 +990,7 @@ class TestAccountService: ) # Ban the account - account.status = AccountStatus.BANNED.value + account.status = AccountStatus.BANNED from extensions.ext_database import db db.session.commit() @@ -1414,7 +1415,7 @@ class TestTenantService: ) # Try to get current tenant (should fail) - with pytest.raises(AttributeError): + with pytest.raises((AttributeError, TenantNotFoundError)): TenantService.get_current_tenant_by_account(account) def test_switch_tenant_success(self, db_session_with_containers, mock_external_service_dependencies): @@ -2298,11 +2299,12 @@ class TestRegisterService: name=admin_name, password=admin_password, ip_address=ip_address, + language="en-US", ) # Verify account was created from extensions.ext_database import db - from models.account import Account + from models import Account from models.model import DifySetup account = db.session.query(Account).filter_by(email=admin_email).first() @@ -2347,11 +2349,12 @@ class TestRegisterService: name=admin_name, password=admin_password, ip_address=ip_address, + language="en-US", ) # Verify no entities were created (rollback worked) from extensions.ext_database import db - from models.account import Account, Tenant, TenantAccountJoin + from models import Account, Tenant, TenantAccountJoin from models.model import DifySetup account = db.session.query(Account).filter_by(email=admin_email).first() @@ -2445,7 +2448,7 @@ class TestRegisterService: # Verify OAuth integration was created from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider=provider).first() assert integration is not None @@ -2471,7 +2474,7 @@ class TestRegisterService: mock_external_service_dependencies["billing_service"].is_email_in_freeze.return_value = False # Execute registration with pending status - from models.account import AccountStatus + from models import AccountStatus account = RegisterService.register( email=email, @@ -2660,7 +2663,7 @@ class TestRegisterService: # Verify new account was created with pending status from extensions.ext_database import db - from models.account import Account, TenantAccountJoin + from models import Account, TenantAccountJoin new_account = db.session.query(Account).filter_by(email=new_member_email).first() assert new_account is not None diff --git a/api/tests/test_containers_integration_tests/services/test_agent_service.py b/api/tests/test_containers_integration_tests/services/test_agent_service.py index c572ddc925..ca513319b2 100644 --- a/api/tests/test_containers_integration_tests/services/test_agent_service.py +++ b/api/tests/test_containers_integration_tests/services/test_agent_service.py @@ -5,7 +5,7 @@ import pytest from faker import Faker from core.plugin.impl.exc import PluginDaemonClientSideError -from models.account import Account +from models import Account from models.model import AppModelConfig, Conversation, EndUser, Message, MessageAgentThought from services.account_service 
import AccountService, TenantService from services.agent_service import AgentService diff --git a/api/tests/test_containers_integration_tests/services/test_annotation_service.py b/api/tests/test_containers_integration_tests/services/test_annotation_service.py index 3cb7424df8..2b03ec1c26 100644 --- a/api/tests/test_containers_integration_tests/services/test_annotation_service.py +++ b/api/tests/test_containers_integration_tests/services/test_annotation_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from werkzeug.exceptions import NotFound -from models.account import Account +from models import Account from models.model import MessageAnnotation from services.annotation_service import AppAnnotationService from services.app_service import AppService @@ -25,9 +25,7 @@ class TestAnnotationService: patch("services.annotation_service.enable_annotation_reply_task") as mock_enable_task, patch("services.annotation_service.disable_annotation_reply_task") as mock_disable_task, patch("services.annotation_service.batch_import_annotations_task") as mock_batch_import_task, - patch( - "services.annotation_service.current_user", create_autospec(Account, instance=True) - ) as mock_current_user, + patch("services.annotation_service.current_account_with_tenant") as mock_current_account_with_tenant, ): # Setup default mock returns mock_account_feature_service.get_features.return_value.billing.enabled = False @@ -38,6 +36,9 @@ class TestAnnotationService: mock_disable_task.delay.return_value = None mock_batch_import_task.delay.return_value = None + # Create mock user that will be returned by current_account_with_tenant + mock_user = create_autospec(Account, instance=True) + yield { "account_feature_service": mock_account_feature_service, "feature_service": mock_feature_service, @@ -47,7 +48,8 @@ class TestAnnotationService: "enable_task": mock_enable_task, "disable_task": mock_disable_task, "batch_import_task": mock_batch_import_task, - "current_user": mock_current_user, + "current_account_with_tenant": mock_current_account_with_tenant, + "current_user": mock_user, } def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies): @@ -107,6 +109,11 @@ class TestAnnotationService: """ mock_external_service_dependencies["current_user"].id = account_id mock_external_service_dependencies["current_user"].current_tenant_id = tenant_id + # Configure current_account_with_tenant to return (user, tenant_id) + mock_external_service_dependencies["current_account_with_tenant"].return_value = ( + mock_external_service_dependencies["current_user"], + tenant_id, + ) def _create_test_conversation(self, app, account, fake): """ diff --git a/api/tests/test_containers_integration_tests/services/test_app_generate_service.py b/api/tests/test_containers_integration_tests/services/test_app_generate_service.py index ca0f309fd4..9386687a04 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_generate_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_generate_service.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker -from openai._exceptions import RateLimitError from core.app.entities.app_invoke_entities import InvokeFrom from models.model import EndUser @@ -484,36 +483,6 @@ class TestAppGenerateService: # Verify error message assert "Rate limit exceeded" in str(exc_info.value) - def test_generate_with_rate_limit_error_from_openai( - self, db_session_with_containers, 
mock_external_service_dependencies - ): - """ - Test generation when OpenAI rate limit error occurs. - """ - fake = Faker() - app, account = self._create_test_app_and_account( - db_session_with_containers, mock_external_service_dependencies, mode="completion" - ) - - # Setup completion generator to raise RateLimitError - mock_response = MagicMock() - mock_response.request = MagicMock() - mock_external_service_dependencies["completion_generator"].return_value.generate.side_effect = RateLimitError( - "Rate limit exceeded", response=mock_response, body=None - ) - - # Setup test arguments - args = {"inputs": {"query": fake.text(max_nb_chars=50)}, "response_mode": "streaming"} - - # Execute the method under test and expect rate limit error - with pytest.raises(InvokeRateLimitError) as exc_info: - AppGenerateService.generate( - app_model=app, user=account, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=True - ) - - # Verify error message - assert "Rate limit exceeded" in str(exc_info.value) - def test_generate_with_invalid_app_mode(self, db_session_with_containers, mock_external_service_dependencies): """ Test generation with invalid app mode. diff --git a/api/tests/test_containers_integration_tests/services/test_app_service.py b/api/tests/test_containers_integration_tests/services/test_app_service.py index cbbbbddb21..e53392bcef 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from constants.model_template import default_app_templates -from models.account import Account +from models import Account from models.model import App, Site from services.account_service import AccountService, TenantService from services.app_service import AppService diff --git a/api/tests/test_containers_integration_tests/services/test_file_service.py b/api/tests/test_containers_integration_tests/services/test_file_service.py index 5598c5bc0c..4c94e42f3e 100644 --- a/api/tests/test_containers_integration_tests/services/test_file_service.py +++ b/api/tests/test_containers_integration_tests/services/test_file_service.py @@ -8,7 +8,7 @@ from sqlalchemy import Engine from werkzeug.exceptions import NotFound from configs import dify_config -from models.account import Account, Tenant +from models import Account, Tenant from models.enums import CreatorUserRole from models.model import EndUser, UploadFile from services.errors.file import FileTooLargeError, UnsupportedFileTypeError @@ -86,7 +86,7 @@ class TestFileService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -187,7 +187,7 @@ class TestFileService: assert upload_file.extension == "pdf" assert upload_file.mime_type == mimetype assert upload_file.created_by == account.id - assert upload_file.created_by_role == CreatorUserRole.ACCOUNT.value + assert upload_file.created_by_role == CreatorUserRole.ACCOUNT assert upload_file.used is False assert upload_file.hash == hashlib.sha3_256(content).hexdigest() @@ -216,7 +216,7 @@ class TestFileService: assert upload_file is not None assert upload_file.created_by == end_user.id - assert upload_file.created_by_role == CreatorUserRole.END_USER.value + assert upload_file.created_by_role == CreatorUserRole.END_USER def test_upload_file_with_datasets_source( self, db_session_with_containers, engine, 
mock_external_service_dependencies diff --git a/api/tests/test_containers_integration_tests/services/test_metadata_service.py b/api/tests/test_containers_integration_tests/services/test_metadata_service.py index d0f7e945f1..c8ced3f3a5 100644 --- a/api/tests/test_containers_integration_tests/services/test_metadata_service.py +++ b/api/tests/test_containers_integration_tests/services/test_metadata_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from core.rag.index_processor.constant.built_in_field import BuiltInField -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding, Document from services.entities.knowledge_entities.knowledge_entities import MetadataArgs from services.metadata_service import MetadataService @@ -17,9 +17,7 @@ class TestMetadataService: def mock_external_service_dependencies(self): """Mock setup for external service dependencies.""" with ( - patch( - "services.metadata_service.current_user", create_autospec(Account, instance=True) - ) as mock_current_user, + patch("libs.login.current_user", create_autospec(Account, instance=True)) as mock_current_user, patch("services.metadata_service.redis_client") as mock_redis_client, patch("services.dataset_service.DocumentService") as mock_document_service, ): @@ -72,7 +70,7 @@ class TestMetadataService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py index 66527dd506..8a72331425 100644 --- a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py +++ b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py @@ -103,7 +103,7 @@ class TestModelLoadBalancingService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py index 2196da8b3e..8cb3572c47 100644 --- a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py +++ b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py @@ -5,7 +5,7 @@ from faker import Faker from core.entities.model_entities import ModelStatus from core.model_runtime.entities.model_entities import FetchFrom, ModelType -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.provider import Provider, ProviderModel, ProviderModelSetting, ProviderType from services.model_provider_service import ModelProviderService @@ -67,7 +67,7 @@ class TestModelProviderService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_tag_service.py 
b/api/tests/test_containers_integration_tests/services/test_tag_service.py index 04cff397b2..6732b8d558 100644 --- a/api/tests/test_containers_integration_tests/services/test_tag_service.py +++ b/api/tests/test_containers_integration_tests/services/test_tag_service.py @@ -5,7 +5,7 @@ from faker import Faker from sqlalchemy import select from werkzeug.exceptions import NotFound -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset from models.model import App, Tag, TagBinding from services.tag_service import TagService @@ -66,7 +66,7 @@ class TestTagService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py index c9ace46c55..bbbf48ede9 100644 --- a/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py +++ b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py @@ -5,7 +5,7 @@ from faker import Faker from sqlalchemy import select from core.app.entities.app_invoke_entities import InvokeFrom -from models.account import Account +from models import Account from models.model import Conversation, EndUser from models.web import PinnedConversation from services.account_service import AccountService, TenantService @@ -144,7 +144,7 @@ class TestWebConversationService: system_instruction=fake.text(max_nb_chars=300), system_instruction_tokens=50, status="normal", - invoke_from=InvokeFrom.WEB_APP.value, + invoke_from=InvokeFrom.WEB_APP, from_source="console" if isinstance(user, Account) else "api", from_end_user_id=user.id if isinstance(user, EndUser) else None, from_account_id=user.id if isinstance(user, Account) else None, diff --git a/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py index 316cfe1674..73e622b061 100644 --- a/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py +++ b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py @@ -7,7 +7,7 @@ from faker import Faker from werkzeug.exceptions import NotFound, Unauthorized from libs.password import hash_password -from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole from models.model import App, Site from services.errors.account import AccountLoginError, AccountNotFoundError, AccountPasswordError from services.webapp_auth_service import WebAppAuthService, WebAppAuthType @@ -87,7 +87,7 @@ class TestWebAppAuthService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -150,7 +150,7 @@ class TestWebAppAuthService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -232,7 +232,7 @@ class TestWebAppAuthService: assert result.id == account.id assert result.email == account.email assert result.name 
== account.name - assert result.status == AccountStatus.ACTIVE.value + assert result.status == AccountStatus.ACTIVE # Verify database state from extensions.ext_database import db @@ -280,7 +280,7 @@ class TestWebAppAuthService: email=fake.email(), name=fake.name(), interface_language="en-US", - status=AccountStatus.BANNED.value, + status=AccountStatus.BANNED, ) # Hash password @@ -411,7 +411,7 @@ class TestWebAppAuthService: assert result.id == account.id assert result.email == account.email assert result.name == account.name - assert result.status == AccountStatus.ACTIVE.value + assert result.status == AccountStatus.ACTIVE # Verify database state from extensions.ext_database import db @@ -455,7 +455,7 @@ class TestWebAppAuthService: email=unique_email, name=fake.name(), interface_language="en-US", - status=AccountStatus.BANNED.value, + status=AccountStatus.BANNED, ) from extensions.ext_database import db @@ -863,13 +863,14 @@ class TestWebAppAuthService: - Mock service integration """ # Arrange: Setup mock for enterprise service - mock_webapp_auth = type("MockWebAppAuth", (), {"access_mode": "sso_verified"})() + mock_external_service_dependencies["app_service"].get_app_id_by_code.return_value = "mock_app_id" + setting = type("MockWebAppAuth", (), {"access_mode": "sso_verified"})() mock_external_service_dependencies[ "enterprise_service" - ].WebAppAuth.get_app_access_mode_by_code.return_value = mock_webapp_auth + ].WebAppAuth.get_app_access_mode_by_id.return_value = setting # Act: Execute authentication type determination - result = WebAppAuthService.get_app_auth_type(app_code="mock_app_code") + result: WebAppAuthType = WebAppAuthService.get_app_auth_type(app_code="mock_app_code") # Assert: Verify correct result assert result == WebAppAuthType.EXTERNAL @@ -877,7 +878,7 @@ class TestWebAppAuthService: # Verify mock service was called correctly mock_external_service_dependencies[ "enterprise_service" - ].WebAppAuth.get_app_access_mode_by_code.assert_called_once_with("mock_app_code") + ].WebAppAuth.get_app_access_mode_by_id.assert_called_once_with(app_id="mock_app_id") def test_get_app_auth_type_no_parameters(self, db_session_with_containers, mock_external_service_dependencies): """ diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py index 2e18184aea..66bd4d3cd9 100644 --- a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py @@ -199,7 +199,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), finished_at=datetime.now(UTC), @@ -215,7 +215,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -356,7 +356,7 @@ class TestWorkflowAppService: elapsed_time=1.0 + i, total_tokens=100 + i * 10, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status != "running" else None, @@ -371,7 
+371,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -464,7 +464,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=timestamp, finished_at=timestamp + timedelta(minutes=1), @@ -479,7 +479,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=timestamp, ) @@ -571,7 +571,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1), @@ -586,7 +586,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -701,7 +701,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1), @@ -716,7 +716,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -743,7 +743,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.END_USER.value, + created_by_role=CreatorUserRole.END_USER, created_by=end_user.id, created_at=datetime.now(UTC) + timedelta(minutes=i + 10), finished_at=datetime.now(UTC) + timedelta(minutes=i + 11), @@ -758,7 +758,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="web-app", - created_by_role=CreatorUserRole.END_USER.value, + created_by_role=CreatorUserRole.END_USER, created_by=end_user.id, created_at=datetime.now(UTC) + timedelta(minutes=i + 10), ) @@ -780,14 +780,39 @@ class TestWorkflowAppService: limit=20, ) assert result_session_filter["total"] == 2 - assert all(log.created_by_role == CreatorUserRole.END_USER.value for log in result_session_filter["data"]) + assert all(log.created_by_role == CreatorUserRole.END_USER for log in result_session_filter["data"]) # Test filtering by account email result_account_filter = service.get_paginate_workflow_app_logs( session=db_session_with_containers, app_model=app, created_by_account=account.email, page=1, limit=20 ) assert result_account_filter["total"] == 3 - assert all(log.created_by_role == CreatorUserRole.ACCOUNT.value for log in result_account_filter["data"]) + assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_account_filter["data"]) + + # Test 
filtering by a changed account email + original_email = account.email + new_email = "changed@example.com" + account.email = new_email + db_session_with_containers.commit() + + assert account.email == new_email + + # Results for the new email are expected to be the same as for the original email + result_with_new_email = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, created_by_account=new_email, page=1, limit=20 + ) + assert result_with_new_email["total"] == 3 + assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_with_new_email["data"]) + + # The old email is no longer bound to any account; this unexpected input should raise ValueError + with pytest.raises(ValueError) as exc_info: + service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, created_by_account=original_email, page=1, limit=20 + ) + assert "Account not found" in str(exc_info.value) + + account.email = original_email + db_session_with_containers.commit() # Test filtering by non-existent session ID result_no_session = service.get_paginate_workflow_app_logs( @@ -799,15 +824,16 @@ class TestWorkflowAppService: ) assert result_no_session["total"] == 0 - # Test filtering by non-existent account email - result_no_account = service.get_paginate_workflow_app_logs( - session=db_session_with_containers, - app_model=app, - created_by_account="nonexistent@example.com", - page=1, - limit=20, - ) - assert result_no_account["total"] == 0 + # Test filtering by a non-existent account email; this unexpected input should raise ValueError + with pytest.raises(ValueError) as exc_info: + service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_account="nonexistent@example.com", + page=1, + limit=20, + ) + assert "Account not found" in str(exc_info.value) def test_get_paginate_workflow_app_logs_with_uuid_keyword_search( self, db_session_with_containers, mock_external_service_dependencies @@ -853,7 +879,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), finished_at=datetime.now(UTC) + timedelta(minutes=1), @@ -869,7 +895,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -943,7 +969,7 @@ class TestWorkflowAppService: elapsed_time=0.0, # Edge case: 0 elapsed time total_tokens=0, # Edge case: 0 tokens total_steps=0, # Edge case: 0 steps - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), finished_at=datetime.now(UTC), @@ -959,7 +985,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -1057,15 +1083,15 @@ class TestWorkflowAppService: assert len(result_no_session["data"]) == 0 # Test with account email that doesn't exist - result_no_account = service.get_paginate_workflow_app_logs( - session=db_session_with_containers, - app_model=app, - created_by_account="nonexistent@example.com", - page=1, - limit=20, - ) - assert result_no_account["total"]
== 0 - assert len(result_no_account["data"]) == 0 + with pytest.raises(ValueError) as exc_info: + service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_account="nonexistent@example.com", + page=1, + limit=20, + ) + assert "Account not found" in str(exc_info.value) def test_get_paginate_workflow_app_logs_with_complex_query_combinations( self, db_session_with_containers, mock_external_service_dependencies @@ -1098,7 +1124,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status == "succeeded" else None, @@ -1113,7 +1139,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -1198,7 +1224,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status != "running" else None, @@ -1213,7 +1239,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -1300,7 +1326,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j), finished_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j + 1), @@ -1315,7 +1341,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j), ) diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py index 4cb21ef6bd..23c4eeb82f 100644 --- a/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py @@ -130,7 +130,7 @@ class TestWorkflowRunService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=created_time, finished_at=created_time, @@ -167,7 +167,7 @@ class TestWorkflowRunService: inputs={}, status="normal", mode="chat", - from_source=CreatorUserRole.ACCOUNT.value, + from_source=CreatorUserRole.ACCOUNT, from_account_id=account.id, ) db.session.add(conversation) @@ -188,7 +188,7 @@ class TestWorkflowRunService: message.answer_price_unit = 0.001 message.currency = "USD" message.status = "normal" - message.from_source = CreatorUserRole.ACCOUNT.value + 
message.from_source = CreatorUserRole.ACCOUNT message.from_account_id = account.id message.workflow_run_id = workflow_run.id message.inputs = {"input": "test input"} @@ -458,7 +458,7 @@ class TestWorkflowRunService: status="succeeded", elapsed_time=0.5, execution_metadata=json.dumps({"tokens": 50}), - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -689,7 +689,7 @@ class TestWorkflowRunService: status="succeeded", elapsed_time=0.5, execution_metadata=json.dumps({"tokens": 50}), - created_by_role=CreatorUserRole.END_USER.value, + created_by_role=CreatorUserRole.END_USER, created_by=end_user.id, created_at=datetime.now(UTC), ) @@ -710,4 +710,4 @@ class TestWorkflowRunService: assert node_exec.app_id == app.id assert node_exec.workflow_run_id == workflow_run.id assert node_exec.created_by == end_user.id - assert node_exec.created_by_role == CreatorUserRole.END_USER.value + assert node_exec.created_by_role == CreatorUserRole.END_USER diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_service.py index 60150667ed..4741eba1f5 100644 --- a/api/tests/test_containers_integration_tests/services/test_workflow_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_service.py @@ -44,27 +44,26 @@ class TestWorkflowService: Account: Created test account instance """ fake = fake or Faker() - account = Account() - account.id = fake.uuid4() - account.email = fake.email() - account.name = fake.name() - account.avatar_url = fake.url() - account.tenant_id = fake.uuid4() - account.status = "active" - account.type = "normal" - account.role = "owner" - account.interface_language = "en-US" # Set interface language for Site creation + account = Account( + email=fake.email(), + name=fake.name(), + avatar=fake.url(), + status="active", + interface_language="en-US", # Set interface language for Site creation + ) account.created_at = fake.date_time_this_year() + account.id = fake.uuid4() account.updated_at = account.created_at # Create a tenant for the account from models.account import Tenant - tenant = Tenant() - tenant.id = account.tenant_id - tenant.name = f"Test Tenant {fake.company()}" - tenant.plan = "basic" - tenant.status = "active" + tenant = Tenant( + name=f"Test Tenant {fake.company()}", + plan="basic", + status="active", + ) + tenant.id = account.current_tenant_id tenant.created_at = fake.date_time_this_year() tenant.updated_at = tenant.created_at @@ -91,20 +90,21 @@ class TestWorkflowService: App: Created test app instance """ fake = fake or Faker() - app = App() - app.id = fake.uuid4() - app.tenant_id = fake.uuid4() - app.name = fake.company() - app.description = fake.text() - app.mode = AppMode.WORKFLOW - app.icon_type = "emoji" - app.icon = "🤖" - app.icon_background = "#FFEAD5" - app.enable_site = True - app.enable_api = True - app.created_by = fake.uuid4() + app = App( + id=fake.uuid4(), + tenant_id=fake.uuid4(), + name=fake.company(), + description=fake.text(), + mode=AppMode.WORKFLOW, + icon_type="emoji", + icon="🤖", + icon_background="#FFEAD5", + enable_site=True, + enable_api=True, + created_by=fake.uuid4(), + workflow_id=None, # Will be set when workflow is created + ) app.updated_by = app.created_by - app.workflow_id = None # Will be set when workflow is created from extensions.ext_database import db @@ -126,19 +126,20 @@ class TestWorkflowService: Workflow: 
Created test workflow instance """ fake = fake or Faker() - workflow = Workflow() - workflow.id = fake.uuid4() - workflow.tenant_id = app.tenant_id - workflow.app_id = app.id - workflow.type = WorkflowType.WORKFLOW.value - workflow.version = Workflow.VERSION_DRAFT - workflow.graph = json.dumps({"nodes": [], "edges": []}) - workflow.features = json.dumps({"features": []}) - # unique_hash is a computed property based on graph and features - workflow.created_by = account.id - workflow.updated_by = account.id - workflow.environment_variables = [] - workflow.conversation_variables = [] + workflow = Workflow( + id=fake.uuid4(), + tenant_id=app.tenant_id, + app_id=app.id, + type=WorkflowType.WORKFLOW, + version=Workflow.VERSION_DRAFT, + graph=json.dumps({"nodes": [], "edges": []}), + features=json.dumps({"features": []}), + # unique_hash is a computed property based on graph and features + created_by=account.id, + updated_by=account.id, + environment_variables=[], + conversation_variables=[], + ) from extensions.ext_database import db @@ -175,7 +176,7 @@ class TestWorkflowService: node_execution.node_type = "test_node" node_execution.title = "Test Node" # Required field node_execution.status = "succeeded" - node_execution.created_by_role = CreatorUserRole.ACCOUNT.value # Required field + node_execution.created_by_role = CreatorUserRole.ACCOUNT # Required field node_execution.created_by = account.id # Required field node_execution.created_at = fake.date_time_this_year() diff --git a/api/tests/test_containers_integration_tests/services/test_workspace_service.py b/api/tests/test_containers_integration_tests/services/test_workspace_service.py index 3fd439256d..4249642bc9 100644 --- a/api/tests/test_containers_integration_tests/services/test_workspace_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workspace_service.py @@ -3,7 +3,7 @@ from unittest.mock import patch import pytest from faker import Faker -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from services.workspace_service import WorkspaceService @@ -69,7 +69,7 @@ class TestWorkspaceService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -111,7 +111,7 @@ class TestWorkspaceService: assert result["name"] == tenant.name assert result["plan"] == tenant.plan assert result["status"] == tenant.status - assert result["role"] == TenantAccountRole.OWNER.value + assert result["role"] == TenantAccountRole.OWNER assert result["created_at"] == tenant.created_at assert result["trial_end_reason"] is None @@ -159,7 +159,7 @@ class TestWorkspaceService: assert result["name"] == tenant.name assert result["plan"] == tenant.plan assert result["status"] == tenant.status - assert result["role"] == TenantAccountRole.OWNER.value + assert result["role"] == TenantAccountRole.OWNER assert result["created_at"] == tenant.created_at assert result["trial_end_reason"] is None @@ -194,7 +194,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.NORMAL.value + join.role = TenantAccountRole.NORMAL db.session.commit() # Setup mocks for feature service @@ -212,7 +212,7 @@ class TestWorkspaceService: assert result["name"] == tenant.name assert result["plan"] == 
tenant.plan assert result["status"] == tenant.status - assert result["role"] == TenantAccountRole.NORMAL.value + assert result["role"] == TenantAccountRole.NORMAL assert result["created_at"] == tenant.created_at assert result["trial_end_reason"] is None @@ -245,7 +245,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.ADMIN.value + join.role = TenantAccountRole.ADMIN db.session.commit() # Setup mocks for feature service and tenant service @@ -260,7 +260,7 @@ class TestWorkspaceService: # Assert: Verify the expected outcomes assert result is not None - assert result["role"] == TenantAccountRole.ADMIN.value + assert result["role"] == TenantAccountRole.ADMIN # Verify custom config is included for admin users assert "custom_config" in result @@ -378,7 +378,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.EDITOR.value + join.role = TenantAccountRole.EDITOR db.session.commit() # Setup mocks for feature service and tenant service @@ -394,7 +394,7 @@ class TestWorkspaceService: # Assert: Verify the expected outcomes assert result is not None - assert result["role"] == TenantAccountRole.EDITOR.value + assert result["role"] == TenantAccountRole.EDITOR # Verify custom config is not included for editor users without admin privileges assert "custom_config" not in result @@ -425,7 +425,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.DATASET_OPERATOR.value + join.role = TenantAccountRole.DATASET_OPERATOR db.session.commit() # Setup mocks for feature service and tenant service @@ -441,7 +441,7 @@ class TestWorkspaceService: # Assert: Verify the expected outcomes assert result is not None - assert result["role"] == TenantAccountRole.DATASET_OPERATOR.value + assert result["role"] == TenantAccountRole.DATASET_OPERATOR # Verify custom config is not included for dataset operators without admin privileges assert "custom_config" not in result diff --git a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py index a412bdccf8..0871467a05 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py @@ -3,7 +3,7 @@ from unittest.mock import patch import pytest from faker import Faker -from models.account import Account, Tenant +from models import Account, Tenant from models.tools import ApiToolProvider from services.tools.api_tools_manage_service import ApiToolManageService @@ -72,7 +72,7 @@ class TestApiToolManageService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py index dd22dcbfd1..8c190762cf 100644 --- 
a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from core.tools.entities.tool_entities import ToolProviderType -from models.account import Account, Tenant +from models import Account, Tenant from models.tools import MCPToolProvider from services.tools.mcp_tools_manage_service import UNCHANGED_SERVER_URL_PLACEHOLDER, MCPToolManageService @@ -20,12 +20,21 @@ class TestMCPToolManageService: patch("services.tools.mcp_tools_manage_service.ToolTransformService") as mock_tool_transform_service, ): # Setup default mock returns + from core.tools.entities.api_entities import ToolProviderApiEntity + from core.tools.entities.common_entities import I18nObject + mock_encrypter.encrypt_token.return_value = "encrypted_server_url" - mock_tool_transform_service.mcp_provider_to_user_provider.return_value = { - "id": "test_id", - "name": "test_name", - "type": ToolProviderType.MCP, - } + mock_tool_transform_service.mcp_provider_to_user_provider.return_value = ToolProviderApiEntity( + id="test_id", + author="test_author", + name="test_name", + type=ToolProviderType.MCP, + description=I18nObject(en_US="Test Description", zh_Hans="测试描述"), + icon={"type": "emoji", "content": "🤖"}, + label=I18nObject(en_US="Test Label", zh_Hans="测试标签"), + labels=[], + tools=[], + ) yield { "encrypter": mock_encrypter, @@ -72,7 +81,7 @@ class TestMCPToolManageService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -104,9 +113,9 @@ class TestMCPToolManageService: mcp_provider = MCPToolProvider( tenant_id=tenant_id, name=fake.company(), - server_identifier=fake.uuid4(), + server_identifier=str(fake.uuid4()), server_url="encrypted_server_url", - server_url_hash=fake.sha256(), + server_url_hash=str(fake.sha256()), user_id=user_id, authed=False, tools="[]", @@ -144,7 +153,10 @@ class TestMCPToolManageService: ) # Act: Execute the method under test - result = MCPToolManageService.get_mcp_provider_by_provider_id(mcp_provider.id, tenant.id) + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + result = service.get_provider(provider_id=mcp_provider.id, tenant_id=tenant.id) # Assert: Verify the expected outcomes assert result is not None @@ -154,8 +166,6 @@ class TestMCPToolManageService: assert result.user_id == account.id # Verify database state - from extensions.ext_database import db - db.session.refresh(result) assert result.id is not None assert result.server_identifier == mcp_provider.server_identifier @@ -177,11 +187,14 @@ class TestMCPToolManageService: db_session_with_containers, mock_external_service_dependencies ) - non_existent_id = fake.uuid4() + non_existent_id = str(fake.uuid4()) # Act & Assert: Verify proper error handling + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) with pytest.raises(ValueError, match="MCP tool not found"): - MCPToolManageService.get_mcp_provider_by_provider_id(non_existent_id, tenant.id) + service.get_provider(provider_id=non_existent_id, tenant_id=tenant.id) def test_get_mcp_provider_by_provider_id_tenant_isolation( self, db_session_with_containers, mock_external_service_dependencies @@ -210,8 +223,11 @@ class TestMCPToolManageService: ) # Act & Assert: Verify tenant isolation + from 
extensions.ext_database import db + + service = MCPToolManageService(db.session()) with pytest.raises(ValueError, match="MCP tool not found"): - MCPToolManageService.get_mcp_provider_by_provider_id(mcp_provider1.id, tenant2.id) + service.get_provider(provider_id=mcp_provider1.id, tenant_id=tenant2.id) def test_get_mcp_provider_by_server_identifier_success( self, db_session_with_containers, mock_external_service_dependencies @@ -235,7 +251,10 @@ class TestMCPToolManageService: ) # Act: Execute the method under test - result = MCPToolManageService.get_mcp_provider_by_server_identifier(mcp_provider.server_identifier, tenant.id) + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + result = service.get_provider(server_identifier=mcp_provider.server_identifier, tenant_id=tenant.id) # Assert: Verify the expected outcomes assert result is not None @@ -245,8 +264,6 @@ class TestMCPToolManageService: assert result.user_id == account.id # Verify database state - from extensions.ext_database import db - db.session.refresh(result) assert result.id is not None assert result.name == mcp_provider.name @@ -268,11 +285,14 @@ class TestMCPToolManageService: db_session_with_containers, mock_external_service_dependencies ) - non_existent_identifier = fake.uuid4() + non_existent_identifier = str(fake.uuid4()) # Act & Assert: Verify proper error handling + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) with pytest.raises(ValueError, match="MCP tool not found"): - MCPToolManageService.get_mcp_provider_by_server_identifier(non_existent_identifier, tenant.id) + service.get_provider(server_identifier=non_existent_identifier, tenant_id=tenant.id) def test_get_mcp_provider_by_server_identifier_tenant_isolation( self, db_session_with_containers, mock_external_service_dependencies @@ -301,8 +321,11 @@ class TestMCPToolManageService: ) # Act & Assert: Verify tenant isolation + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) with pytest.raises(ValueError, match="MCP tool not found"): - MCPToolManageService.get_mcp_provider_by_server_identifier(mcp_provider1.server_identifier, tenant2.id) + service.get_provider(server_identifier=mcp_provider1.server_identifier, tenant_id=tenant2.id) def test_create_mcp_provider_success(self, db_session_with_containers, mock_external_service_dependencies): """ @@ -322,15 +345,30 @@ class TestMCPToolManageService: ) # Setup mocks for provider creation + from core.tools.entities.api_entities import ToolProviderApiEntity + from core.tools.entities.common_entities import I18nObject + mock_external_service_dependencies["encrypter"].encrypt_token.return_value = "encrypted_server_url" - mock_external_service_dependencies["tool_transform_service"].mcp_provider_to_user_provider.return_value = { - "id": "new_provider_id", - "name": "Test MCP Provider", - "type": ToolProviderType.MCP, - } + mock_external_service_dependencies[ + "tool_transform_service" + ].mcp_provider_to_user_provider.return_value = ToolProviderApiEntity( + id="new_provider_id", + author=account.name, + name="Test MCP Provider", + type=ToolProviderType.MCP, + description=I18nObject(en_US="Test MCP Provider Description", zh_Hans="测试MCP提供者描述"), + icon={"type": "emoji", "content": "🤖"}, + label=I18nObject(en_US="Test MCP Provider", zh_Hans="测试MCP提供者"), + labels=[], + tools=[], + ) # Act: Execute the method under test - result = MCPToolManageService.create_mcp_provider( + from core.entities.mcp_provider import 
MCPConfiguration + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + result = service.create_provider( tenant_id=tenant.id, name="Test MCP Provider", server_url="https://example.com/mcp", @@ -339,14 +377,16 @@ class TestMCPToolManageService: icon_type="emoji", icon_background="#FF6B6B", server_identifier="test_identifier_123", - timeout=30.0, - sse_read_timeout=300.0, + configuration=MCPConfiguration( + timeout=30.0, + sse_read_timeout=300.0, + ), ) # Assert: Verify the expected outcomes assert result is not None - assert result["name"] == "Test MCP Provider" - assert result["type"] == ToolProviderType.MCP + assert result.name == "Test MCP Provider" + assert result.type == ToolProviderType.MCP # Verify database state from extensions.ext_database import db @@ -386,7 +426,11 @@ class TestMCPToolManageService: ) # Create first provider - MCPToolManageService.create_mcp_provider( + from core.entities.mcp_provider import MCPConfiguration + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + service.create_provider( tenant_id=tenant.id, name="Test MCP Provider", server_url="https://example1.com/mcp", @@ -395,13 +439,15 @@ class TestMCPToolManageService: icon_type="emoji", icon_background="#FF6B6B", server_identifier="test_identifier_1", - timeout=30.0, - sse_read_timeout=300.0, + configuration=MCPConfiguration( + timeout=30.0, + sse_read_timeout=300.0, + ), ) # Act & Assert: Verify proper error handling for duplicate name with pytest.raises(ValueError, match="MCP tool Test MCP Provider already exists"): - MCPToolManageService.create_mcp_provider( + service.create_provider( tenant_id=tenant.id, name="Test MCP Provider", # Duplicate name server_url="https://example2.com/mcp", @@ -410,8 +456,10 @@ class TestMCPToolManageService: icon_type="emoji", icon_background="#4ECDC4", server_identifier="test_identifier_2", - timeout=45.0, - sse_read_timeout=400.0, + configuration=MCPConfiguration( + timeout=45.0, + sse_read_timeout=400.0, + ), ) def test_create_mcp_provider_duplicate_server_url( @@ -432,7 +480,11 @@ class TestMCPToolManageService: ) # Create first provider - MCPToolManageService.create_mcp_provider( + from core.entities.mcp_provider import MCPConfiguration + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + service.create_provider( tenant_id=tenant.id, name="Test MCP Provider 1", server_url="https://example.com/mcp", @@ -441,13 +493,15 @@ class TestMCPToolManageService: icon_type="emoji", icon_background="#FF6B6B", server_identifier="test_identifier_1", - timeout=30.0, - sse_read_timeout=300.0, + configuration=MCPConfiguration( + timeout=30.0, + sse_read_timeout=300.0, + ), ) # Act & Assert: Verify proper error handling for duplicate server URL - with pytest.raises(ValueError, match="MCP tool https://example.com/mcp already exists"): - MCPToolManageService.create_mcp_provider( + with pytest.raises(ValueError, match="MCP tool with this server URL already exists"): + service.create_provider( tenant_id=tenant.id, name="Test MCP Provider 2", server_url="https://example.com/mcp", # Duplicate URL @@ -456,8 +510,10 @@ class TestMCPToolManageService: icon_type="emoji", icon_background="#4ECDC4", server_identifier="test_identifier_2", - timeout=45.0, - sse_read_timeout=400.0, + configuration=MCPConfiguration( + timeout=45.0, + sse_read_timeout=400.0, + ), ) def test_create_mcp_provider_duplicate_server_identifier( @@ -478,7 +534,11 @@ class TestMCPToolManageService: ) # Create first 
provider - MCPToolManageService.create_mcp_provider( + from core.entities.mcp_provider import MCPConfiguration + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + service.create_provider( tenant_id=tenant.id, name="Test MCP Provider 1", server_url="https://example1.com/mcp", @@ -487,13 +547,15 @@ class TestMCPToolManageService: icon_type="emoji", icon_background="#FF6B6B", server_identifier="test_identifier_123", - timeout=30.0, - sse_read_timeout=300.0, + configuration=MCPConfiguration( + timeout=30.0, + sse_read_timeout=300.0, + ), ) # Act & Assert: Verify proper error handling for duplicate server identifier with pytest.raises(ValueError, match="MCP tool test_identifier_123 already exists"): - MCPToolManageService.create_mcp_provider( + service.create_provider( tenant_id=tenant.id, name="Test MCP Provider 2", server_url="https://example2.com/mcp", @@ -502,8 +564,10 @@ class TestMCPToolManageService: icon_type="emoji", icon_background="#4ECDC4", server_identifier="test_identifier_123", # Duplicate identifier - timeout=45.0, - sse_read_timeout=400.0, + configuration=MCPConfiguration( + timeout=45.0, + sse_read_timeout=400.0, + ), ) def test_retrieve_mcp_tools_success(self, db_session_with_containers, mock_external_service_dependencies): @@ -543,23 +607,59 @@ class TestMCPToolManageService: db.session.commit() # Setup mock for transformation service + from core.tools.entities.api_entities import ToolProviderApiEntity + from core.tools.entities.common_entities import I18nObject + mock_external_service_dependencies["tool_transform_service"].mcp_provider_to_user_provider.side_effect = [ - {"id": provider1.id, "name": provider1.name, "type": ToolProviderType.MCP}, - {"id": provider2.id, "name": provider2.name, "type": ToolProviderType.MCP}, - {"id": provider3.id, "name": provider3.name, "type": ToolProviderType.MCP}, + ToolProviderApiEntity( + id=provider1.id, + author=account.name, + name=provider1.name, + type=ToolProviderType.MCP, + description=I18nObject(en_US="Alpha Provider Description", zh_Hans="Alpha提供者描述"), + icon={"type": "emoji", "content": "🅰️"}, + label=I18nObject(en_US=provider1.name, zh_Hans=provider1.name), + labels=[], + tools=[], + ), + ToolProviderApiEntity( + id=provider2.id, + author=account.name, + name=provider2.name, + type=ToolProviderType.MCP, + description=I18nObject(en_US="Beta Provider Description", zh_Hans="Beta提供者描述"), + icon={"type": "emoji", "content": "🅱️"}, + label=I18nObject(en_US=provider2.name, zh_Hans=provider2.name), + labels=[], + tools=[], + ), + ToolProviderApiEntity( + id=provider3.id, + author=account.name, + name=provider3.name, + type=ToolProviderType.MCP, + description=I18nObject(en_US="Gamma Provider Description", zh_Hans="Gamma提供者描述"), + icon={"type": "emoji", "content": "Γ"}, + label=I18nObject(en_US=provider3.name, zh_Hans=provider3.name), + labels=[], + tools=[], + ), ] # Act: Execute the method under test - result = MCPToolManageService.retrieve_mcp_tools(tenant.id, for_list=True) + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + result = service.list_providers(tenant_id=tenant.id, for_list=True) # Assert: Verify the expected outcomes assert result is not None assert len(result) == 3 # Verify correct ordering by name - assert result[0]["name"] == "Alpha Provider" - assert result[1]["name"] == "Beta Provider" - assert result[2]["name"] == "Gamma Provider" + assert result[0].name == "Alpha Provider" + assert result[1].name == "Beta Provider" + assert 
result[2].name == "Gamma Provider" # Verify mock interactions assert ( @@ -584,7 +684,10 @@ class TestMCPToolManageService: # No MCP providers created for this tenant # Act: Execute the method under test - result = MCPToolManageService.retrieve_mcp_tools(tenant.id, for_list=False) + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + result = service.list_providers(tenant_id=tenant.id, for_list=False) # Assert: Verify the expected outcomes assert result is not None @@ -624,20 +727,46 @@ class TestMCPToolManageService: ) # Setup mock for transformation service + from core.tools.entities.api_entities import ToolProviderApiEntity + from core.tools.entities.common_entities import I18nObject + mock_external_service_dependencies["tool_transform_service"].mcp_provider_to_user_provider.side_effect = [ - {"id": provider1.id, "name": provider1.name, "type": ToolProviderType.MCP}, - {"id": provider2.id, "name": provider2.name, "type": ToolProviderType.MCP}, + ToolProviderApiEntity( + id=provider1.id, + author=account1.name, + name=provider1.name, + type=ToolProviderType.MCP, + description=I18nObject(en_US="Provider 1 Description", zh_Hans="提供者1描述"), + icon={"type": "emoji", "content": "1️⃣"}, + label=I18nObject(en_US=provider1.name, zh_Hans=provider1.name), + labels=[], + tools=[], + ), + ToolProviderApiEntity( + id=provider2.id, + author=account2.name, + name=provider2.name, + type=ToolProviderType.MCP, + description=I18nObject(en_US="Provider 2 Description", zh_Hans="提供者2描述"), + icon={"type": "emoji", "content": "2️⃣"}, + label=I18nObject(en_US=provider2.name, zh_Hans=provider2.name), + labels=[], + tools=[], + ), ] # Act: Execute the method under test for both tenants - result1 = MCPToolManageService.retrieve_mcp_tools(tenant1.id, for_list=True) - result2 = MCPToolManageService.retrieve_mcp_tools(tenant2.id, for_list=True) + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + result1 = service.list_providers(tenant_id=tenant1.id, for_list=True) + result2 = service.list_providers(tenant_id=tenant2.id, for_list=True) # Assert: Verify tenant isolation assert len(result1) == 1 assert len(result2) == 1 - assert result1[0]["id"] == provider1.id - assert result2[0]["id"] == provider2.id + assert result1[0].id == provider1.id + assert result2[0].id == provider2.id def test_list_mcp_tool_from_remote_server_success( self, db_session_with_containers, mock_external_service_dependencies @@ -661,17 +790,20 @@ class TestMCPToolManageService: mcp_provider = self._create_test_mcp_provider( db_session_with_containers, mock_external_service_dependencies, tenant.id, account.id ) - mcp_provider.server_url = "encrypted_server_url" - mcp_provider.authed = False + # Use a valid base64 encoded string to avoid decryption errors + import base64 + + mcp_provider.server_url = base64.b64encode(b"encrypted_server_url").decode() + mcp_provider.authed = True # Provider must be authenticated to list tools mcp_provider.tools = "[]" from extensions.ext_database import db db.session.commit() - # Mock the decrypted_server_url property to avoid encryption issues - with patch("models.tools.encrypter") as mock_encrypter: - mock_encrypter.decrypt_token.return_value = "https://example.com/mcp" + # Mock the decryption process at the rsa level to avoid key file issues + with patch("libs.rsa.decrypt") as mock_decrypt: + mock_decrypt.return_value = "https://example.com/mcp" # Mock MCPClient and its context manager mock_tools = [ @@ -683,13 +815,16 @@ class 
TestMCPToolManageService: )(), ] - with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client: + with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client: # Setup mock client mock_client_instance = mock_mcp_client.return_value.__enter__.return_value mock_client_instance.list_tools.return_value = mock_tools # Act: Execute the method under test - result = MCPToolManageService.list_mcp_tool_from_remote_server(tenant.id, mcp_provider.id) + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) + result = service.list_provider_tools(tenant_id=tenant.id, provider_id=mcp_provider.id) # Assert: Verify the expected outcomes assert result is not None @@ -705,16 +840,8 @@ class TestMCPToolManageService: assert mcp_provider.updated_at is not None # Verify mock interactions - mock_mcp_client.assert_called_once_with( - "https://example.com/mcp", - mcp_provider.id, - tenant.id, - authed=False, - for_list=True, - headers={}, - timeout=30.0, - sse_read_timeout=300.0, - ) + # MCPClientWithAuthRetry is called with different parameters + mock_mcp_client.assert_called_once() def test_list_mcp_tool_from_remote_server_auth_error( self, db_session_with_containers, mock_external_service_dependencies @@ -737,7 +864,10 @@ class TestMCPToolManageService: mcp_provider = self._create_test_mcp_provider( db_session_with_containers, mock_external_service_dependencies, tenant.id, account.id ) - mcp_provider.server_url = "encrypted_server_url" + # Use a valid base64 encoded string to avoid decryption errors + import base64 + + mcp_provider.server_url = base64.b64encode(b"encrypted_server_url").decode() mcp_provider.authed = False mcp_provider.tools = "[]" @@ -745,20 +875,23 @@ class TestMCPToolManageService: db.session.commit() - # Mock the decrypted_server_url property to avoid encryption issues - with patch("models.tools.encrypter") as mock_encrypter: - mock_encrypter.decrypt_token.return_value = "https://example.com/mcp" + # Mock the decryption process at the rsa level to avoid key file issues + with patch("libs.rsa.decrypt") as mock_decrypt: + mock_decrypt.return_value = "https://example.com/mcp" # Mock MCPClient to raise authentication error - with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client: + with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client: from core.mcp.error import MCPAuthError mock_client_instance = mock_mcp_client.return_value.__enter__.return_value mock_client_instance.list_tools.side_effect = MCPAuthError("Authentication required") # Act & Assert: Verify proper error handling + from extensions.ext_database import db + + service = MCPToolManageService(db.session()) with pytest.raises(ValueError, match="Please auth the tool first"): - MCPToolManageService.list_mcp_tool_from_remote_server(tenant.id, mcp_provider.id) + service.list_provider_tools(tenant_id=tenant.id, provider_id=mcp_provider.id) # Verify database state was not changed db.session.refresh(mcp_provider) @@ -786,32 +919,38 @@ class TestMCPToolManageService: mcp_provider = self._create_test_mcp_provider( db_session_with_containers, mock_external_service_dependencies, tenant.id, account.id ) - mcp_provider.server_url = "encrypted_server_url" - mcp_provider.authed = False + # Use a valid base64 encoded string to avoid decryption errors + import base64 + + mcp_provider.server_url = base64.b64encode(b"encrypted_server_url").decode() + mcp_provider.authed = True # Provider must be 
authenticated to test connection errors
         mcp_provider.tools = "[]"
         from extensions.ext_database import db

         db.session.commit()

-        # Mock the decrypted_server_url property to avoid encryption issues
-        with patch("models.tools.encrypter") as mock_encrypter:
-            mock_encrypter.decrypt_token.return_value = "https://example.com/mcp"
+        # Mock the decryption process at the rsa level to avoid key file issues
+        with patch("libs.rsa.decrypt") as mock_decrypt:
+            mock_decrypt.return_value = "https://example.com/mcp"
             # Mock MCPClient to raise connection error
-            with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client:
+            with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client:
                 from core.mcp.error import MCPError

                 mock_client_instance = mock_mcp_client.return_value.__enter__.return_value
                 mock_client_instance.list_tools.side_effect = MCPError("Connection failed")

                 # Act & Assert: Verify proper error handling
+                from extensions.ext_database import db
+
+                service = MCPToolManageService(db.session())
                 with pytest.raises(ValueError, match="Failed to connect to MCP server: Connection failed"):
-                    MCPToolManageService.list_mcp_tool_from_remote_server(tenant.id, mcp_provider.id)
+                    service.list_provider_tools(tenant_id=tenant.id, provider_id=mcp_provider.id)

                 # Verify database state was not changed
                 db.session.refresh(mcp_provider)
-                assert mcp_provider.authed is False
+                assert mcp_provider.authed is True  # Provider remains authenticated
                 assert mcp_provider.tools == "[]"

     def test_delete_mcp_tool_success(self, db_session_with_containers, mock_external_service_dependencies):
@@ -840,7 +979,8 @@ class TestMCPToolManageService:
         assert db.session.query(MCPToolProvider).filter_by(id=mcp_provider.id).first() is not None

         # Act: Execute the method under test
-        MCPToolManageService.delete_mcp_tool(tenant.id, mcp_provider.id)
+        service = MCPToolManageService(db.session())
+        service.delete_provider(tenant_id=tenant.id, provider_id=mcp_provider.id)

         # Assert: Verify the expected outcomes
         # Provider should be deleted from database
@@ -862,11 +1002,14 @@ class TestMCPToolManageService:
             db_session_with_containers, mock_external_service_dependencies
         )

-        non_existent_id = fake.uuid4()
+        non_existent_id = str(fake.uuid4())

         # Act & Assert: Verify proper error handling
+        from extensions.ext_database import db
+
+        service = MCPToolManageService(db.session())
         with pytest.raises(ValueError, match="MCP tool not found"):
-            MCPToolManageService.delete_mcp_tool(tenant.id, non_existent_id)
+            service.delete_provider(tenant_id=tenant.id, provider_id=non_existent_id)

     def test_delete_mcp_tool_tenant_isolation(self, db_session_with_containers, mock_external_service_dependencies):
         """
@@ -893,8 +1036,11 @@ class TestMCPToolManageService:
         )

         # Act & Assert: Verify tenant isolation
+        from extensions.ext_database import db
+
+        service = MCPToolManageService(db.session())
         with pytest.raises(ValueError, match="MCP tool not found"):
-            MCPToolManageService.delete_mcp_tool(tenant2.id, mcp_provider1.id)
+            service.delete_provider(tenant_id=tenant2.id, provider_id=mcp_provider1.id)

         # Verify provider still exists in tenant1
         from extensions.ext_database import db
@@ -929,7 +1075,10 @@ class TestMCPToolManageService:
         db.session.commit()

         # Act: Execute the method under test
-        MCPToolManageService.update_mcp_provider(
+        from core.entities.mcp_provider import MCPConfiguration
+
+        service = MCPToolManageService(db.session())
+        service.update_provider(
             tenant_id=tenant.id,
             provider_id=mcp_provider.id,
             name="Updated MCP Provider",
@@ -938,8 +1087,10 @@ class TestMCPToolManageService:
             icon_type="emoji",
             icon_background="#4ECDC4",
             server_identifier="updated_identifier_123",
-            timeout=45.0,
-            sse_read_timeout=400.0,
+            configuration=MCPConfiguration(
+                timeout=45.0,
+                sse_read_timeout=400.0,
+            ),
         )

         # Assert: Verify the expected outcomes
@@ -953,70 +1104,10 @@ class TestMCPToolManageService:
         # Verify icon was updated
         import json

-        icon_data = json.loads(mcp_provider.icon)
+        icon_data = json.loads(mcp_provider.icon or "{}")
         assert icon_data["content"] == "🚀"
         assert icon_data["background"] == "#4ECDC4"

-    def test_update_mcp_provider_with_server_url_change(
-        self, db_session_with_containers, mock_external_service_dependencies
-    ):
-        """
-        Test successful update of MCP provider with server URL change.
-
-        This test verifies:
-        - Proper handling of server URL changes
-        - Correct reconnection logic
-        - Database state updates
-        - External service integration
-        """
-        # Arrange: Create test data
-        fake = Faker()
-        account, tenant = self._create_test_account_and_tenant(
-            db_session_with_containers, mock_external_service_dependencies
-        )
-
-        # Create MCP provider
-        mcp_provider = self._create_test_mcp_provider(
-            db_session_with_containers, mock_external_service_dependencies, tenant.id, account.id
-        )
-
-        from extensions.ext_database import db
-
-        db.session.commit()
-
-        # Mock the reconnection method
-        with patch.object(MCPToolManageService, "_re_connect_mcp_provider") as mock_reconnect:
-            mock_reconnect.return_value = {
-                "authed": True,
-                "tools": '[{"name": "test_tool"}]',
-                "encrypted_credentials": "{}",
-            }
-
-            # Act: Execute the method under test
-            MCPToolManageService.update_mcp_provider(
-                tenant_id=tenant.id,
-                provider_id=mcp_provider.id,
-                name="Updated MCP Provider",
-                server_url="https://new-example.com/mcp",
-                icon="🚀",
-                icon_type="emoji",
-                icon_background="#4ECDC4",
-                server_identifier="updated_identifier_123",
-                timeout=45.0,
-                sse_read_timeout=400.0,
-            )
-
-            # Assert: Verify the expected outcomes
-            db.session.refresh(mcp_provider)
-            assert mcp_provider.name == "Updated MCP Provider"
-            assert mcp_provider.server_identifier == "updated_identifier_123"
-            assert mcp_provider.timeout == 45.0
-            assert mcp_provider.sse_read_timeout == 400.0
-            assert mcp_provider.updated_at is not None
-
-            # Verify reconnection was called
-            mock_reconnect.assert_called_once_with("https://new-example.com/mcp", mcp_provider.id, tenant.id)
-
     def test_update_mcp_provider_duplicate_name(self, db_session_with_containers, mock_external_service_dependencies):
         """
         Test error handling when updating MCP provider with duplicate name.
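All of these hunks apply one and the same migration: the static calls on MCPToolManageService become methods on a service instance bound to a database session, and the loose timeout/sse_read_timeout keyword arguments are folded into an MCPConfiguration value object. A minimal sketch of the new calling convention, pieced together from the names visible in the surrounding hunks (the import path of MCPToolManageService is inferred from the patch targets above; tenant and mcp_provider stand in for the test fixtures):

    from core.entities.mcp_provider import MCPConfiguration
    from extensions.ext_database import db
    from services.tools.mcp_tools_manage_service import MCPToolManageService  # inferred import path

    # The service wraps a concrete session instead of being used as a namespace.
    service = MCPToolManageService(db.session())

    # Timeout settings now travel together in a single value object.
    config = MCPConfiguration(timeout=45.0, sse_read_timeout=400.0)

    # Instance methods take explicit keyword identifiers; illustrative fixture values throughout.
    service.update_provider(
        tenant_id=tenant.id,
        provider_id=mcp_provider.id,
        name="Updated MCP Provider",
        icon_type="emoji",
        icon_background="#4ECDC4",
        server_identifier="updated_identifier_123",
        configuration=config,
    )

The same shape recurs in the hunks below for list_provider_tools, delete_provider, update_provider_credentials, and the private _reconnect_provider helper.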
@@ -1048,8 +1139,12 @@ class TestMCPToolManageService:
         db.session.commit()

         # Act & Assert: Verify proper error handling for duplicate name
+        from core.entities.mcp_provider import MCPConfiguration
+        from extensions.ext_database import db
+
+        service = MCPToolManageService(db.session())
         with pytest.raises(ValueError, match="MCP tool First Provider already exists"):
-            MCPToolManageService.update_mcp_provider(
+            service.update_provider(
                 tenant_id=tenant.id,
                 provider_id=provider2.id,
                 name="First Provider",  # Duplicate name
@@ -1058,8 +1153,10 @@ class TestMCPToolManageService:
                 icon_type="emoji",
                 icon_background="#4ECDC4",
                 server_identifier="unique_identifier",
-                timeout=45.0,
-                sse_read_timeout=400.0,
+                configuration=MCPConfiguration(
+                    timeout=45.0,
+                    sse_read_timeout=400.0,
+                ),
             )

     def test_update_mcp_provider_credentials_success(
@@ -1094,19 +1191,25 @@ class TestMCPToolManageService:
         # Mock the provider controller and encryption
         with (
-            patch("services.tools.mcp_tools_manage_service.MCPToolProviderController") as mock_controller,
-            patch("services.tools.mcp_tools_manage_service.ProviderConfigEncrypter") as mock_encrypter,
+            patch("core.tools.mcp_tool.provider.MCPToolProviderController") as mock_controller,
+            patch("core.tools.utils.encryption.ProviderConfigEncrypter") as mock_encrypter,
         ):
             # Setup mocks
-            mock_controller_instance = mock_controller._from_db.return_value
+            mock_controller_instance = mock_controller.from_db.return_value
             mock_controller_instance.get_credentials_schema.return_value = []
             mock_encrypter_instance = mock_encrypter.return_value
             mock_encrypter_instance.encrypt.return_value = {"new_key": "encrypted_value"}

             # Act: Execute the method under test
-            MCPToolManageService.update_mcp_provider_credentials(
-                mcp_provider=mcp_provider, credentials={"new_key": "new_value"}, authed=True
+            from extensions.ext_database import db
+
+            service = MCPToolManageService(db.session())
+            service.update_provider_credentials(
+                provider_id=mcp_provider.id,
+                tenant_id=tenant.id,
+                credentials={"new_key": "new_value"},
+                authed=True,
             )

         # Assert: Verify the expected outcomes
@@ -1117,7 +1220,7 @@ class TestMCPToolManageService:
         # Verify credentials were encrypted and merged
         import json

-        credentials = json.loads(mcp_provider.encrypted_credentials)
+        credentials = json.loads(mcp_provider.encrypted_credentials or "{}")
         assert "existing_key" in credentials
         assert "new_key" in credentials

@@ -1152,19 +1255,25 @@ class TestMCPToolManageService:
         # Mock the provider controller and encryption
         with (
-            patch("services.tools.mcp_tools_manage_service.MCPToolProviderController") as mock_controller,
-            patch("services.tools.mcp_tools_manage_service.ProviderConfigEncrypter") as mock_encrypter,
+            patch("core.tools.mcp_tool.provider.MCPToolProviderController") as mock_controller,
+            patch("core.tools.utils.encryption.ProviderConfigEncrypter") as mock_encrypter,
         ):
             # Setup mocks
-            mock_controller_instance = mock_controller._from_db.return_value
+            mock_controller_instance = mock_controller.from_db.return_value
             mock_controller_instance.get_credentials_schema.return_value = []
             mock_encrypter_instance = mock_encrypter.return_value
             mock_encrypter_instance.encrypt.return_value = {"new_key": "encrypted_value"}

             # Act: Execute the method under test
-            MCPToolManageService.update_mcp_provider_credentials(
-                mcp_provider=mcp_provider, credentials={"new_key": "new_value"}, authed=False
+            from extensions.ext_database import db
+
+            service = MCPToolManageService(db.session())
+            service.update_provider_credentials(
+                provider_id=mcp_provider.id,
+                tenant_id=tenant.id,
+                credentials={"new_key": "new_value"},
+                authed=False,
             )

         # Assert: Verify the expected outcomes
@@ -1199,41 +1308,37 @@ class TestMCPToolManageService:
             type("MockTool", (), {"model_dump": lambda self: {"name": "test_tool_2", "description": "Test tool 2"}})(),
         ]

-        with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client:
+        with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client:
             # Setup mock client
             mock_client_instance = mock_mcp_client.return_value.__enter__.return_value
             mock_client_instance.list_tools.return_value = mock_tools

             # Act: Execute the method under test
-            result = MCPToolManageService._re_connect_mcp_provider(
-                "https://example.com/mcp", mcp_provider.id, tenant.id
+            from extensions.ext_database import db
+
+            service = MCPToolManageService(db.session())
+            result = service._reconnect_provider(
+                server_url="https://example.com/mcp",
+                provider=mcp_provider,
             )

             # Assert: Verify the expected outcomes
             assert result is not None
-            assert result["authed"] is True
-            assert result["tools"] is not None
-            assert result["encrypted_credentials"] == "{}"
+            assert result.authed is True
+            assert result.tools is not None
+            assert result.encrypted_credentials == "{}"

             # Verify tools were properly serialized
             import json

-            tools_data = json.loads(result["tools"])
+            tools_data = json.loads(result.tools)
             assert len(tools_data) == 2
             assert tools_data[0]["name"] == "test_tool_1"
             assert tools_data[1]["name"] == "test_tool_2"

             # Verify mock interactions
-            mock_mcp_client.assert_called_once_with(
-                "https://example.com/mcp",
-                mcp_provider.id,
-                tenant.id,
-                authed=False,
-                for_list=True,
-                headers={},
-                timeout=30.0,
-                sse_read_timeout=300.0,
-            )
+            provider_entity = mcp_provider.to_entity()
+            mock_mcp_client.assert_called_once()

     def test_re_connect_mcp_provider_auth_error(self, db_session_with_containers, mock_external_service_dependencies):
         """
@@ -1256,22 +1361,26 @@ class TestMCPToolManageService:
         )

         # Mock MCPClient to raise authentication error
-        with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client:
+        with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client:
             from core.mcp.error import MCPAuthError

             mock_client_instance = mock_mcp_client.return_value.__enter__.return_value
             mock_client_instance.list_tools.side_effect = MCPAuthError("Authentication required")

             # Act: Execute the method under test
-            result = MCPToolManageService._re_connect_mcp_provider(
-                "https://example.com/mcp", mcp_provider.id, tenant.id
+            from extensions.ext_database import db
+
+            service = MCPToolManageService(db.session())
+            result = service._reconnect_provider(
+                server_url="https://example.com/mcp",
+                provider=mcp_provider,
             )

             # Assert: Verify the expected outcomes
             assert result is not None
-            assert result["authed"] is False
-            assert result["tools"] == "[]"
-            assert result["encrypted_credentials"] == "{}"
+            assert result.authed is False
+            assert result.tools == "[]"
+            assert result.encrypted_credentials == "{}"

     def test_re_connect_mcp_provider_connection_error(
         self, db_session_with_containers, mock_external_service_dependencies
@@ -1295,12 +1404,18 @@ class TestMCPToolManageService:
         )

         # Mock MCPClient to raise connection error
-        with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client:
+        with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client:
             from core.mcp.error import MCPError

             mock_client_instance = mock_mcp_client.return_value.__enter__.return_value
             mock_client_instance.list_tools.side_effect = MCPError("Connection failed")

             # Act & Assert: Verify proper error handling
+            from extensions.ext_database import db
+
+            service = MCPToolManageService(db.session())
             with pytest.raises(ValueError, match="Failed to re-connect MCP server: Connection failed"):
-                MCPToolManageService._re_connect_mcp_provider("https://example.com/mcp", mcp_provider.id, tenant.id)
+                service._reconnect_provider(
+                    server_url="https://example.com/mcp",
+                    provider=mcp_provider,
+                )
diff --git a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py
index 827f9c010e..e2c616420f 100644
--- a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py
+++ b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py
@@ -6,6 +6,7 @@
 from faker import Faker
 from core.tools.entities.api_entities import ToolProviderApiEntity
 from core.tools.entities.common_entities import I18nObject
 from core.tools.entities.tool_entities import ToolProviderType
+from libs.uuid_utils import uuidv7
 from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider
 from services.tools.tools_transform_service import ToolTransformService
@@ -66,6 +67,7 @@ class TestToolTransformService:
             )
         elif provider_type == "workflow":
             provider = WorkflowToolProvider(
+                id=str(uuidv7()),
                 name=fake.company(),
                 description=fake.text(max_nb_chars=100),
                 icon='{"background": "#FF6B6B", "content": "🔧"}',
@@ -168,7 +170,7 @@ class TestToolTransformService:
         """
         # Arrange: Setup test data
         fake = Faker()
-        provider_type = ToolProviderType.BUILT_IN.value
+        provider_type = ToolProviderType.BUILT_IN
         provider_name = fake.company()
         icon = "🔧"
@@ -206,7 +208,7 @@ class TestToolTransformService:
         """
         # Arrange: Setup test data
         fake = Faker()
-        provider_type = ToolProviderType.API.value
+        provider_type = ToolProviderType.API
         provider_name = fake.company()
         icon = '{"background": "#FF6B6B", "content": "🔧"}'
@@ -231,7 +233,7 @@ class TestToolTransformService:
         """
         # Arrange: Setup test data with invalid JSON
         fake = Faker()
-        provider_type = ToolProviderType.API.value
+        provider_type = ToolProviderType.API
         provider_name = fake.company()
         icon = '{"invalid": json}'
@@ -257,7 +259,7 @@ class TestToolTransformService:
         """
         # Arrange: Setup test data
         fake = Faker()
-        provider_type = ToolProviderType.WORKFLOW.value
+        provider_type = ToolProviderType.WORKFLOW
         provider_name = fake.company()
         icon = {"background": "#FF6B6B", "content": "🔧"}
@@ -282,7 +284,7 @@ class TestToolTransformService:
         """
         # Arrange: Setup test data
         fake = Faker()
-        provider_type = ToolProviderType.MCP.value
+        provider_type = ToolProviderType.MCP
         provider_name = fake.company()
         icon = {"background": "#FF6B6B", "content": "🔧"}
@@ -329,7 +331,7 @@ class TestToolTransformService:
         # Arrange: Setup test data
         fake = Faker()
         tenant_id = fake.uuid4()
-        provider = {"type": ToolProviderType.BUILT_IN.value, "name": fake.company(), "icon": "🔧"}
+        provider = {"type": ToolProviderType.BUILT_IN, "name": fake.company(), "icon": "🔧"}

         # Act: Execute the method under test
         ToolTransformService.repack_provider(tenant_id, provider)
@@ -758,6 +760,7 @@ class TestToolTransformService:

         # Create workflow tool provider
         provider = WorkflowToolProvider(
+            id=str(uuidv7()),
             name=fake.company(),
             description=fake.text(max_nb_chars=100),
             icon='{"background": "#FF6B6B", "content": "🔧"}',
diff --git a/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py
index 18ab4bb73c..2c5e719a58 100644
--- a/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py
+++ b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py
@@ -15,7 +15,7 @@ from core.app.app_config.entities import (
 )
 from core.model_runtime.entities.llm_entities import LLMMode
 from core.prompt.utils.prompt_template_parser import PromptTemplateParser
-from models.account import Account, Tenant
+from models import Account, Tenant
 from models.api_based_extension import APIBasedExtension
 from models.model import App, AppMode, AppModelConfig
 from models.workflow import Workflow
@@ -66,7 +66,7 @@ class TestWorkflowConverter:
         mock_config.model = ModelConfigEntity(
             provider="openai",
             model="gpt-4",
-            mode=LLMMode.CHAT.value,
+            mode=LLMMode.CHAT,
             parameters={},
             stop=[],
         )
@@ -120,7 +120,7 @@ class TestWorkflowConverter:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
@@ -150,7 +150,7 @@ class TestWorkflowConverter:
         app = App(
             tenant_id=tenant.id,
             name=fake.company(),
-            mode=AppMode.CHAT.value,
+            mode=AppMode.CHAT,
             icon_type="emoji",
             icon="🤖",
             icon_background="#FF6B6B",
@@ -218,7 +218,7 @@ class TestWorkflowConverter:
         # Assert: Verify the expected outcomes
         assert new_app is not None
         assert new_app.name == "Test Workflow App"
-        assert new_app.mode == AppMode.ADVANCED_CHAT.value
+        assert new_app.mode == AppMode.ADVANCED_CHAT
         assert new_app.icon_type == "emoji"
         assert new_app.icon == "🚀"
         assert new_app.icon_background == "#4CAF50"
@@ -257,7 +257,7 @@ class TestWorkflowConverter:
         app = App(
             tenant_id=tenant.id,
             name=fake.company(),
-            mode=AppMode.CHAT.value,
+            mode=AppMode.CHAT,
             icon_type="emoji",
             icon="🤖",
             icon_background="#FF6B6B",
@@ -522,7 +522,7 @@ class TestWorkflowConverter:
         model_config = ModelConfigEntity(
             provider="openai",
             model="gpt-4",
-            mode=LLMMode.CHAT.value,
+            mode=LLMMode.CHAT,
             parameters={"temperature": 0.7},
             stop=[],
         )
diff --git a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py
index 4600f2addb..68e485107c 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py
@@ -6,7 +6,7 @@ from faker import Faker
 
 from core.rag.index_processor.constant.index_type import IndexType
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, DatasetAutoDisableLog, Document, DocumentSegment
 from tasks.add_document_to_index_task import add_document_to_index_task
@@ -63,7 +63,7 @@ class TestAddDocumentToIndexTask:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
diff --git a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py
index 3d17a8ac9d..f94c5b19e6 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py
@@ -14,7 +14,7 @@ from faker import Faker
 
 from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document, DocumentSegment
 from models.model import UploadFile
 from tasks.batch_clean_document_task import batch_clean_document_task
@@ -84,7 +84,7 @@ class TestBatchCleanDocumentTask:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
diff --git a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py
index fcae93c669..1b844d6357 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py
@@ -18,7 +18,7 @@ from unittest.mock import MagicMock, patch
 import pytest
 from faker import Faker
 
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document, DocumentSegment
 from models.enums import CreatorUserRole
 from models.model import UploadFile
@@ -112,7 +112,7 @@ class TestBatchCreateSegmentToIndexTask:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py
index e0c2da63b9..45eb9d4f78 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py
@@ -17,7 +17,7 @@ from unittest.mock import MagicMock, patch
 import pytest
 from faker import Faker
 
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import (
     AppDatasetJoin,
     Dataset,
@@ -784,133 +784,6 @@ class TestCleanDatasetTask:
         print(f"Total cleanup time: {cleanup_duration:.3f} seconds")
         print(f"Average time per document: {cleanup_duration / len(documents):.3f} seconds")
 
-    def test_clean_dataset_task_concurrent_cleanup_scenarios(
-        self, db_session_with_containers, mock_external_service_dependencies
-    ):
-        """
-        Test dataset cleanup with concurrent cleanup scenarios and race conditions.
-
-        This test verifies that the task can properly:
-        1. Handle multiple cleanup operations on the same dataset
-        2. Prevent data corruption during concurrent access
-        3. Maintain data consistency across multiple cleanup attempts
-        4. Handle race conditions gracefully
-        5. Ensure idempotent cleanup operations
-        """
-        # Create test data
-        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
-        dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
-        document = self._create_test_document(db_session_with_containers, account, tenant, dataset)
-        segment = self._create_test_segment(db_session_with_containers, account, tenant, dataset, document)
-        upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)
-
-        # Update document with file reference
-        import json
-
-        document.data_source_info = json.dumps({"upload_file_id": upload_file.id})
-        from extensions.ext_database import db
-
-        db.session.commit()
-
-        # Save IDs for verification
-        dataset_id = dataset.id
-        tenant_id = tenant.id
-        upload_file_id = upload_file.id
-
-        # Mock storage to simulate slow operations
-        mock_storage = mock_external_service_dependencies["storage"]
-        original_delete = mock_storage.delete
-
-        def slow_delete(key):
-            import time
-
-            time.sleep(0.1)  # Simulate slow storage operation
-            return original_delete(key)
-
-        mock_storage.delete.side_effect = slow_delete
-
-        # Execute multiple cleanup operations concurrently
-        import threading
-
-        cleanup_results = []
-        cleanup_errors = []
-
-        def run_cleanup():
-            try:
-                clean_dataset_task(
-                    dataset_id=dataset_id,
-                    tenant_id=tenant_id,
-                    indexing_technique="high_quality",
-                    index_struct='{"type": "paragraph"}',
-                    collection_binding_id=str(uuid.uuid4()),
-                    doc_form="paragraph_index",
-                )
-                cleanup_results.append("success")
-            except Exception as e:
-                cleanup_errors.append(str(e))
-
-        # Start multiple cleanup threads
-        threads = []
-        for i in range(3):
-            thread = threading.Thread(target=run_cleanup)
-            threads.append(thread)
-            thread.start()
-
-        # Wait for all threads to complete
-        for thread in threads:
-            thread.join()
-
-        # Verify results
-        # Check that all documents were deleted (only once)
-        remaining_documents = db.session.query(Document).filter_by(dataset_id=dataset_id).all()
-        assert len(remaining_documents) == 0
-
-        # Check that all segments were deleted (only once)
-        remaining_segments = db.session.query(DocumentSegment).filter_by(dataset_id=dataset_id).all()
-        assert len(remaining_segments) == 0
-
-        # Check that upload file was deleted (only once)
-        # Note: In concurrent scenarios, the first thread deletes documents and segments,
-        # subsequent threads may not find the related data to clean up upload files
-        # This demonstrates the idempotent nature of the cleanup process
-        remaining_files = db.session.query(UploadFile).filter_by(id=upload_file_id).all()
-        # The upload file should be deleted by the first successful cleanup operation
-        # However, in concurrent scenarios, this may not always happen due to race conditions
-        # This test demonstrates the idempotent nature of the cleanup process
-        if len(remaining_files) > 0:
-            print(f"Warning: Upload file {upload_file_id} was not deleted in concurrent scenario")
-            print("This is expected behavior demonstrating the idempotent nature of cleanup")
-            # We don't assert here as the behavior depends on timing and race conditions
-
-        # Verify that storage.delete was called (may be called multiple times in concurrent scenarios)
-        # In concurrent scenarios, storage operations may be called multiple times due to race conditions
-        assert mock_storage.delete.call_count > 0
-
-        # Verify that index processor was called (may be called multiple times in concurrent scenarios)
-        mock_index_processor = mock_external_service_dependencies["index_processor"]
-        assert mock_index_processor.clean.call_count > 0
-
-        # Check cleanup results
-        assert len(cleanup_results) == 3, "All cleanup operations should complete"
-        assert len(cleanup_errors) == 0, "No cleanup errors should occur"
-
-        # Verify idempotency by running cleanup again on the same dataset
-        # This should not perform any additional operations since data is already cleaned
-        clean_dataset_task(
-            dataset_id=dataset_id,
-            tenant_id=tenant_id,
-            indexing_technique="high_quality",
-            index_struct='{"type": "paragraph"}',
-            collection_binding_id=str(uuid.uuid4()),
-            doc_form="paragraph_index",
-        )
-
-        # Verify that no additional storage operations were performed
-        # Note: In concurrent scenarios, the exact count may vary due to race conditions
-        print(f"Final storage delete calls: {mock_storage.delete.call_count}")
-        print(f"Final index processor calls: {mock_index_processor.clean.call_count}")
-        print("Note: Multiple calls in concurrent scenarios are expected due to race conditions")
-
     def test_clean_dataset_task_storage_exception_handling(
         self, db_session_with_containers, mock_external_service_dependencies
     ):
diff --git a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py
index de81295100..8004175b2d 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py
@@ -13,7 +13,7 @@ import pytest
 from faker import Faker
 
 from extensions.ext_redis import redis_client
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document, DocumentSegment
 from tasks.create_segment_to_index_task import create_segment_to_index_task
@@ -91,7 +91,7 @@ class TestCreateSegmentToIndexTask:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
diff --git a/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py
index 7af4f238be..94e9b76965 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py
@@ -48,11 +48,8 @@ class TestDeleteSegmentFromIndexTask:
             Tenant: Created test tenant instance
         """
         fake = fake or Faker()
-        tenant = Tenant()
+        tenant = Tenant(name=f"Test Tenant {fake.company()}", plan="basic", status="active")
         tenant.id = fake.uuid4()
-        tenant.name = f"Test Tenant {fake.company()}"
-        tenant.plan = "basic"
-        tenant.status = "active"
         tenant.created_at = fake.date_time_this_year()
         tenant.updated_at = tenant.created_at
 
@@ -73,16 +70,14 @@ class TestDeleteSegmentFromIndexTask:
             Account: Created test account instance
         """
         fake = fake or Faker()
-        account = Account()
+        account = Account(
+            name=fake.name(),
+            email=fake.email(),
+            avatar=fake.url(),
+            status="active",
+            interface_language="en-US",
+        )
         account.id = fake.uuid4()
-        account.email = fake.email()
-        account.name = fake.name()
-        account.avatar_url = fake.url()
-        account.tenant_id = tenant.id
-        account.status = "active"
-        account.type = "normal"
-        account.role = "owner"
-        account.interface_language = "en-US"
         account.created_at = fake.date_time_this_year()
         account.updated_at = account.created_at
diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py
index e1d63e993b..8785c948d1 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py
@@ -16,7 +16,7 @@ from faker import Faker
 
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document, DocumentSegment
 from tasks.disable_segment_from_index_task import disable_segment_from_index_task
@@ -69,7 +69,7 @@ class TestDisableSegmentFromIndexTask:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py
index 5fdb8c617c..0b36e0914a 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py
@@ -43,27 +43,30 @@ class TestDisableSegmentsFromIndexTask:
             Account: Created test account instance
         """
         fake = fake or Faker()
-        account = Account()
+        account = Account(
+            email=fake.email(),
+            name=fake.name(),
+            avatar=fake.url(),
+            status="active",
+            interface_language="en-US",
+        )
         account.id = fake.uuid4()
-        account.email = fake.email()
-        account.name = fake.name()
-        account.avatar_url = fake.url()
+        # monkey-patch attributes for test setup
         account.tenant_id = fake.uuid4()
-        account.status = "active"
         account.type = "normal"
         account.role = "owner"
-        account.interface_language = "en-US"
         account.created_at = fake.date_time_this_year()
         account.updated_at = account.created_at
 
         # Create a tenant for the account
         from models.account import Tenant
 
-        tenant = Tenant()
+        tenant = Tenant(
+            name=f"Test Tenant {fake.company()}",
+            plan="basic",
+            status="active",
+        )
         tenant.id = account.tenant_id
-        tenant.name = f"Test Tenant {fake.company()}"
-        tenant.plan = "basic"
-        tenant.status = "active"
         tenant.created_at = fake.date_time_this_year()
         tenant.updated_at = tenant.created_at
 
@@ -91,20 +94,21 @@ class TestDisableSegmentsFromIndexTask:
             Dataset: Created test dataset instance
         """
         fake = fake or Faker()
-        dataset = Dataset()
-        dataset.id = fake.uuid4()
-        dataset.tenant_id = account.tenant_id
-        dataset.name = f"Test Dataset {fake.word()}"
-        dataset.description = fake.text(max_nb_chars=200)
-        dataset.provider = "vendor"
-        dataset.permission = "only_me"
-        dataset.data_source_type = "upload_file"
-        dataset.indexing_technique = "high_quality"
-        dataset.created_by = account.id
-        dataset.updated_by = account.id
-        dataset.embedding_model = "text-embedding-ada-002"
-        dataset.embedding_model_provider = "openai"
-        dataset.built_in_field_enabled = False
+        dataset = Dataset(
+            id=fake.uuid4(),
+            tenant_id=account.tenant_id,
+            name=f"Test Dataset {fake.word()}",
+            description=fake.text(max_nb_chars=200),
+            provider="vendor",
+            permission="only_me",
+            data_source_type="upload_file",
+            indexing_technique="high_quality",
+            created_by=account.id,
+            updated_by=account.id,
+            embedding_model="text-embedding-ada-002",
+            embedding_model_provider="openai",
+            built_in_field_enabled=False,
+        )
 
         from extensions.ext_database import db
@@ -128,6 +132,7 @@ class TestDisableSegmentsFromIndexTask:
         """
         fake = fake or Faker()
         document = DatasetDocument()
+        document.id = fake.uuid4()
+
         document.tenant_id = dataset.tenant_id
         document.dataset_id = dataset.id
@@ -153,7 +158,6 @@ class TestDisableSegmentsFromIndexTask:
         document.archived = False
         document.doc_form = "text_model"  # Use text_model form for testing
         document.doc_language = "en"
-        from extensions.ext_database import db
 
         db.session.add(document)
diff --git a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py
index f75dcf06e1..448f6da5ec 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py
@@ -4,7 +4,7 @@
 import pytest
 from faker import Faker
 
 from extensions.ext_database import db
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document
 from tasks.document_indexing_task import document_indexing_task
@@ -72,7 +72,7 @@ class TestDocumentIndexingTask:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
@@ -154,7 +154,7 @@ class TestDocumentIndexingTask:
         join = TenantAccountJoin(
             tenant_id=tenant.id,
             account_id=account.id,
-            role=TenantAccountRole.OWNER.value,
+            role=TenantAccountRole.OWNER,
             current=True,
         )
         db.session.add(join)
diff --git a/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py
new file mode 100644
index 0000000000..798fe091ab
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py
@@ -0,0 +1,450 @@
+from unittest.mock import MagicMock, patch
+
+import pytest
+from faker import Faker
+
+from core.rag.index_processor.constant.index_type import IndexType
+from extensions.ext_database import db
+from extensions.ext_redis import redis_client
+from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models.dataset import Dataset, Document, DocumentSegment
+from tasks.enable_segments_to_index_task import enable_segments_to_index_task
+
+
+class TestEnableSegmentsToIndexTask:
+    """Integration tests for enable_segments_to_index_task using testcontainers."""
+
+    @pytest.fixture
+    def mock_external_service_dependencies(self):
+        """Mock setup for external service dependencies."""
+        with (
+            patch("tasks.enable_segments_to_index_task.IndexProcessorFactory") as mock_index_processor_factory,
+        ):
+            # Setup mock index processor
+            mock_processor = MagicMock()
+            mock_index_processor_factory.return_value.init_index_processor.return_value = mock_processor
+
+            yield {
+                "index_processor_factory": mock_index_processor_factory,
+                "index_processor": mock_processor,
+            }
+
+    def _create_test_dataset_and_document(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Helper method to create a test dataset and document for testing.
+
+        Args:
+            db_session_with_containers: Database session from testcontainers infrastructure
+            mock_external_service_dependencies: Mock dependencies
+
+        Returns:
+            tuple: (dataset, document) - Created dataset and document instances
+        """
+        fake = Faker()
+
+        # Create account and tenant
+        account = Account(
+            email=fake.email(),
+            name=fake.name(),
+            interface_language="en-US",
+            status="active",
+        )
+        db.session.add(account)
+        db.session.commit()
+
+        tenant = Tenant(
+            name=fake.company(),
+            status="normal",
+        )
+        db.session.add(tenant)
+        db.session.commit()
+
+        # Create tenant-account join
+        join = TenantAccountJoin(
+            tenant_id=tenant.id,
+            account_id=account.id,
+            role=TenantAccountRole.OWNER,
+            current=True,
+        )
+        db.session.add(join)
+        db.session.commit()
+
+        # Create dataset
+        dataset = Dataset(
+            id=fake.uuid4(),
+            tenant_id=tenant.id,
+            name=fake.company(),
+            description=fake.text(max_nb_chars=100),
+            data_source_type="upload_file",
+            indexing_technique="high_quality",
+            created_by=account.id,
+        )
+        db.session.add(dataset)
+        db.session.commit()
+
+        # Create document
+        document = Document(
+            id=fake.uuid4(),
+            tenant_id=tenant.id,
+            dataset_id=dataset.id,
+            position=1,
+            data_source_type="upload_file",
+            batch="test_batch",
+            name=fake.file_name(),
+            created_from="upload_file",
+            created_by=account.id,
+            indexing_status="completed",
+            enabled=True,
+            doc_form=IndexType.PARAGRAPH_INDEX,
+        )
+        db.session.add(document)
+        db.session.commit()
+
+        # Refresh dataset to ensure doc_form property works correctly
+        db.session.refresh(dataset)
+
+        return dataset, document
+
+    def _create_test_segments(
+        self, db_session_with_containers, document, dataset, count=3, enabled=False, status="completed"
+    ):
+        """
+        Helper method to create test document segments.
+
+        Args:
+            db_session_with_containers: Database session from testcontainers infrastructure
+            document: Document instance
+            dataset: Dataset instance
+            count: Number of segments to create
+            enabled: Whether segments should be enabled
+            status: Status of the segments
+
+        Returns:
+            list: List of created DocumentSegment instances
+        """
+        fake = Faker()
+        segments = []
+
+        for i in range(count):
+            text = fake.text(max_nb_chars=200)
+            segment = DocumentSegment(
+                id=fake.uuid4(),
+                tenant_id=document.tenant_id,
+                dataset_id=dataset.id,
+                document_id=document.id,
+                position=i,
+                content=text,
+                word_count=len(text.split()),
+                tokens=len(text.split()) * 2,
+                index_node_id=f"node_{i}",
+                index_node_hash=f"hash_{i}",
+                enabled=enabled,
+                status=status,
+                created_by=document.created_by,
+            )
+            db.session.add(segment)
+            segments.append(segment)
+
+        db.session.commit()
+        return segments
+
+    def test_enable_segments_to_index_with_different_index_type(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test segments indexing with different index types.
+
+        This test verifies:
+        - Proper handling of different index types
+        - Index processor factory integration
+        - Document processing with various configurations
+        - Redis cache key deletion
+        """
+        # Arrange: Create test data with different index type
+        dataset, document = self._create_test_dataset_and_document(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Update document to use different index type
+        document.doc_form = IndexType.QA_INDEX
+        db.session.commit()
+
+        # Refresh dataset to ensure doc_form property reflects the updated document
+        db.session.refresh(dataset)
+
+        # Create segments
+        segments = self._create_test_segments(db_session_with_containers, document, dataset)
+
+        # Set up Redis cache keys
+        segment_ids = [segment.id for segment in segments]
+        for segment in segments:
+            indexing_cache_key = f"segment_{segment.id}_indexing"
+            redis_client.set(indexing_cache_key, "processing", ex=300)
+
+        # Act: Execute the task
+        enable_segments_to_index_task(segment_ids, dataset.id, document.id)
+
+        # Assert: Verify different index type handling
+        mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.QA_INDEX)
+        mock_external_service_dependencies["index_processor"].load.assert_called_once()
+
+        # Verify the load method was called with correct parameters
+        call_args = mock_external_service_dependencies["index_processor"].load.call_args
+        assert call_args is not None
+        documents = call_args[0][1]  # Second argument should be documents list
+        assert len(documents) == 3
+
+        # Verify Redis cache keys were deleted
+        for segment in segments:
+            indexing_cache_key = f"segment_{segment.id}_indexing"
+            assert redis_client.exists(indexing_cache_key) == 0
+
+    def test_enable_segments_to_index_dataset_not_found(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test handling of non-existent dataset.
+
+        This test verifies:
+        - Proper error handling for missing datasets
+        - Early return without processing
+        - Database session cleanup
+        - No unnecessary index processor calls
+        """
+        # Arrange: Use non-existent dataset ID
+        fake = Faker()
+        non_existent_dataset_id = fake.uuid4()
+        non_existent_document_id = fake.uuid4()
+        segment_ids = [fake.uuid4()]
+
+        # Act: Execute the task with non-existent dataset
+        enable_segments_to_index_task(segment_ids, non_existent_dataset_id, non_existent_document_id)
+
+        # Assert: Verify no processing occurred
+        mock_external_service_dependencies["index_processor_factory"].assert_not_called()
+        mock_external_service_dependencies["index_processor"].load.assert_not_called()
+
+    def test_enable_segments_to_index_document_not_found(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test handling of non-existent document.
+
+        This test verifies:
+        - Proper error handling for missing documents
+        - Early return without processing
+        - Database session cleanup
+        - No unnecessary index processor calls
+        """
+        # Arrange: Create dataset but use non-existent document ID
+        dataset, _ = self._create_test_dataset_and_document(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+        fake = Faker()
+        non_existent_document_id = fake.uuid4()
+        segment_ids = [fake.uuid4()]
+
+        # Act: Execute the task with non-existent document
+        enable_segments_to_index_task(segment_ids, dataset.id, non_existent_document_id)
+
+        # Assert: Verify no processing occurred
+        mock_external_service_dependencies["index_processor_factory"].assert_not_called()
+        mock_external_service_dependencies["index_processor"].load.assert_not_called()
+
+    def test_enable_segments_to_index_invalid_document_status(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test handling of document with invalid status.
+
+        This test verifies:
+        - Early return when document is disabled, archived, or not completed
+        - No index processing for documents not ready for indexing
+        - Proper database session cleanup
+        - No unnecessary external service calls
+        """
+        # Arrange: Create test data with invalid document status
+        dataset, document = self._create_test_dataset_and_document(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Test different invalid statuses
+        invalid_statuses = [
+            ("disabled", {"enabled": False}),
+            ("archived", {"archived": True}),
+            ("not_completed", {"indexing_status": "processing"}),
+        ]
+
+        for _, status_attrs in invalid_statuses:
+            # Reset document status
+            document.enabled = True
+            document.archived = False
+            document.indexing_status = "completed"
+            db.session.commit()
+
+            # Set invalid status
+            for attr, value in status_attrs.items():
+                setattr(document, attr, value)
+            db.session.commit()
+
+            # Create segments
+            segments = self._create_test_segments(db_session_with_containers, document, dataset)
+            segment_ids = [segment.id for segment in segments]
+
+            # Act: Execute the task
+            enable_segments_to_index_task(segment_ids, dataset.id, document.id)
+
+            # Assert: Verify no processing occurred
+            mock_external_service_dependencies["index_processor_factory"].assert_not_called()
+            mock_external_service_dependencies["index_processor"].load.assert_not_called()
+
+            # Clean up segments for next iteration
+            for segment in segments:
+                db.session.delete(segment)
+            db.session.commit()
+
+    def test_enable_segments_to_index_segments_not_found(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test handling when no segments are found.
+
+        This test verifies:
+        - Proper handling when segments don't exist
+        - Early return without processing
+        - Database session cleanup
+        - Index processor is created but load is not called
+        """
+        # Arrange: Create test data
+        dataset, document = self._create_test_dataset_and_document(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Use non-existent segment IDs
+        fake = Faker()
+        non_existent_segment_ids = [fake.uuid4() for _ in range(3)]
+
+        # Act: Execute the task with non-existent segments
+        enable_segments_to_index_task(non_existent_segment_ids, dataset.id, document.id)
+
+        # Assert: Verify index processor was created but load was not called
+        mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX)
+        mock_external_service_dependencies["index_processor"].load.assert_not_called()
+
+    def test_enable_segments_to_index_with_parent_child_structure(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test segments indexing with parent-child structure.
+
+        This test verifies:
+        - Proper handling of PARENT_CHILD_INDEX type
+        - Child document creation from segments
+        - Correct document structure for parent-child indexing
+        - Index processor receives properly structured documents
+        - Redis cache key deletion
+        """
+        # Arrange: Create test data with parent-child index type
+        dataset, document = self._create_test_dataset_and_document(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Update document to use parent-child index type
+        document.doc_form = IndexType.PARENT_CHILD_INDEX
+        db.session.commit()
+
+        # Refresh dataset to ensure doc_form property reflects the updated document
+        db.session.refresh(dataset)
+
+        # Create segments with mock child chunks
+        segments = self._create_test_segments(db_session_with_containers, document, dataset)
+
+        # Set up Redis cache keys
+        segment_ids = [segment.id for segment in segments]
+        for segment in segments:
+            indexing_cache_key = f"segment_{segment.id}_indexing"
+            redis_client.set(indexing_cache_key, "processing", ex=300)
+
+        # Mock the get_child_chunks method for each segment
+        with patch.object(DocumentSegment, "get_child_chunks") as mock_get_child_chunks:
+            # Setup mock to return child chunks for each segment
+            mock_child_chunks = []
+            for i in range(2):  # Each segment has 2 child chunks
+                mock_child = MagicMock()
+                mock_child.content = f"child_content_{i}"
+                mock_child.index_node_id = f"child_node_{i}"
+                mock_child.index_node_hash = f"child_hash_{i}"
+                mock_child_chunks.append(mock_child)
+
+            mock_get_child_chunks.return_value = mock_child_chunks
+
+            # Act: Execute the task
+            enable_segments_to_index_task(segment_ids, dataset.id, document.id)
+
+            # Assert: Verify parent-child index processing
+            mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(
+                IndexType.PARENT_CHILD_INDEX
+            )
+            mock_external_service_dependencies["index_processor"].load.assert_called_once()
+
+            # Verify the load method was called with correct parameters
+            call_args = mock_external_service_dependencies["index_processor"].load.call_args
+            assert call_args is not None
+            documents = call_args[0][1]  # Second argument should be documents list
+            assert len(documents) == 3  # 3 segments
+
+            # Verify each document has children
+            for doc in documents:
+                assert hasattr(doc, "children")
+                assert len(doc.children) == 2  # Each document has 2 children
+
+            # Verify Redis cache keys were deleted
+            for segment in segments:
+                indexing_cache_key = f"segment_{segment.id}_indexing"
+                assert redis_client.exists(indexing_cache_key) == 0
+
+    def test_enable_segments_to_index_general_exception_handling(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test general exception handling during indexing process.
+
+        This test verifies:
+        - Exceptions are properly caught and handled
+        - Segment status is set to error
+        - Segments are disabled
+        - Error information is recorded
+        - Redis cache is still cleared
+        - Database session is properly closed
+        """
+        # Arrange: Create test data
+        dataset, document = self._create_test_dataset_and_document(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+        segments = self._create_test_segments(db_session_with_containers, document, dataset)
+
+        # Set up Redis cache keys
+        segment_ids = [segment.id for segment in segments]
+        for segment in segments:
+            indexing_cache_key = f"segment_{segment.id}_indexing"
+            redis_client.set(indexing_cache_key, "processing", ex=300)
+
+        # Mock the index processor to raise an exception
+        mock_external_service_dependencies["index_processor"].load.side_effect = Exception("Index processing failed")
+
+        # Act: Execute the task
+        enable_segments_to_index_task(segment_ids, dataset.id, document.id)
+
+        # Assert: Verify error handling
+        for segment in segments:
+            db.session.refresh(segment)
+            assert segment.enabled is False
+            assert segment.status == "error"
+            assert segment.error is not None
+            assert "Index processing failed" in segment.error
+            assert segment.disabled_at is not None
+
+        # Verify Redis cache keys were still cleared despite error
+        for segment in segments:
+            indexing_cache_key = f"segment_{segment.id}_indexing"
+            assert redis_client.exists(indexing_cache_key) == 0
diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py
new file mode 100644
index 0000000000..31e9b67421
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py
@@ -0,0 +1,242 @@
+from unittest.mock import MagicMock, patch
+
+import pytest
+from faker import Faker
+
+from extensions.ext_database import db
+from libs.email_i18n import EmailType
+from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from tasks.mail_account_deletion_task import send_account_deletion_verification_code, send_deletion_success_task
+
+
+class TestMailAccountDeletionTask:
+    """Integration tests for mail account deletion tasks using testcontainers."""
+
+    @pytest.fixture
+    def mock_external_service_dependencies(self):
+        """Mock setup for external service dependencies."""
+        with (
+            patch("tasks.mail_account_deletion_task.mail") as mock_mail,
+            patch("tasks.mail_account_deletion_task.get_email_i18n_service") as mock_get_email_service,
+        ):
+            # Setup mock mail service
+            mock_mail.is_inited.return_value = True
+
+            # Setup mock email service
+            mock_email_service = MagicMock()
+            mock_get_email_service.return_value = mock_email_service
+
+            yield {
+                "mail": mock_mail,
+                "get_email_service": mock_get_email_service,
+                "email_service": mock_email_service,
+            }
+
+    def _create_test_account(self, db_session_with_containers):
+        """
+        Helper method to create a test account for testing.
+
+        Args:
+            db_session_with_containers: Database session from testcontainers infrastructure
+
+        Returns:
+            Account: Created account instance
+        """
+        fake = Faker()
+
+        # Create account
+        account = Account(
+            email=fake.email(),
+            name=fake.name(),
+            interface_language="en-US",
+            status="active",
+        )
+        db.session.add(account)
+        db.session.commit()
+
+        # Create tenant
+        tenant = Tenant(
+            name=fake.company(),
+            status="normal",
+        )
+        db.session.add(tenant)
+        db.session.commit()
+
+        # Create tenant-account join
+        join = TenantAccountJoin(
+            tenant_id=tenant.id,
+            account_id=account.id,
+            role=TenantAccountRole.OWNER,
+            current=True,
+        )
+        db.session.add(join)
+        db.session.commit()
+
+        return account
+
+    def test_send_deletion_success_task_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful account deletion success email sending.
+
+        This test verifies:
+        - Proper email service initialization check
+        - Correct email service method calls
+        - Template context is properly formatted
+        - Email type is correctly specified
+        """
+        # Arrange: Create test data
+        account = self._create_test_account(db_session_with_containers)
+        test_email = account.email
+        test_language = "en-US"
+
+        # Act: Execute the task
+        send_deletion_success_task(test_email, test_language)
+
+        # Assert: Verify the expected outcomes
+        # Verify mail service was checked
+        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
+
+        # Verify email service was retrieved
+        mock_external_service_dependencies["get_email_service"].assert_called_once()
+
+        # Verify email was sent with correct parameters
+        mock_external_service_dependencies["email_service"].send_email.assert_called_once_with(
+            email_type=EmailType.ACCOUNT_DELETION_SUCCESS,
+            language_code=test_language,
+            to=test_email,
+            template_context={
+                "to": test_email,
+                "email": test_email,
+            },
+        )
+
+    def test_send_deletion_success_task_mail_not_initialized(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test account deletion success email when mail service is not initialized.
+
+        This test verifies:
+        - Early return when mail service is not initialized
+        - No email service calls are made
+        - No exceptions are raised
+        """
+        # Arrange: Setup mail service to return not initialized
+        mock_external_service_dependencies["mail"].is_inited.return_value = False
+        account = self._create_test_account(db_session_with_containers)
+        test_email = account.email
+
+        # Act: Execute the task
+        send_deletion_success_task(test_email)
+
+        # Assert: Verify no email service calls were made
+        mock_external_service_dependencies["get_email_service"].assert_not_called()
+        mock_external_service_dependencies["email_service"].send_email.assert_not_called()
+
+    def test_send_deletion_success_task_email_service_exception(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test account deletion success email when email service raises exception.
+
+        This test verifies:
+        - Exception is properly caught and logged
+        - Task completes without raising exception
+        - Error logging is recorded
+        """
+        # Arrange: Setup email service to raise exception
+        mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed")
+        account = self._create_test_account(db_session_with_containers)
+        test_email = account.email
+
+        # Act: Execute the task (should not raise exception)
+        send_deletion_success_task(test_email)
+
+        # Assert: Verify email service was called but exception was handled
+        mock_external_service_dependencies["email_service"].send_email.assert_called_once()
+
+    def test_send_account_deletion_verification_code_success(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test successful account deletion verification code email sending.
+
+        This test verifies:
+        - Proper email service initialization check
+        - Correct email service method calls
+        - Template context includes verification code
+        - Email type is correctly specified
+        """
+        # Arrange: Create test data
+        account = self._create_test_account(db_session_with_containers)
+        test_email = account.email
+        test_code = "123456"
+        test_language = "en-US"
+
+        # Act: Execute the task
+        send_account_deletion_verification_code(test_email, test_code, test_language)
+
+        # Assert: Verify the expected outcomes
+        # Verify mail service was checked
+        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
+
+        # Verify email service was retrieved
+        mock_external_service_dependencies["get_email_service"].assert_called_once()
+
+        # Verify email was sent with correct parameters
+        mock_external_service_dependencies["email_service"].send_email.assert_called_once_with(
+            email_type=EmailType.ACCOUNT_DELETION_VERIFICATION,
+            language_code=test_language,
+            to=test_email,
+            template_context={
+                "to": test_email,
+                "code": test_code,
+            },
+        )
+
+    def test_send_account_deletion_verification_code_mail_not_initialized(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test account deletion verification code email when mail service is not initialized.
+
+        This test verifies:
+        - Early return when mail service is not initialized
+        - No email service calls are made
+        - No exceptions are raised
+        """
+        # Arrange: Setup mail service to return not initialized
+        mock_external_service_dependencies["mail"].is_inited.return_value = False
+        account = self._create_test_account(db_session_with_containers)
+        test_email = account.email
+        test_code = "123456"
+
+        # Act: Execute the task
+        send_account_deletion_verification_code(test_email, test_code)
+
+        # Assert: Verify no email service calls were made
+        mock_external_service_dependencies["get_email_service"].assert_not_called()
+        mock_external_service_dependencies["email_service"].send_email.assert_not_called()
+
+    def test_send_account_deletion_verification_code_email_service_exception(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test account deletion verification code email when email service raises exception.
+
+        This test verifies:
+        - Exception is properly caught and logged
+        - Task completes without raising exception
+        - Error logging is recorded
+        """
+        # Arrange: Setup email service to raise exception
+        mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed")
+        account = self._create_test_account(db_session_with_containers)
+        test_email = account.email
+        test_code = "123456"
+
+        # Act: Execute the task (should not raise exception)
+        send_account_deletion_verification_code(test_email, test_code)
+
+        # Assert: Verify email service was called but exception was handled
+        mock_external_service_dependencies["email_service"].send_email.assert_called_once()
diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py
new file mode 100644
index 0000000000..1aed7dc7cc
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py
@@ -0,0 +1,282 @@
+from unittest.mock import MagicMock, patch
+
+import pytest
+from faker import Faker
+
+from libs.email_i18n import EmailType
+from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from tasks.mail_change_mail_task import send_change_mail_completed_notification_task, send_change_mail_task
+
+
+class TestMailChangeMailTask:
+    """Integration tests for mail_change_mail_task using testcontainers."""
+
+    @pytest.fixture
+    def mock_external_service_dependencies(self):
+        """Mock setup for external service dependencies."""
+        with (
+            patch("tasks.mail_change_mail_task.mail") as mock_mail,
+            patch("tasks.mail_change_mail_task.get_email_i18n_service") as mock_get_email_i18n_service,
+        ):
+            # Setup mock mail service
+            mock_mail.is_inited.return_value = True
+
+            # Setup mock email i18n service
+            mock_email_service = MagicMock()
+            mock_get_email_i18n_service.return_value = mock_email_service
+
+            yield {
+                "mail": mock_mail,
+                "email_i18n_service": mock_email_service,
+                "get_email_i18n_service": mock_get_email_i18n_service,
+            }
+
+    def _create_test_account(self, db_session_with_containers):
+        """
+        Helper method to create a test account for testing.
+
+        Args:
+            db_session_with_containers: Database session from testcontainers infrastructure
+
+        Returns:
+            Account: Created account instance
+        """
+        fake = Faker()
+
+        # Create account
+        account = Account(
+            email=fake.email(),
+            name=fake.name(),
+            interface_language="en-US",
+            status="active",
+        )
+        db_session_with_containers.add(account)
+        db_session_with_containers.commit()
+
+        # Create tenant
+        tenant = Tenant(
+            name=fake.company(),
+            status="normal",
+        )
+        db_session_with_containers.add(tenant)
+        db_session_with_containers.commit()
+
+        # Create tenant-account join
+        join = TenantAccountJoin(
+            tenant_id=tenant.id,
+            account_id=account.id,
+            role=TenantAccountRole.OWNER,
+            current=True,
+        )
+        db_session_with_containers.add(join)
+        db_session_with_containers.commit()
+
+        return account
+
+    def test_send_change_mail_task_success_old_email_phase(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test successful change email task execution for old_email phase.
+ + This test verifies: + - Proper mail service initialization check + - Correct email service method call with old_email phase + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "en-US" + test_email = account.email + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_task_success_new_email_phase( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email task execution for new_email phase. + + This test verifies: + - Proper mail service initialization check + - Correct email service method call with new_email phase + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "zh-Hans" + test_email = "new@example.com" + test_code = "789012" + test_phase = "new_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email task when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls when mail is not available + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify no email service calls + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_not_called() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_not_called() + + def test_send_change_mail_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email task when email service raises an exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_i18n_service"].send_change_email.side_effect = Exception( + "Email service failed" + ) + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task (should not raise exception) + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify email service was called despite exception + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_completed_notification_task_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email completed notification task execution. + + This test verifies: + - Proper mail service initialization check + - Correct email service method call with CHANGE_EMAIL_COMPLETED type + - Template context is properly constructed + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "en-US" + test_email = account.email + + # Act: Execute the task + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with( + email_type=EmailType.CHANGE_EMAIL_COMPLETED, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) + + def test_send_change_mail_completed_notification_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email completed notification task when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls when mail is not available + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + test_language = "en-US" + test_email = "test@example.com" + + # Act: Execute the task + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify no email service calls + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_not_called() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_not_called() + + def test_send_change_mail_completed_notification_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email completed notification task when email service raises an exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_i18n_service"].send_email.side_effect = Exception( + "Email service failed" + ) + test_language = "en-US" + test_email = "test@example.com" + + # Act: Execute the task (should not raise exception) + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify email service was called despite exception + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with( + email_type=EmailType.CHANGE_EMAIL_COMPLETED, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py new file mode 100644 index 0000000000..e6a804784a --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py @@ -0,0 +1,598 @@ +""" +TestContainers-based integration tests for send_email_code_login_mail_task. + +This module provides comprehensive integration tests for the email code login mail task +using TestContainers infrastructure. The tests ensure that the task properly sends +email verification codes for login with internationalization support and handles +various error scenarios in a real database environment. + +All tests use the testcontainers infrastructure to ensure proper database isolation +and realistic testing scenarios with actual PostgreSQL and Redis instances. +""" + +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_email_code_login import send_email_code_login_mail_task + + +class TestSendEmailCodeLoginMailTask: + """ + Comprehensive integration tests for send_email_code_login_mail_task using testcontainers. + + This test class covers all major functionality of the email code login mail task: + - Successful email sending with different languages + - Email service integration and template rendering + - Error handling for various failure scenarios + - Performance metrics and logging verification + - Edge cases and boundary conditions + + All tests use the testcontainers infrastructure to ensure proper database isolation + and realistic testing environment with actual database interactions. 
+ """ + + @pytest.fixture(autouse=True) + def cleanup_database(self, db_session_with_containers): + """Clean up database before each test to ensure isolation.""" + from extensions.ext_redis import redis_client + + # Clear all test data + db_session_with_containers.query(TenantAccountJoin).delete() + db_session_with_containers.query(Tenant).delete() + db_session_with_containers.query(Account).delete() + db_session_with_containers.commit() + + # Clear Redis cache + redis_client.flushdb() + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_email_code_login.mail") as mock_mail, + patch("tasks.mail_email_code_login.get_email_i18n_service") as mock_email_service, + ): + # Setup default mock returns + mock_mail.is_inited.return_value = True + + # Mock email service + mock_email_service_instance = MagicMock() + mock_email_service_instance.send_email.return_value = None + mock_email_service.return_value = mock_email_service_instance + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "email_service_instance": mock_email_service_instance, + } + + def _create_test_account(self, db_session_with_containers, fake=None): + """ + Helper method to create a test account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + fake: Faker instance for generating test data + + Returns: + Account: Created account instance + """ + if fake is None: + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + db_session_with_containers.add(account) + db_session_with_containers.commit() + + return account + + def _create_test_tenant_and_account(self, db_session_with_containers, fake=None): + """ + Helper method to create a test tenant and account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + fake: Faker instance for generating test data + + Returns: + tuple: (Account, Tenant) created instances + """ + if fake is None: + fake = Faker() + + # Create account using the existing helper method + account = self._create_test_account(db_session_with_containers, fake) + + # Create tenant + tenant = Tenant( + name=fake.company(), + plan="basic", + status="active", + ) + + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account relationship + tenant_account_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + ) + + db_session_with_containers.add(tenant_account_join) + db_session_with_containers.commit() + + return account, tenant + + def test_send_email_code_login_mail_task_success_english( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending in English. + + This test verifies that the task can successfully: + 1. Send email code login mail with English language + 2. Use proper email service integration + 3. Pass correct template context to email service + 4. Log performance metrics correctly + 5. 
Complete task execution without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_mail = mock_external_service_dependencies["mail"] + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was called with correct parameters + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_success_chinese( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending in Chinese. + + This test verifies that the task can successfully: + 1. Send email code login mail with Chinese language + 2. Handle different language codes properly + 3. Use correct template context for Chinese emails + 4. Complete task execution without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "789012" + test_language = "zh-Hans" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called with Chinese language + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_success_multiple_languages( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending with multiple languages. + + This test verifies that the task can successfully: + 1. Handle various language codes correctly + 2. Send emails with different language configurations + 3. Maintain proper template context for each language + 4. 
Complete multiple task executions without conflicts + """ + # Arrange: Setup test data + fake = Faker() + test_languages = ["en-US", "zh-Hans", "zh-CN", "ja-JP", "ko-KR"] + test_emails = [fake.email() for _ in test_languages] + test_codes = [fake.numerify("######") for _ in test_languages] + + # Act: Execute the task for each language + for i, language in enumerate(test_languages): + send_email_code_login_mail_task( + language=language, + to=test_emails[i], + code=test_codes[i], + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called for each language + assert mock_email_service_instance.send_email.call_count == len(test_languages) + + # Verify each call had correct parameters + for i, language in enumerate(test_languages): + call_args = mock_email_service_instance.send_email.call_args_list[i] + assert call_args[1]["email_type"] == EmailType.EMAIL_CODE_LOGIN + assert call_args[1]["language_code"] == language + assert call_args[1]["to"] == test_emails[i] + assert call_args[1]["template_context"]["code"] == test_codes[i] + + def test_send_email_code_login_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task when mail service is not initialized. + + This test verifies that the task can properly: + 1. Check mail service initialization status + 2. Return early when mail is not initialized + 3. Not attempt to send email when service is unavailable + 4. Handle gracefully without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Mock mail service as not initialized + mock_mail = mock_external_service_dependencies["mail"] + mock_mail.is_inited.return_value = False + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was not called + mock_email_service_instance.send_email.assert_not_called() + + def test_send_email_code_login_mail_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task when email service raises an exception. + + This test verifies that the task can properly: + 1. Handle email service exceptions gracefully + 2. Log appropriate error messages + 3. Continue execution without crashing + 4. 
Maintain proper error handling + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Mock email service to raise an exception + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.send_email.side_effect = Exception("Email service unavailable") + + # Act: Execute the task - it should handle the exception gracefully + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_mail = mock_external_service_dependencies["mail"] + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was called (and failed) + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_invalid_parameters( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with invalid parameters. + + This test verifies that the task can properly: + 1. Handle empty or None email addresses + 2. Process empty or None verification codes + 3. Handle invalid language codes + 4. Maintain proper error handling for invalid inputs + """ + # Arrange: Setup test data + fake = Faker() + test_language = "en-US" + + # Test cases for invalid parameters + invalid_test_cases = [ + {"email": "", "code": "123456", "description": "empty email"}, + {"email": None, "code": "123456", "description": "None email"}, + {"email": fake.email(), "code": "", "description": "empty code"}, + {"email": fake.email(), "code": None, "description": "None code"}, + {"email": "invalid-email", "code": "123456", "description": "invalid email format"}, + ] + + for test_case in invalid_test_cases: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + + # Act: Execute the task with invalid parameters + send_email_code_login_mail_task( + language=test_language, + to=test_case["email"], + code=test_case["code"], + ) + + # Assert: Verify that email service was still called + # The task should pass parameters to email service as-is + # and let the email service handle validation + mock_email_service_instance.send_email.assert_called_once() + + def test_send_email_code_login_mail_task_edge_cases( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with edge cases and boundary conditions. + + This test verifies that the task can properly: + 1. Handle very long email addresses + 2. Process very long verification codes + 3. Handle special characters in parameters + 4. 
Process extreme language codes + """ + # Arrange: Setup test data + fake = Faker() + test_language = "en-US" + + # Edge case test data + edge_cases = [ + { + "email": "a" * 100 + "@example.com", # Very long email + "code": "1" * 20, # Very long code + "description": "very long email and code", + }, + { + "email": "test+tag@example.com", # Email with special characters + "code": "123-456", # Code with special characters + "description": "special characters", + }, + { + "email": "test@sub.domain.example.com", # Complex domain + "code": "000000", # All zeros + "description": "complex domain and all zeros code", + }, + { + "email": "test@example.co.uk", # International domain + "code": "999999", # All nines + "description": "international domain and all nines code", + }, + ] + + for test_case in edge_cases: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + + # Act: Execute the task with edge case data + send_email_code_login_mail_task( + language=test_language, + to=test_case["email"], + code=test_case["code"], + ) + + # Assert: Verify that email service was called with edge case data + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_case["email"], + template_context={ + "to": test_case["email"], + "code": test_case["code"], + }, + ) + + def test_send_email_code_login_mail_task_database_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with database integration. + + This test verifies that the task can properly: + 1. Work with real database connections + 2. Handle database session management + 3. Maintain proper database state + 4. Complete without database-related errors + """ + # Arrange: Setup test data with database + fake = Faker() + account, tenant = self._create_test_tenant_and_account(db_session_with_containers, fake) + + test_email = account.email + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called with database account email + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + # Verify database state is maintained + db_session_with_containers.refresh(account) + assert account.email == test_email + assert account.status == "active" + + def test_send_email_code_login_mail_task_redis_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with Redis integration. + + This test verifies that the task can properly: + 1. Work with Redis cache connections + 2. Handle Redis operations without errors + 3. Maintain proper cache state + 4. 
Complete without Redis-related errors
+        """
+        # Arrange: Setup test data
+        fake = Faker()
+        test_email = fake.email()
+        test_code = "123456"
+        test_language = "en-US"
+
+        # Setup Redis cache data
+        from extensions.ext_redis import redis_client
+
+        cache_key = f"email_code_login_test_{test_email}"
+        redis_client.set(cache_key, "test_value", ex=300)
+
+        # Act: Execute the task
+        send_email_code_login_mail_task(
+            language=test_language,
+            to=test_email,
+            code=test_code,
+        )
+
+        # Assert: Verify expected outcomes
+        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]
+
+        # Verify email service was called
+        mock_email_service_instance.send_email.assert_called_once()
+
+        # Verify Redis cache is still accessible
+        assert redis_client.exists(cache_key) == 1
+        assert redis_client.get(cache_key) == b"test_value"
+
+        # Clean up Redis cache
+        redis_client.delete(cache_key)
+
+    def test_send_email_code_login_mail_task_error_handling_comprehensive(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test comprehensive error handling for email code login mail task.
+
+        This test verifies that the task can properly:
+        1. Handle various types of exceptions
+        2. Log appropriate error messages
+        3. Continue execution despite errors
+        4. Maintain proper error reporting
+        """
+        # Arrange: Setup test data
+        fake = Faker()
+        test_email = fake.email()
+        test_code = "123456"
+        test_language = "en-US"
+
+        # Test different exception types
+        exception_types = [
+            ("ValueError", ValueError("Invalid email format")),
+            ("RuntimeError", RuntimeError("Service unavailable")),
+            ("ConnectionError", ConnectionError("Network error")),
+            ("TimeoutError", TimeoutError("Request timeout")),
+            ("Exception", Exception("Generic error")),
+        ]
+
+        for error_name, exception in exception_types:
+            # Reset mocks for each test case
+            mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]
+            mock_email_service_instance.reset_mock()
+            mock_email_service_instance.send_email.side_effect = exception
+
+            # Mock logging to capture error messages
+            with patch("tasks.mail_email_code_login.logger") as mock_logger:
+                # Act: Execute the task - it should handle the exception gracefully
+                send_email_code_login_mail_task(
+                    language=test_language,
+                    to=test_email,
+                    code=test_code,
+                )
+
+                # Assert: Verify error handling
+                # Verify email service was called (and failed)
+                mock_email_service_instance.send_email.assert_called_once()
+
+                # Verify the failure was logged; match on the message prefix so this
+                # works whether the task logs a formatted or lazy %-style message
+                error_calls = [
+                    call
+                    for call in mock_logger.exception.call_args_list
+                    if "Send email code login mail" in str(call)
+                ]
+                assert error_calls or mock_logger.exception.call_count >= 1, f"Error should be logged for {error_name}"
+
+            # Reset side effect for next iteration
+            mock_email_service_instance.send_email.side_effect = None
diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py
new file mode 100644
index 0000000000..d67794654f
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py
@@ -0,0 +1,261 @@
+from unittest.mock import MagicMock, patch
+
+import pytest
+from faker import Faker
+
+from tasks.mail_inner_task import send_inner_email_task
+
+
+class TestMailInnerTask:
+    """Integration tests for send_inner_email_task using testcontainers."""
+
+    
@pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_inner_task.mail") as mock_mail, + patch("tasks.mail_inner_task.get_email_i18n_service") as mock_get_email_i18n_service, + patch("tasks.mail_inner_task._render_template_with_strategy") as mock_render_template, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email i18n service + mock_email_service = MagicMock() + mock_get_email_i18n_service.return_value = mock_email_service + + # Setup mock template rendering + mock_render_template.return_value = "Test email content" + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "render_template": mock_render_template, + } + + def _create_test_email_data(self, fake: Faker) -> dict: + """ + Helper method to create test email data for testing. + + Args: + fake: Faker instance for generating test data + + Returns: + dict: Test email data including recipients, subject, body, and substitutions + """ + return { + "to": [fake.email() for _ in range(3)], + "subject": fake.sentence(nb_words=4), + "body": "Hello {{name}}, this is a test email from {{company}}.", + "substitutions": { + "name": fake.name(), + "company": fake.company(), + "date": fake.date(), + }, + } + + def test_send_inner_email_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful email sending with valid data. + + This test verifies: + - Proper email service initialization check + - Template rendering with substitutions + - Email service integration + - Multiple recipient handling + """ + # Arrange: Create test data + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + # Verify mail service was checked for initialization + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + + # Verify template rendering was called with correct parameters + mock_external_service_dependencies["render_template"].assert_called_once_with( + email_data["body"], email_data["substitutions"] + ) + + # Verify email service was called once with the full recipient list + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_single_recipient(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending with single recipient. 
+ + This test verifies: + - Single recipient handling + - Template rendering + - Email service integration + """ + # Arrange: Create test data with single recipient + fake = Faker() + email_data = { + "to": [fake.email()], + "subject": fake.sentence(nb_words=3), + "body": "Welcome {{user_name}}!", + "substitutions": { + "user_name": fake.name(), + }, + } + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_empty_substitutions(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending with empty substitutions. + + This test verifies: + - Template rendering with empty substitutions + - Email service integration + - Handling of minimal template context + """ + # Arrange: Create test data with empty substitutions + fake = Faker() + email_data = { + "to": [fake.email()], + "subject": fake.sentence(nb_words=3), + "body": "This is a simple email without variables.", + "substitutions": {}, + } + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["render_template"].assert_called_once_with(email_data["body"], {}) + + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No template rendering occurs + - No email service calls + - No exceptions raised + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["render_template"].assert_not_called() + mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called() + + def test_send_inner_email_template_rendering_error( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending when template rendering fails. 
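+
+        The assumed ordering (inferred from the patched helpers, not from the
+        implementation) is render-then-send:
+
+            html_content = _render_template_with_strategy(body, substitutions)
+            get_email_i18n_service().send_raw_email(
+                to=to, subject=subject, html_content=html_content
+            )
+
+        so a rendering failure must short-circuit before any send attempt.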
+ + This test verifies: + - Exception handling during template rendering + - No email service calls when template fails + """ + # Arrange: Setup template rendering to raise an exception + mock_external_service_dependencies["render_template"].side_effect = Exception("Template rendering failed") + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify template rendering was attempted + mock_external_service_dependencies["render_template"].assert_called_once() + + # Verify no email service calls due to exception + mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called() + + def test_send_inner_email_service_error(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending when email service fails. + + This test verifies: + - Exception handling during email sending + - Graceful error handling + """ + # Arrange: Setup email service to raise an exception + mock_external_service_dependencies["email_service"].send_raw_email.side_effect = Exception( + "Email service failed" + ) + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify template rendering occurred + mock_external_service_dependencies["render_template"].assert_called_once() + + # Verify email service was called (and failed) + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py new file mode 100644 index 0000000000..c083861004 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py @@ -0,0 +1,544 @@ +""" +Integration tests for mail_invite_member_task using testcontainers. + +This module provides integration tests for the invite member email task +using TestContainers infrastructure. The tests ensure that the task properly sends +invitation emails with internationalization support, handles error scenarios, +and integrates correctly with the database and Redis for token management. + +All tests use the testcontainers infrastructure to ensure proper database isolation +and realistic testing scenarios with actual PostgreSQL and Redis instances. +""" + +import json +import uuid +from datetime import UTC, datetime +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from extensions.ext_redis import redis_client +from libs.email_i18n import EmailType +from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_invite_member_task import send_invite_member_mail_task + + +class TestMailInviteMemberTask: + """ + Integration tests for send_invite_member_mail_task using testcontainers. 
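+
+    Based on the assertions below (not on the implementation), the task is
+    assumed to build the activation link and template context roughly as:
+
+        url = f"{dify_config.CONSOLE_WEB_URL}/activate?token={token}"
+        template_context = {"to": to, "inviter_name": inviter_name,
+                            "workspace_name": workspace_name, "url": url}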
+ + This test class covers the core functionality of the invite member email task: + - Email sending with proper internationalization + - Template context generation and URL construction + - Error handling for failure scenarios + - Integration with Redis for token validation + - Mail service initialization checks + - Real database integration with actual invitation flow + + All tests use the testcontainers infrastructure to ensure proper database isolation + and realistic testing environment with actual database and Redis interactions. + """ + + @pytest.fixture(autouse=True) + def cleanup_database(self, db_session_with_containers): + """Clean up database before each test to ensure isolation.""" + # Clear all test data + db_session_with_containers.query(TenantAccountJoin).delete() + db_session_with_containers.query(Tenant).delete() + db_session_with_containers.query(Account).delete() + db_session_with_containers.commit() + + # Clear Redis cache + redis_client.flushdb() + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_invite_member_task.mail") as mock_mail, + patch("tasks.mail_invite_member_task.get_email_i18n_service") as mock_email_service, + patch("tasks.mail_invite_member_task.dify_config") as mock_config, + ): + # Setup mail service mock + mock_mail.is_inited.return_value = True + + # Setup email service mock + mock_email_service_instance = MagicMock() + mock_email_service_instance.send_email.return_value = None + mock_email_service.return_value = mock_email_service_instance + + # Setup config mock + mock_config.CONSOLE_WEB_URL = "https://console.dify.ai" + + yield { + "mail": mock_mail, + "email_service": mock_email_service_instance, + "config": mock_config, + } + + def _create_test_account_and_tenant(self, db_session_with_containers): + """ + Helper method to create a test account and tenant for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + tuple: (Account, Tenant) created instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + password=fake.password(), + interface_language="en-US", + status=AccountStatus.ACTIVE, + ) + account.created_at = datetime.now(UTC) + account.updated_at = datetime.now(UTC) + db_session_with_containers.add(account) + db_session_with_containers.commit() + db_session_with_containers.refresh(account) + + # Create tenant + tenant = Tenant( + name=fake.company(), + ) + tenant.created_at = datetime.now(UTC) + tenant.updated_at = datetime.now(UTC) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + db_session_with_containers.refresh(tenant) + + # Create tenant member relationship + tenant_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + ) + tenant_join.created_at = datetime.now(UTC) + db_session_with_containers.add(tenant_join) + db_session_with_containers.commit() + + return account, tenant + + def _create_invitation_token(self, tenant, account): + """ + Helper method to create a valid invitation token in Redis. 
+ + Args: + tenant: Tenant instance + account: Account instance + + Returns: + str: Generated invitation token + """ + token = str(uuid.uuid4()) + invitation_data = { + "account_id": account.id, + "email": account.email, + "workspace_id": tenant.id, + } + cache_key = f"member_invite:token:{token}" + redis_client.setex(cache_key, 24 * 60 * 60, json.dumps(invitation_data)) # 24 hours + return token + + def _create_pending_account_for_invitation(self, db_session_with_containers, email, tenant): + """ + Helper method to create a pending account for invitation testing. + + Args: + db_session_with_containers: Database session + email: Email address for the account + tenant: Tenant instance + + Returns: + Account: Created pending account + """ + account = Account( + email=email, + name=email.split("@")[0], + password="", + interface_language="en-US", + status=AccountStatus.PENDING, + ) + + account.created_at = datetime.now(UTC) + account.updated_at = datetime.now(UTC) + db_session_with_containers.add(account) + db_session_with_containers.commit() + db_session_with_containers.refresh(account) + + # Create tenant member relationship + tenant_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.NORMAL, + ) + tenant_join.created_at = datetime.now(UTC) + db_session_with_containers.add(tenant_join) + db_session_with_containers.commit() + + return account + + def test_send_invite_member_mail_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful invitation email sending with all parameters. + + This test verifies: + - Email service is called with correct parameters + - Template context includes all required fields + - URL is constructed correctly with token + - Performance logging is recorded + - No exceptions are raised + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + invitee_email = "test@example.com" + language = "en-US" + token = self._create_invitation_token(tenant, inviter) + inviter_name = inviter.name + workspace_name = tenant.name + + # Act: Execute the task + send_invite_member_mail_task( + language=language, + to=invitee_email, + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify email service was called correctly + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_called_once() + + # Verify call arguments + call_args = mock_email_service.send_email.call_args + assert call_args[1]["email_type"] == EmailType.INVITE_MEMBER + assert call_args[1]["language_code"] == language + assert call_args[1]["to"] == invitee_email + + # Verify template context + template_context = call_args[1]["template_context"] + assert template_context["to"] == invitee_email + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + assert template_context["url"] == f"https://console.dify.ai/activate?token={token}" + + def test_send_invite_member_mail_different_languages( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test invitation email sending with different language codes. 
+ + This test verifies: + - Email service handles different language codes correctly + - Template context is passed correctly for each language + - No language-specific errors occur + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + test_languages = ["en-US", "zh-CN", "ja-JP", "fr-FR", "de-DE", "es-ES"] + + for language in test_languages: + # Act: Execute the task with different language + send_invite_member_mail_task( + language=language, + to="test@example.com", + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify language code was passed correctly + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + assert call_args[1]["language_code"] == language + + def test_send_invite_member_mail_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test behavior when mail service is not initialized. + + This test verifies: + - Task returns early when mail is not initialized + - Email service is not called + - No exceptions are raised + """ + # Arrange: Setup mail service as not initialized + mock_mail = mock_external_service_dependencies["mail"] + mock_mail.is_inited.return_value = False + + # Act: Execute the task + result = send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token="test-token", + inviter_name="Test User", + workspace_name="Test Workspace", + ) + + # Assert: Verify early return + assert result is None + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_not_called() + + def test_send_invite_member_mail_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test error handling when email service raises an exception. + + This test verifies: + - Exception is caught and logged + - Task completes without raising exception + - Error logging is performed + """ + # Arrange: Setup email service to raise exception + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.side_effect = Exception("Email service failed") + + # Act & Assert: Execute task and verify exception is handled + with patch("tasks.mail_invite_member_task.logger") as mock_logger: + send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token="test-token", + inviter_name="Test User", + workspace_name="Test Workspace", + ) + + # Verify error was logged + mock_logger.exception.assert_called_once() + error_call = mock_logger.exception.call_args[0][0] + assert "Send invite member mail to %s failed" in error_call + + def test_send_invite_member_mail_template_context_validation( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test template context contains all required fields for email rendering. 
+ + This test verifies: + - All required template context fields are present + - Field values match expected data + - URL construction is correct + - No missing or None values in context + """ + # Arrange: Create test data with specific values + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = "test-token-123" + invitee_email = "invitee@example.com" + inviter_name = "John Doe" + workspace_name = "Acme Corp" + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=invitee_email, + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify template context + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + template_context = call_args[1]["template_context"] + + # Verify all required fields are present + required_fields = ["to", "inviter_name", "workspace_name", "url"] + for field in required_fields: + assert field in template_context + assert template_context[field] is not None + assert template_context[field] != "" + + # Verify specific values + assert template_context["to"] == invitee_email + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + assert template_context["url"] == f"https://console.dify.ai/activate?token={token}" + + def test_send_invite_member_mail_integration_with_redis_token( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test integration with Redis token validation. + + This test verifies: + - Task works with real Redis token data + - Token validation can be performed after email sending + - Redis data integrity is maintained + """ + # Arrange: Create test data and store token in Redis + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + # Verify token exists in Redis before sending email + cache_key = f"member_invite:token:{token}" + assert redis_client.exists(cache_key) == 1 + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=inviter.email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify token still exists after email sending + assert redis_client.exists(cache_key) == 1 + + # Verify token data integrity + token_data = redis_client.get(cache_key) + assert token_data is not None + invitation_data = json.loads(token_data) + assert invitation_data["account_id"] == inviter.id + assert invitation_data["email"] == inviter.email + assert invitation_data["workspace_id"] == tenant.id + + def test_send_invite_member_mail_with_special_characters( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending with special characters in names and workspace names. 
+ + This test verifies: + - Special characters are handled correctly in template context + - Email service receives properly formatted data + - No encoding issues occur + """ + # Arrange: Create test data with special characters + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + special_cases = [ + ("John O'Connor", "Acme & Co."), + ("José María", "Café & Restaurant"), + ("李小明", "北京科技有限公司"), + ("François & Marie", "L'École Internationale"), + ("Александр", "ООО Технологии"), + ("محمد أحمد", "شركة التقنية المتقدمة"), + ] + + for inviter_name, workspace_name in special_cases: + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify special characters are preserved + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + template_context = call_args[1]["template_context"] + + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + + def test_send_invite_member_mail_real_database_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test real database integration with actual invitation flow. + + This test verifies: + - Task works with real database entities + - Account and tenant relationships are properly maintained + - Database state is consistent after email sending + - Real invitation data flow is tested + """ + # Arrange: Create real database entities + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + invitee_email = "newmember@example.com" + + # Create a pending account for invitation (simulating real invitation flow) + pending_account = self._create_pending_account_for_invitation(db_session_with_containers, invitee_email, tenant) + + # Create invitation token with real account data + token = self._create_invitation_token(tenant, pending_account) + + # Act: Execute the task with real data + send_invite_member_mail_task( + language="en-US", + to=invitee_email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify email service was called with real data + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_called_once() + + # Verify database state is maintained + db_session_with_containers.refresh(pending_account) + db_session_with_containers.refresh(tenant) + + assert pending_account.status == AccountStatus.PENDING + assert pending_account.email == invitee_email + assert tenant.name is not None + + # Verify tenant relationship exists + tenant_join = ( + db_session_with_containers.query(TenantAccountJoin) + .filter_by(tenant_id=tenant.id, account_id=pending_account.id) + .first() + ) + assert tenant_join is not None + assert tenant_join.role == TenantAccountRole.NORMAL + + def test_send_invite_member_mail_token_lifecycle_management( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test token lifecycle management and validation. 
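+
+        The token record follows the layout written by the
+        _create_invitation_token helper above:
+
+            key:   member_invite:token:<uuid4>
+            value: JSON {"account_id", "email", "workspace_id"}
+            TTL:   24 hours (via setex)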
+ + This test verifies: + - Token is properly stored in Redis with correct TTL + - Token data structure is correct + - Token can be retrieved and validated after email sending + - Token expiration is handled correctly + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=inviter.email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify token lifecycle + cache_key = f"member_invite:token:{token}" + + # Token should still exist + assert redis_client.exists(cache_key) == 1 + + # Token should have correct TTL (approximately 24 hours) + ttl = redis_client.ttl(cache_key) + assert 23 * 60 * 60 <= ttl <= 24 * 60 * 60 # Allow some tolerance + + # Token data should be valid + token_data = redis_client.get(cache_key) + assert token_data is not None + + invitation_data = json.loads(token_data) + assert invitation_data["account_id"] == inviter.id + assert invitation_data["email"] == inviter.email + assert invitation_data["workspace_id"] == tenant.id diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py new file mode 100644 index 0000000000..e128b06b11 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py @@ -0,0 +1,401 @@ +""" +TestContainers-based integration tests for mail_owner_transfer_task. + +This module provides comprehensive integration tests for the mail owner transfer tasks +using TestContainers to ensure real email service integration and proper functionality +testing with actual database and service dependencies. +""" + +import logging +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_owner_transfer_task import ( + send_new_owner_transfer_notify_email_task, + send_old_owner_transfer_notify_email_task, + send_owner_transfer_confirm_task, +) + +logger = logging.getLogger(__name__) + + +class TestMailOwnerTransferTask: + """Integration tests for mail owner transfer tasks using testcontainers.""" + + @pytest.fixture + def mock_mail_dependencies(self): + """Mock setup for mail service dependencies.""" + with ( + patch("tasks.mail_owner_transfer_task.mail") as mock_mail, + patch("tasks.mail_owner_transfer_task.get_email_i18n_service") as mock_get_email_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email service + mock_email_service = MagicMock() + mock_get_email_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "get_email_service": mock_get_email_service, + } + + def _create_test_account_and_tenant(self, db_session_with_containers): + """ + Helper method to create test account and tenant for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + tuple: (account, tenant) - Created account and tenant instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db_session_with_containers.add(account) + db_session_with_containers.commit() + + # Create tenant + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db_session_with_containers.add(join) + db_session_with_containers.commit() + + return account, tenant + + def test_send_owner_transfer_confirm_task_success(self, db_session_with_containers, mock_mail_dependencies): + """ + Test successful owner transfer confirmation email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context is properly constructed + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_code = "123456" + test_workspace = tenant.name + + # Act: Execute the task + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_CONFIRM + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["code"] == test_code + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + + def test_send_owner_transfer_confirm_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test owner transfer confirmation email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_workspace = "Test Workspace" + + # Act: Execute the task + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_owner_transfer_confirm_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in owner transfer confirmation email. 
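+
+        Assumed error-handling contract (a sketch inferred from this class's
+        mocks, not the actual implementation):
+
+            try:
+                email_service.send_email(...)
+            except Exception:
+                logger.exception(...)  # swallowed; the task still completes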
+ + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_workspace = "Test Workspace" + + # Act & Assert: Verify no exception is raised + try: + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() + + def test_send_old_owner_transfer_notify_email_task_success( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test successful old owner transfer notification email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context includes new owner email + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_workspace = tenant.name + test_new_owner_email = "newowner@example.com" + + # Act: Execute the task + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_OLD_NOTIFY + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + assert call_args[1]["template_context"]["NewOwnerEmail"] == test_new_owner_email + + def test_send_old_owner_transfer_notify_email_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test old owner transfer notification email when mail service is not initialized. 
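+
+        As with the confirm task, mail.is_inited() acts as a guard clause:
+        when it returns False the task should return before the email
+        service is ever resolved.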
+ + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + test_new_owner_email = "newowner@example.com" + + # Act: Execute the task + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_old_owner_transfer_notify_email_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in old owner transfer notification email. + + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + test_new_owner_email = "newowner@example.com" + + # Act & Assert: Verify no exception is raised + try: + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() + + def test_send_new_owner_transfer_notify_email_task_success( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test successful new owner transfer notification email sending. 
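+
+        Note that, unlike the old-owner notification, the asserted template
+        context carries only the recipient and WorkspaceName; there is no
+        NewOwnerEmail field here.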
+ + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context is properly constructed + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_workspace = tenant.name + + # Act: Execute the task + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_NEW_NOTIFY + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + + def test_send_new_owner_transfer_notify_email_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test new owner transfer notification email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + + # Act: Execute the task + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_new_owner_transfer_notify_email_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in new owner transfer notification email. 
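+
+        Mirrors the exception-handling tests above: the failure raised by
+        the mocked send_email should be caught inside the task rather than
+        propagating to the caller.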
+ + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + + # Act & Assert: Verify no exception is raised + try: + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_register_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_register_task.py new file mode 100644 index 0000000000..e4db14623d --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_register_task.py @@ -0,0 +1,134 @@ +""" +TestContainers-based integration tests for mail_register_task.py + +This module provides integration tests for email registration tasks +using TestContainers to ensure real database and service interactions. +""" + +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from tasks.mail_register_task import send_email_register_mail_task, send_email_register_mail_task_when_account_exist + + +class TestMailRegisterTask: + """Integration tests for mail_register_task using testcontainers.""" + + @pytest.fixture + def mock_mail_dependencies(self): + """Mock setup for mail service dependencies.""" + with ( + patch("tasks.mail_register_task.mail") as mock_mail, + patch("tasks.mail_register_task.get_email_i18n_service") as mock_get_email_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email i18n service + mock_email_service = MagicMock() + mock_get_email_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "get_email_service": mock_get_email_service, + } + + def test_send_email_register_mail_task_success(self, db_session_with_containers, mock_mail_dependencies): + """Test successful email registration mail sending.""" + fake = Faker() + language = "en-US" + to_email = fake.email() + code = fake.numerify("######") + + send_email_register_mail_task(language=language, to=to_email, code=code) + + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["email_service"].send_email.assert_called_once_with( + email_type=EmailType.EMAIL_REGISTER, + language_code=language, + to=to_email, + template_context={ + "to": to_email, + "code": code, + }, + ) + + def test_send_email_register_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """Test email registration task when mail service is not initialized.""" + mock_mail_dependencies["mail"].is_inited.return_value = False + + send_email_register_mail_task(language="en-US", to="test@example.com", code="123456") + + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_email_register_mail_task_exception_handling(self, 
db_session_with_containers, mock_mail_dependencies): + """Test email registration task exception handling.""" + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + fake = Faker() + to_email = fake.email() + code = fake.numerify("######") + + with patch("tasks.mail_register_task.logger") as mock_logger: + send_email_register_mail_task(language="en-US", to=to_email, code=code) + mock_logger.exception.assert_called_once_with("Send email register mail to %s failed", to_email) + + def test_send_email_register_mail_task_when_account_exist_success( + self, db_session_with_containers, mock_mail_dependencies + ): + """Test successful email registration mail sending when account exists.""" + fake = Faker() + language = "en-US" + to_email = fake.email() + account_name = fake.name() + + with patch("tasks.mail_register_task.dify_config") as mock_config: + mock_config.CONSOLE_WEB_URL = "https://console.dify.ai" + + send_email_register_mail_task_when_account_exist(language=language, to=to_email, account_name=account_name) + + mock_mail_dependencies["email_service"].send_email.assert_called_once_with( + email_type=EmailType.EMAIL_REGISTER_WHEN_ACCOUNT_EXIST, + language_code=language, + to=to_email, + template_context={ + "to": to_email, + "login_url": "https://console.dify.ai/signin", + "reset_password_url": "https://console.dify.ai/reset-password", + "account_name": account_name, + }, + ) + + def test_send_email_register_mail_task_when_account_exist_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """Test account exist email task when mail service is not initialized.""" + mock_mail_dependencies["mail"].is_inited.return_value = False + + send_email_register_mail_task_when_account_exist( + language="en-US", to="test@example.com", account_name="Test User" + ) + + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_email_register_mail_task_when_account_exist_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """Test account exist email task exception handling.""" + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + fake = Faker() + to_email = fake.email() + account_name = fake.name() + + with patch("tasks.mail_register_task.logger") as mock_logger: + send_email_register_mail_task_when_account_exist(language="en-US", to=to_email, account_name=account_name) + mock_logger.exception.assert_called_once_with("Send email register mail to %s failed", to_email) diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index f4e3d97719..209b6bf59b 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -15,13 +15,13 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") - monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") + monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") # Custom value for testing monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") monkeypatch.setenv("DB_PORT", "5432") monkeypatch.setenv("DB_DATABASE", "dify") - 
monkeypatch.setenv("HTTP_REQUEST_MAX_READ_TIMEOUT", "600") + monkeypatch.setenv("HTTP_REQUEST_MAX_READ_TIMEOUT", "300") # Custom value for testing # load dotenv file with pydantic-settings config = DifyConfig() @@ -33,17 +33,38 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): assert config.EDITION == "SELF_HOSTED" assert config.API_COMPRESSION_ENABLED is False assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0 + assert config.TEMPLATE_TRANSFORM_MAX_LENGTH == 400_000 - # annotated field with default value - assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600 + # annotated field with custom configured value + assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 300 - # annotated field with configured value + # annotated field with custom configured value assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 30 # values from pyproject.toml assert Version(config.project.version) >= Version("1.0.0") +def test_http_timeout_defaults(monkeypatch: pytest.MonkeyPatch): + """Test that HTTP timeout defaults are correctly set""" + # clear system environment variables + os.environ.clear() + + # Set minimal required env vars + monkeypatch.setenv("DB_USERNAME", "postgres") + monkeypatch.setenv("DB_PASSWORD", "postgres") + monkeypatch.setenv("DB_HOST", "localhost") + monkeypatch.setenv("DB_PORT", "5432") + monkeypatch.setenv("DB_DATABASE", "dify") + + config = DifyConfig() + + # Verify default timeout values + assert config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT == 10 + assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600 + assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 600 + + # NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected. # This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`. def test_flask_configs(monkeypatch: pytest.MonkeyPatch): @@ -54,7 +75,6 @@ def test_flask_configs(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") - monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -104,7 +124,6 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") - monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") diff --git a/api/tests/unit_tests/controllers/console/app/test_description_validation.py b/api/tests/unit_tests/controllers/console/app/test_description_validation.py index 178267e560..dcc408a21c 100644 --- a/api/tests/unit_tests/controllers/console/app/test_description_validation.py +++ b/api/tests/unit_tests/controllers/console/app/test_description_validation.py @@ -1,174 +1,53 @@ import pytest -from controllers.console.app.app import _validate_description_length as app_validate -from controllers.console.datasets.datasets import _validate_description_length as dataset_validate -from controllers.service_api.dataset.dataset import _validate_description_length as service_dataset_validate +from libs.validators import validate_description_length class TestDescriptionValidationUnit: - """Unit tests for description validation functions in 
App and Dataset APIs""" + """Unit tests for the centralized description validation function.""" - def test_app_validate_description_length_valid(self): - """Test App validation function with valid descriptions""" + def test_validate_description_length_valid(self): + """Test validation function with valid descriptions.""" # Empty string should be valid - assert app_validate("") == "" + assert validate_description_length("") == "" # None should be valid - assert app_validate(None) is None + assert validate_description_length(None) is None # Short description should be valid short_desc = "Short description" - assert app_validate(short_desc) == short_desc + assert validate_description_length(short_desc) == short_desc # Exactly 400 characters should be valid exactly_400 = "x" * 400 - assert app_validate(exactly_400) == exactly_400 + assert validate_description_length(exactly_400) == exactly_400 # Just under limit should be valid just_under = "x" * 399 - assert app_validate(just_under) == just_under + assert validate_description_length(just_under) == just_under - def test_app_validate_description_length_invalid(self): - """Test App validation function with invalid descriptions""" + def test_validate_description_length_invalid(self): + """Test validation function with invalid descriptions.""" # 401 characters should fail just_over = "x" * 401 with pytest.raises(ValueError) as exc_info: - app_validate(just_over) + validate_description_length(just_over) assert "Description cannot exceed 400 characters." in str(exc_info.value) # 500 characters should fail way_over = "x" * 500 with pytest.raises(ValueError) as exc_info: - app_validate(way_over) + validate_description_length(way_over) assert "Description cannot exceed 400 characters." in str(exc_info.value) # 1000 characters should fail very_long = "x" * 1000 with pytest.raises(ValueError) as exc_info: - app_validate(very_long) + validate_description_length(very_long) assert "Description cannot exceed 400 characters." in str(exc_info.value) - def test_dataset_validate_description_length_valid(self): - """Test Dataset validation function with valid descriptions""" - # Empty string should be valid - assert dataset_validate("") == "" - - # Short description should be valid - short_desc = "Short description" - assert dataset_validate(short_desc) == short_desc - - # Exactly 400 characters should be valid - exactly_400 = "x" * 400 - assert dataset_validate(exactly_400) == exactly_400 - - # Just under limit should be valid - just_under = "x" * 399 - assert dataset_validate(just_under) == just_under - - def test_dataset_validate_description_length_invalid(self): - """Test Dataset validation function with invalid descriptions""" - # 401 characters should fail - just_over = "x" * 401 - with pytest.raises(ValueError) as exc_info: - dataset_validate(just_over) - assert "Description cannot exceed 400 characters." in str(exc_info.value) - - # 500 characters should fail - way_over = "x" * 500 - with pytest.raises(ValueError) as exc_info: - dataset_validate(way_over) - assert "Description cannot exceed 400 characters." 
in str(exc_info.value) - - def test_service_dataset_validate_description_length_valid(self): - """Test Service Dataset validation function with valid descriptions""" - # Empty string should be valid - assert service_dataset_validate("") == "" - - # None should be valid - assert service_dataset_validate(None) is None - - # Short description should be valid - short_desc = "Short description" - assert service_dataset_validate(short_desc) == short_desc - - # Exactly 400 characters should be valid - exactly_400 = "x" * 400 - assert service_dataset_validate(exactly_400) == exactly_400 - - # Just under limit should be valid - just_under = "x" * 399 - assert service_dataset_validate(just_under) == just_under - - def test_service_dataset_validate_description_length_invalid(self): - """Test Service Dataset validation function with invalid descriptions""" - # 401 characters should fail - just_over = "x" * 401 - with pytest.raises(ValueError) as exc_info: - service_dataset_validate(just_over) - assert "Description cannot exceed 400 characters." in str(exc_info.value) - - # 500 characters should fail - way_over = "x" * 500 - with pytest.raises(ValueError) as exc_info: - service_dataset_validate(way_over) - assert "Description cannot exceed 400 characters." in str(exc_info.value) - - def test_app_dataset_validation_consistency(self): - """Test that App and Dataset validation functions behave identically""" - test_cases = [ - "", # Empty string - "Short description", # Normal description - "x" * 100, # Medium description - "x" * 400, # Exactly at limit - ] - - # Test valid cases produce same results - for test_desc in test_cases: - assert app_validate(test_desc) == dataset_validate(test_desc) == service_dataset_validate(test_desc) - - # Test invalid cases produce same errors - invalid_cases = [ - "x" * 401, # Just over limit - "x" * 500, # Way over limit - "x" * 1000, # Very long - ] - - for invalid_desc in invalid_cases: - app_error = None - dataset_error = None - service_dataset_error = None - - # Capture App validation error - try: - app_validate(invalid_desc) - except ValueError as e: - app_error = str(e) - - # Capture Dataset validation error - try: - dataset_validate(invalid_desc) - except ValueError as e: - dataset_error = str(e) - - # Capture Service Dataset validation error - try: - service_dataset_validate(invalid_desc) - except ValueError as e: - service_dataset_error = str(e) - - # All should produce errors - assert app_error is not None, f"App validation should fail for {len(invalid_desc)} characters" - assert dataset_error is not None, f"Dataset validation should fail for {len(invalid_desc)} characters" - error_msg = f"Service Dataset validation should fail for {len(invalid_desc)} characters" - assert service_dataset_error is not None, error_msg - - # Errors should be identical - error_msg = f"Error messages should be identical for {len(invalid_desc)} characters" - assert app_error == dataset_error == service_dataset_error, error_msg - assert app_error == "Description cannot exceed 400 characters." 
- def test_boundary_values(self): - """Test boundary values around the 400 character limit""" + """Test boundary values around the 400 character limit.""" boundary_tests = [ (0, True), # Empty (1, True), # Minimum @@ -184,69 +63,45 @@ class TestDescriptionValidationUnit: if should_pass: # Should not raise exception - assert app_validate(test_desc) == test_desc - assert dataset_validate(test_desc) == test_desc - assert service_dataset_validate(test_desc) == test_desc + assert validate_description_length(test_desc) == test_desc else: # Should raise ValueError with pytest.raises(ValueError): - app_validate(test_desc) - with pytest.raises(ValueError): - dataset_validate(test_desc) - with pytest.raises(ValueError): - service_dataset_validate(test_desc) + validate_description_length(test_desc) def test_special_characters(self): """Test validation with special characters, Unicode, etc.""" # Unicode characters unicode_desc = "测试描述" * 100 # Chinese characters if len(unicode_desc) <= 400: - assert app_validate(unicode_desc) == unicode_desc - assert dataset_validate(unicode_desc) == unicode_desc - assert service_dataset_validate(unicode_desc) == unicode_desc + assert validate_description_length(unicode_desc) == unicode_desc # Special characters special_desc = "Special chars: !@#$%^&*()_+-=[]{}|;':\",./<>?" * 10 if len(special_desc) <= 400: - assert app_validate(special_desc) == special_desc - assert dataset_validate(special_desc) == special_desc - assert service_dataset_validate(special_desc) == special_desc + assert validate_description_length(special_desc) == special_desc # Mixed content mixed_desc = "Mixed content: 测试 123 !@# " * 15 if len(mixed_desc) <= 400: - assert app_validate(mixed_desc) == mixed_desc - assert dataset_validate(mixed_desc) == mixed_desc - assert service_dataset_validate(mixed_desc) == mixed_desc + assert validate_description_length(mixed_desc) == mixed_desc elif len(mixed_desc) > 400: with pytest.raises(ValueError): - app_validate(mixed_desc) - with pytest.raises(ValueError): - dataset_validate(mixed_desc) - with pytest.raises(ValueError): - service_dataset_validate(mixed_desc) + validate_description_length(mixed_desc) def test_whitespace_handling(self): - """Test validation with various whitespace scenarios""" + """Test validation with various whitespace scenarios.""" # Leading/trailing whitespace whitespace_desc = " Description with whitespace " if len(whitespace_desc) <= 400: - assert app_validate(whitespace_desc) == whitespace_desc - assert dataset_validate(whitespace_desc) == whitespace_desc - assert service_dataset_validate(whitespace_desc) == whitespace_desc + assert validate_description_length(whitespace_desc) == whitespace_desc # Newlines and tabs multiline_desc = "Line 1\nLine 2\tTabbed content" if len(multiline_desc) <= 400: - assert app_validate(multiline_desc) == multiline_desc - assert dataset_validate(multiline_desc) == multiline_desc - assert service_dataset_validate(multiline_desc) == multiline_desc + assert validate_description_length(multiline_desc) == multiline_desc # Only whitespace over limit only_spaces = " " * 401 with pytest.raises(ValueError): - app_validate(only_spaces) - with pytest.raises(ValueError): - dataset_validate(only_spaces) - with pytest.raises(ValueError): - service_dataset_validate(only_spaces) + validate_description_length(only_spaces) diff --git a/api/tests/unit_tests/controllers/console/auth/test_oauth.py b/api/tests/unit_tests/controllers/console/auth/test_oauth.py index 1a2e27e8fe..399caf8c4d 100644 --- 
a/api/tests/unit_tests/controllers/console/auth/test_oauth.py +++ b/api/tests/unit_tests/controllers/console/auth/test_oauth.py @@ -143,7 +143,7 @@ class TestOAuthCallback: oauth_provider.get_user_info.return_value = OAuthUserInfo(id="123", name="Test User", email="test@example.com") account = MagicMock() - account.status = AccountStatus.ACTIVE.value + account.status = AccountStatus.ACTIVE token_pair = MagicMock() token_pair.access_token = "jwt_access_token" @@ -179,9 +179,7 @@ class TestOAuthCallback: oauth_setup["provider"].get_access_token.assert_called_once_with("test_code") oauth_setup["provider"].get_user_info.assert_called_once_with("access_token") - mock_redirect.assert_called_once_with( - "http://localhost:3000?access_token=jwt_access_token&refresh_token=jwt_refresh_token" - ) + mock_redirect.assert_called_once_with("http://localhost:3000") @pytest.mark.parametrize( ("exception", "expected_error"), @@ -220,12 +218,12 @@ class TestOAuthCallback: @pytest.mark.parametrize( ("account_status", "expected_redirect"), [ - (AccountStatus.BANNED.value, "http://localhost:3000/signin?message=Account is banned."), + (AccountStatus.BANNED, "http://localhost:3000/signin?message=Account is banned."), # CLOSED status: Currently NOT handled, will proceed to login (security issue) # This documents actual behavior. See test_defensive_check_for_closed_account_status for details ( AccountStatus.CLOSED.value, - "http://localhost:3000?access_token=jwt_access_token&refresh_token=jwt_refresh_token", + "http://localhost:3000", ), ], ) @@ -268,6 +266,7 @@ class TestOAuthCallback: mock_token_pair = MagicMock() mock_token_pair.access_token = "jwt_access_token" mock_token_pair.refresh_token = "jwt_refresh_token" + mock_token_pair.csrf_token = "csrf_token" mock_account_service.login.return_value = mock_token_pair with app.test_request_context("/auth/oauth/github/callback?code=test_code"): @@ -296,13 +295,19 @@ class TestOAuthCallback: mock_get_providers.return_value = {"github": oauth_setup["provider"]} mock_account = MagicMock() - mock_account.status = AccountStatus.PENDING.value + mock_account.status = AccountStatus.PENDING mock_generate_account.return_value = mock_account + mock_token_pair = MagicMock() + mock_token_pair.access_token = "jwt_access_token" + mock_token_pair.refresh_token = "jwt_refresh_token" + mock_token_pair.csrf_token = "csrf_token" + mock_account_service.login.return_value = mock_token_pair + with app.test_request_context("/auth/oauth/github/callback?code=test_code"): resource.get("github") - assert mock_account.status == AccountStatus.ACTIVE.value + assert mock_account.status == AccountStatus.ACTIVE assert mock_account.initialized_at is not None mock_db.session.commit.assert_called_once() @@ -352,7 +357,7 @@ class TestOAuthCallback: # Create account with CLOSED status closed_account = MagicMock() - closed_account.status = AccountStatus.CLOSED.value + closed_account.status = AccountStatus.CLOSED closed_account.id = "123" closed_account.name = "Closed Account" mock_generate_account.return_value = closed_account @@ -361,6 +366,7 @@ class TestOAuthCallback: mock_token_pair = MagicMock() mock_token_pair.access_token = "jwt_access_token" mock_token_pair.refresh_token = "jwt_refresh_token" + mock_token_pair.csrf_token = "csrf_token" mock_account_service.login.return_value = mock_token_pair # Execute OAuth callback @@ -368,9 +374,7 @@ class TestOAuthCallback: resource.get("github") # Verify current behavior: login succeeds (this is NOT ideal) - mock_redirect.assert_called_once_with( - 
"http://localhost:3000?access_token=jwt_access_token&refresh_token=jwt_refresh_token" - ) + mock_redirect.assert_called_once_with("http://localhost:3000") mock_account_service.login.assert_called_once() # Document expected behavior in comments: diff --git a/api/tests/unit_tests/controllers/console/test_wraps.py b/api/tests/unit_tests/controllers/console/test_wraps.py index 9742368f04..6777077de8 100644 --- a/api/tests/unit_tests/controllers/console/test_wraps.py +++ b/api/tests/unit_tests/controllers/console/test_wraps.py @@ -60,7 +60,7 @@ class TestAccountInitialization: return "success" # Act - with patch("controllers.console.wraps.current_user", mock_user): + with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_user, "tenant123")): result = protected_view() # Assert @@ -77,7 +77,7 @@ class TestAccountInitialization: return "success" # Act & Assert - with patch("controllers.console.wraps.current_user", mock_user): + with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_user, "tenant123")): with pytest.raises(AccountNotInitializedError): protected_view() @@ -163,7 +163,9 @@ class TestBillingResourceLimits: return "member_added" # Act - with patch("controllers.console.wraps.current_user"): + with patch( + "controllers.console.wraps.current_account_with_tenant", return_value=(MockUser("test_user"), "tenant123") + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): result = add_member() @@ -185,7 +187,10 @@ class TestBillingResourceLimits: # Act & Assert with app.test_request_context(): - with patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): with pytest.raises(Exception) as exc_info: add_member() @@ -207,7 +212,10 @@ class TestBillingResourceLimits: # Test 1: Should reject when source is datasets with app.test_request_context("/?source=datasets"): - with patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): with pytest.raises(Exception) as exc_info: upload_document() @@ -215,7 +223,10 @@ class TestBillingResourceLimits: # Test 2: Should allow when source is not datasets with app.test_request_context("/?source=other"): - with patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): result = upload_document() assert result == "document_uploaded" @@ -239,7 +250,9 @@ class TestRateLimiting: return "knowledge_success" # Act - with patch("controllers.console.wraps.current_user"): + with patch( + "controllers.console.wraps.current_account_with_tenant", return_value=(MockUser("test_user"), "tenant123") + ): with patch( "controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit ): @@ -271,7 +284,10 @@ class TestRateLimiting: # Act & Assert with app.test_request_context(): - with 
patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch( "controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit ): diff --git a/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py b/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py index bb1d5e2f67..3a4fdc3cd8 100644 --- a/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py +++ b/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py @@ -99,6 +99,8 @@ class TestAdvancedChatAppRunnerConversationVariables: workflow=mock_workflow, system_user_id=str(uuid4()), app=MagicMock(), + workflow_execution_repository=MagicMock(), + workflow_node_execution_repository=MagicMock(), ) # Mock database session @@ -237,6 +239,8 @@ class TestAdvancedChatAppRunnerConversationVariables: workflow=mock_workflow, system_user_id=str(uuid4()), app=MagicMock(), + workflow_execution_repository=MagicMock(), + workflow_node_execution_repository=MagicMock(), ) # Mock database session @@ -390,6 +394,8 @@ class TestAdvancedChatAppRunnerConversationVariables: workflow=mock_workflow, system_user_id=str(uuid4()), app=MagicMock(), + workflow_execution_repository=MagicMock(), + workflow_node_execution_repository=MagicMock(), ) # Mock database session diff --git a/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py b/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py new file mode 100644 index 0000000000..cd5ea8986a --- /dev/null +++ b/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py @@ -0,0 +1,63 @@ +from types import SimpleNamespace + +import pytest + +from core.app.apps.common.graph_runtime_state_support import GraphRuntimeStateSupport +from core.workflow.runtime import GraphRuntimeState +from core.workflow.runtime.variable_pool import VariablePool +from core.workflow.system_variable import SystemVariable + + +def _make_state(workflow_run_id: str | None) -> GraphRuntimeState: + variable_pool = VariablePool(system_variables=SystemVariable(workflow_execution_id=workflow_run_id)) + return GraphRuntimeState(variable_pool=variable_pool, start_at=0.0) + + +class _StubPipeline(GraphRuntimeStateSupport): + def __init__(self, *, cached_state: GraphRuntimeState | None, queue_state: GraphRuntimeState | None): + self._graph_runtime_state = cached_state + self._base_task_pipeline = SimpleNamespace(queue_manager=SimpleNamespace(graph_runtime_state=queue_state)) + + +def test_ensure_graph_runtime_initialized_caches_explicit_state(): + explicit_state = _make_state("run-explicit") + pipeline = _StubPipeline(cached_state=None, queue_state=None) + + resolved = pipeline._ensure_graph_runtime_initialized(explicit_state) + + assert resolved is explicit_state + assert pipeline._graph_runtime_state is explicit_state + + +def test_resolve_graph_runtime_state_reads_from_queue_when_cache_empty(): + queued_state = _make_state("run-queue") + pipeline = _StubPipeline(cached_state=None, queue_state=queued_state) + + resolved = pipeline._resolve_graph_runtime_state() + + assert resolved is queued_state + assert pipeline._graph_runtime_state is queued_state + + +def test_resolve_graph_runtime_state_raises_when_no_state_available(): + pipeline = _StubPipeline(cached_state=None, 
queue_state=None) + + with pytest.raises(ValueError): + pipeline._resolve_graph_runtime_state() + + +def test_extract_workflow_run_id_returns_value(): + state = _make_state("run-identifier") + pipeline = _StubPipeline(cached_state=state, queue_state=None) + + run_id = pipeline._extract_workflow_run_id(state) + + assert run_id == "run-identifier" + + +def test_extract_workflow_run_id_raises_when_missing(): + state = _make_state(None) + pipeline = _StubPipeline(cached_state=state, queue_state=None) + + with pytest.raises(ValueError): + pipeline._extract_workflow_run_id(state) diff --git a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter.py b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter.py index 5895f63f94..8423f1ab02 100644 --- a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter.py +++ b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter.py @@ -43,7 +43,7 @@ class TestWorkflowResponseConverterFetchFilesFromVariableValue: """Test with None input""" # The method signature expects Union[dict, list, Segment], but implementation handles None # We'll test the actual behavior by passing an empty dict instead - result = WorkflowResponseConverter._fetch_files_from_variable_value(None) # type: ignore + result = WorkflowResponseConverter._fetch_files_from_variable_value(None) assert result == [] def test_fetch_files_from_variable_value_with_empty_dict(self): diff --git a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_process_data.py b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_process_data.py deleted file mode 100644 index 3366666a47..0000000000 --- a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_process_data.py +++ /dev/null @@ -1,430 +0,0 @@ -""" -Unit tests for WorkflowResponseConverter focusing on process_data truncation functionality. 
-""" - -import uuid -from dataclasses import dataclass -from datetime import datetime -from typing import Any -from unittest.mock import Mock - -import pytest - -from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter -from core.app.entities.app_invoke_entities import WorkflowAppGenerateEntity -from core.app.entities.queue_entities import QueueNodeRetryEvent, QueueNodeSucceededEvent -from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus -from core.workflow.enums import NodeType -from libs.datetime_utils import naive_utc_now -from models import Account - - -@dataclass -class ProcessDataResponseScenario: - """Test scenario for process_data in responses.""" - - name: str - original_process_data: dict[str, Any] | None - truncated_process_data: dict[str, Any] | None - expected_response_data: dict[str, Any] | None - expected_truncated_flag: bool - - -class TestWorkflowResponseConverterCenarios: - """Test process_data truncation in WorkflowResponseConverter.""" - - def create_mock_generate_entity(self) -> WorkflowAppGenerateEntity: - """Create a mock WorkflowAppGenerateEntity.""" - mock_entity = Mock(spec=WorkflowAppGenerateEntity) - mock_app_config = Mock() - mock_app_config.tenant_id = "test-tenant-id" - mock_entity.app_config = mock_app_config - return mock_entity - - def create_workflow_response_converter(self) -> WorkflowResponseConverter: - """Create a WorkflowResponseConverter for testing.""" - - mock_entity = self.create_mock_generate_entity() - mock_user = Mock(spec=Account) - mock_user.id = "test-user-id" - mock_user.name = "Test User" - mock_user.email = "test@example.com" - - return WorkflowResponseConverter(application_generate_entity=mock_entity, user=mock_user) - - def create_workflow_node_execution( - self, - process_data: dict[str, Any] | None = None, - truncated_process_data: dict[str, Any] | None = None, - execution_id: str = "test-execution-id", - ) -> WorkflowNodeExecution: - """Create a WorkflowNodeExecution for testing.""" - execution = WorkflowNodeExecution( - id=execution_id, - workflow_id="test-workflow-id", - workflow_execution_id="test-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - process_data=process_data, - status=WorkflowNodeExecutionStatus.SUCCEEDED, - created_at=datetime.now(), - finished_at=datetime.now(), - ) - - if truncated_process_data is not None: - execution.set_truncated_process_data(truncated_process_data) - - return execution - - def create_node_succeeded_event(self) -> QueueNodeSucceededEvent: - """Create a QueueNodeSucceededEvent for testing.""" - return QueueNodeSucceededEvent( - node_id="test-node-id", - node_type=NodeType.CODE, - node_execution_id=str(uuid.uuid4()), - start_at=naive_utc_now(), - parallel_id=None, - parallel_start_node_id=None, - parent_parallel_id=None, - parent_parallel_start_node_id=None, - in_iteration_id=None, - in_loop_id=None, - ) - - def create_node_retry_event(self) -> QueueNodeRetryEvent: - """Create a QueueNodeRetryEvent for testing.""" - return QueueNodeRetryEvent( - inputs={"data": "inputs"}, - outputs={"data": "outputs"}, - error="oops", - retry_index=1, - node_id="test-node-id", - node_type=NodeType.CODE, - node_title="test code", - provider_type="built-in", - provider_id="code", - node_execution_id=str(uuid.uuid4()), - start_at=naive_utc_now(), - parallel_id=None, - parallel_start_node_id=None, - parent_parallel_id=None, - parent_parallel_start_node_id=None, - in_iteration_id=None, - 
in_loop_id=None, - ) - - def test_workflow_node_finish_response_uses_truncated_process_data(self): - """Test that node finish response uses get_response_process_data().""" - converter = self.create_workflow_response_converter() - - original_data = {"large_field": "x" * 10000, "metadata": "info"} - truncated_data = {"large_field": "[TRUNCATED]", "metadata": "info"} - - execution = self.create_workflow_node_execution( - process_data=original_data, truncated_process_data=truncated_data - ) - event = self.create_node_succeeded_event() - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - # Response should use truncated data, not original - assert response is not None - assert response.data.process_data == truncated_data - assert response.data.process_data != original_data - assert response.data.process_data_truncated is True - - def test_workflow_node_finish_response_without_truncation(self): - """Test node finish response when no truncation is applied.""" - converter = self.create_workflow_response_converter() - - original_data = {"small": "data"} - - execution = self.create_workflow_node_execution(process_data=original_data) - event = self.create_node_succeeded_event() - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - # Response should use original data - assert response is not None - assert response.data.process_data == original_data - assert response.data.process_data_truncated is False - - def test_workflow_node_finish_response_with_none_process_data(self): - """Test node finish response when process_data is None.""" - converter = self.create_workflow_response_converter() - - execution = self.create_workflow_node_execution(process_data=None) - event = self.create_node_succeeded_event() - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - # Response should have None process_data - assert response is not None - assert response.data.process_data is None - assert response.data.process_data_truncated is False - - def test_workflow_node_retry_response_uses_truncated_process_data(self): - """Test that node retry response uses get_response_process_data().""" - converter = self.create_workflow_response_converter() - - original_data = {"large_field": "x" * 10000, "metadata": "info"} - truncated_data = {"large_field": "[TRUNCATED]", "metadata": "info"} - - execution = self.create_workflow_node_execution( - process_data=original_data, truncated_process_data=truncated_data - ) - event = self.create_node_retry_event() - - response = converter.workflow_node_retry_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - # Response should use truncated data, not original - assert response is not None - assert response.data.process_data == truncated_data - assert response.data.process_data != original_data - assert response.data.process_data_truncated is True - - def test_workflow_node_retry_response_without_truncation(self): - """Test node retry response when no truncation is applied.""" - converter = self.create_workflow_response_converter() - - original_data = {"small": "data"} - - execution = self.create_workflow_node_execution(process_data=original_data) - event = self.create_node_retry_event() - - response = converter.workflow_node_retry_to_stream_response( - 
event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - # Response should use original data - assert response is not None - assert response.data.process_data == original_data - assert response.data.process_data_truncated is False - - def test_iteration_and_loop_nodes_return_none(self): - """Test that iteration and loop nodes return None (no change from existing behavior).""" - converter = self.create_workflow_response_converter() - - # Test iteration node - iteration_execution = self.create_workflow_node_execution(process_data={"test": "data"}) - iteration_execution.node_type = NodeType.ITERATION - - event = self.create_node_succeeded_event() - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=iteration_execution, - ) - - # Should return None for iteration nodes - assert response is None - - # Test loop node - loop_execution = self.create_workflow_node_execution(process_data={"test": "data"}) - loop_execution.node_type = NodeType.LOOP - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=loop_execution, - ) - - # Should return None for loop nodes - assert response is None - - def test_execution_without_workflow_execution_id_returns_none(self): - """Test that executions without workflow_execution_id return None.""" - converter = self.create_workflow_response_converter() - - execution = self.create_workflow_node_execution(process_data={"test": "data"}) - execution.workflow_execution_id = None # Single-step debugging - - event = self.create_node_succeeded_event() - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - # Should return None for single-step debugging - assert response is None - - @staticmethod - def get_process_data_response_scenarios() -> list[ProcessDataResponseScenario]: - """Create test scenarios for process_data responses.""" - return [ - ProcessDataResponseScenario( - name="none_process_data", - original_process_data=None, - truncated_process_data=None, - expected_response_data=None, - expected_truncated_flag=False, - ), - ProcessDataResponseScenario( - name="small_process_data_no_truncation", - original_process_data={"small": "data"}, - truncated_process_data=None, - expected_response_data={"small": "data"}, - expected_truncated_flag=False, - ), - ProcessDataResponseScenario( - name="large_process_data_with_truncation", - original_process_data={"large": "x" * 10000, "metadata": "info"}, - truncated_process_data={"large": "[TRUNCATED]", "metadata": "info"}, - expected_response_data={"large": "[TRUNCATED]", "metadata": "info"}, - expected_truncated_flag=True, - ), - ProcessDataResponseScenario( - name="empty_process_data", - original_process_data={}, - truncated_process_data=None, - expected_response_data={}, - expected_truncated_flag=False, - ), - ProcessDataResponseScenario( - name="complex_data_with_truncation", - original_process_data={ - "logs": ["entry"] * 1000, # Large array - "config": {"setting": "value"}, - "status": "processing", - }, - truncated_process_data={ - "logs": "[TRUNCATED: 1000 items]", - "config": {"setting": "value"}, - "status": "processing", - }, - expected_response_data={ - "logs": "[TRUNCATED: 1000 items]", - "config": {"setting": "value"}, - "status": "processing", - }, - expected_truncated_flag=True, - ), - ] - - @pytest.mark.parametrize( - "scenario", - 
get_process_data_response_scenarios(), - ids=[scenario.name for scenario in get_process_data_response_scenarios()], - ) - def test_node_finish_response_scenarios(self, scenario: ProcessDataResponseScenario): - """Test various scenarios for node finish responses.""" - - mock_user = Mock(spec=Account) - mock_user.id = "test-user-id" - mock_user.name = "Test User" - mock_user.email = "test@example.com" - - converter = WorkflowResponseConverter( - application_generate_entity=Mock(spec=WorkflowAppGenerateEntity, app_config=Mock(tenant_id="test-tenant")), - user=mock_user, - ) - - execution = WorkflowNodeExecution( - id="test-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - process_data=scenario.original_process_data, - status=WorkflowNodeExecutionStatus.SUCCEEDED, - created_at=datetime.now(), - finished_at=datetime.now(), - ) - - if scenario.truncated_process_data is not None: - execution.set_truncated_process_data(scenario.truncated_process_data) - - event = QueueNodeSucceededEvent( - node_id="test-node-id", - node_type=NodeType.CODE, - node_execution_id=str(uuid.uuid4()), - start_at=naive_utc_now(), - parallel_id=None, - parallel_start_node_id=None, - parent_parallel_id=None, - parent_parallel_start_node_id=None, - in_iteration_id=None, - in_loop_id=None, - ) - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - assert response is not None - assert response.data.process_data == scenario.expected_response_data - assert response.data.process_data_truncated == scenario.expected_truncated_flag - - @pytest.mark.parametrize( - "scenario", - get_process_data_response_scenarios(), - ids=[scenario.name for scenario in get_process_data_response_scenarios()], - ) - def test_node_retry_response_scenarios(self, scenario: ProcessDataResponseScenario): - """Test various scenarios for node retry responses.""" - - mock_user = Mock(spec=Account) - mock_user.id = "test-user-id" - mock_user.name = "Test User" - mock_user.email = "test@example.com" - - converter = WorkflowResponseConverter( - application_generate_entity=Mock(spec=WorkflowAppGenerateEntity, app_config=Mock(tenant_id="test-tenant")), - user=mock_user, - ) - - execution = WorkflowNodeExecution( - id="test-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - process_data=scenario.original_process_data, - status=WorkflowNodeExecutionStatus.FAILED, # Retry scenario - created_at=datetime.now(), - finished_at=datetime.now(), - ) - - if scenario.truncated_process_data is not None: - execution.set_truncated_process_data(scenario.truncated_process_data) - - event = self.create_node_retry_event() - - response = converter.workflow_node_retry_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - assert response is not None - assert response.data.process_data == scenario.expected_response_data - assert response.data.process_data_truncated == scenario.expected_truncated_flag diff --git a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py new file mode 100644 index 0000000000..1c9f577a50 --- /dev/null +++ 
b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py @@ -0,0 +1,810 @@ +""" +Unit tests for WorkflowResponseConverter focusing on process_data truncation functionality. +""" + +import uuid +from collections.abc import Mapping +from dataclasses import dataclass +from typing import Any +from unittest.mock import Mock + +import pytest + +from core.app.app_config.entities import WorkflowUIBasedAppConfig +from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter +from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity +from core.app.entities.queue_entities import ( + QueueEvent, + QueueIterationStartEvent, + QueueLoopStartEvent, + QueueNodeExceptionEvent, + QueueNodeFailedEvent, + QueueNodeRetryEvent, + QueueNodeStartedEvent, + QueueNodeSucceededEvent, +) +from core.workflow.enums import NodeType +from core.workflow.system_variable import SystemVariable +from libs.datetime_utils import naive_utc_now +from models import Account +from models.model import AppMode + + +class TestWorkflowResponseConverter: + """Test truncation in WorkflowResponseConverter.""" + + def create_mock_generate_entity(self) -> WorkflowAppGenerateEntity: + """Create a mock WorkflowAppGenerateEntity.""" + mock_entity = Mock(spec=WorkflowAppGenerateEntity) + mock_app_config = Mock() + mock_app_config.tenant_id = "test-tenant-id" + mock_entity.invoke_from = InvokeFrom.WEB_APP + mock_entity.app_config = mock_app_config + mock_entity.inputs = {} + return mock_entity + + def create_workflow_response_converter(self) -> WorkflowResponseConverter: + """Create a WorkflowResponseConverter for testing.""" + + mock_entity = self.create_mock_generate_entity() + mock_user = Mock(spec=Account) + mock_user.id = "test-user-id" + mock_user.name = "Test User" + mock_user.email = "test@example.com" + + system_variables = SystemVariable(workflow_id="wf-id", workflow_execution_id="initial-run-id") + return WorkflowResponseConverter( + application_generate_entity=mock_entity, + user=mock_user, + system_variables=system_variables, + ) + + def create_node_started_event(self, *, node_execution_id: str | None = None) -> QueueNodeStartedEvent: + """Create a QueueNodeStartedEvent for testing.""" + return QueueNodeStartedEvent( + node_execution_id=node_execution_id or str(uuid.uuid4()), + node_id="test-node-id", + node_title="Test Node", + node_type=NodeType.CODE, + start_at=naive_utc_now(), + in_iteration_id=None, + in_loop_id=None, + provider_type="built-in", + provider_id="code", + ) + + def create_node_succeeded_event( + self, + *, + node_execution_id: str, + process_data: Mapping[str, Any] | None = None, + ) -> QueueNodeSucceededEvent: + """Create a QueueNodeSucceededEvent for testing.""" + return QueueNodeSucceededEvent( + node_id="test-node-id", + node_type=NodeType.CODE, + node_execution_id=node_execution_id, + start_at=naive_utc_now(), + in_iteration_id=None, + in_loop_id=None, + inputs={}, + process_data=process_data or {}, + outputs={}, + execution_metadata={}, + ) + + def create_node_retry_event( + self, + *, + node_execution_id: str, + process_data: Mapping[str, Any] | None = None, + ) -> QueueNodeRetryEvent: + """Create a QueueNodeRetryEvent for testing.""" + return QueueNodeRetryEvent( + inputs={"data": "inputs"}, + outputs={"data": "outputs"}, + process_data=process_data or {}, + error="oops", + retry_index=1, + node_id="test-node-id", + node_type=NodeType.CODE, + node_title="test code", + provider_type="built-in", + provider_id="code", + 
node_execution_id=node_execution_id, + start_at=naive_utc_now(), + in_iteration_id=None, + in_loop_id=None, + ) + + def test_workflow_node_finish_response_uses_truncated_process_data(self): + """Test that node finish response uses get_response_process_data().""" + converter = self.create_workflow_response_converter() + + original_data = {"large_field": "x" * 10000, "metadata": "info"} + truncated_data = {"large_field": "[TRUNCATED]", "metadata": "info"} + + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_succeeded_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + if mapping == dict(original_data): + return truncated_data, True + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test-task-id", + ) + + # Response should use truncated data, not original + assert response is not None + assert response.data.process_data == truncated_data + assert response.data.process_data != original_data + assert response.data.process_data_truncated is True + + def test_workflow_node_finish_response_without_truncation(self): + """Test node finish response when no truncation is applied.""" + converter = self.create_workflow_response_converter() + + original_data = {"small": "data"} + + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_succeeded_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test-task-id", + ) + + # Response should use original data + assert response is not None + assert response.data.process_data == original_data + assert response.data.process_data_truncated is False + + def test_workflow_node_finish_response_with_none_process_data(self): + """Test node finish response when process_data is None.""" + converter = self.create_workflow_response_converter() + + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_succeeded_event( + node_execution_id=start_event.node_execution_id, + process_data=None, + ) + + def fake_truncate(mapping): + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test-task-id", + ) + + # Response should normalize missing process_data to an empty mapping + assert response is not None + assert response.data.process_data == {} + assert response.data.process_data_truncated is False + + def 
test_workflow_node_retry_response_uses_truncated_process_data(self): + """Test that node retry response uses get_response_process_data().""" + converter = self.create_workflow_response_converter() + + original_data = {"large_field": "x" * 10000, "metadata": "info"} + truncated_data = {"large_field": "[TRUNCATED]", "metadata": "info"} + + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_retry_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + if mapping == dict(original_data): + return truncated_data, True + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] + + response = converter.workflow_node_retry_to_stream_response( + event=event, + task_id="test-task-id", + ) + + # Response should use truncated data, not original + assert response is not None + assert response.data.process_data == truncated_data + assert response.data.process_data != original_data + assert response.data.process_data_truncated is True + + def test_workflow_node_retry_response_without_truncation(self): + """Test node retry response when no truncation is applied.""" + converter = self.create_workflow_response_converter() + + original_data = {"small": "data"} + + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_retry_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] + + response = converter.workflow_node_retry_to_stream_response( + event=event, + task_id="test-task-id", + ) + + assert response is not None + assert response.data.process_data == original_data + assert response.data.process_data_truncated is False + + def test_iteration_and_loop_nodes_return_none(self): + """Test that iteration and loop nodes return None (no streaming events).""" + converter = self.create_workflow_response_converter() + + iteration_event = QueueNodeSucceededEvent( + node_id="iteration-node", + node_type=NodeType.ITERATION, + node_execution_id=str(uuid.uuid4()), + start_at=naive_utc_now(), + in_iteration_id=None, + in_loop_id=None, + inputs={}, + process_data={}, + outputs={}, + execution_metadata={}, + ) + + response = converter.workflow_node_finish_to_stream_response( + event=iteration_event, + task_id="test-task-id", + ) + assert response is None + + loop_event = iteration_event.model_copy(update={"node_type": NodeType.LOOP}) + response = converter.workflow_node_finish_to_stream_response( + event=loop_event, + task_id="test-task-id", + ) + assert response is None + + def test_finish_without_start_raises(self): + """Ensure finish responses require a prior workflow start.""" + converter = self.create_workflow_response_converter() + event = self.create_node_succeeded_event( + node_execution_id=str(uuid.uuid4()), + process_data={}, + ) + + with pytest.raises(ValueError): + converter.workflow_node_finish_to_stream_response( + event=event, 
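+ # the converter was never primed via workflow_start_to_stream_response, so this call has no run to attach the node to and should raise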
+ task_id="test-task-id", + ) + + +@dataclass +class TestCase: + """Test case data for table-driven tests.""" + + name: str + invoke_from: InvokeFrom + expected_truncation_enabled: bool + description: str + + +class TestWorkflowResponseConverterServiceApiTruncation: + """Test class for Service API truncation functionality in WorkflowResponseConverter.""" + + def create_test_app_generate_entity(self, invoke_from: InvokeFrom) -> WorkflowAppGenerateEntity: + """Create a test WorkflowAppGenerateEntity with specified invoke_from.""" + # Create a minimal WorkflowUIBasedAppConfig for testing + app_config = WorkflowUIBasedAppConfig( + tenant_id="test_tenant", + app_id="test_app", + app_mode=AppMode.WORKFLOW, + workflow_id="test_workflow_id", + ) + + entity = WorkflowAppGenerateEntity( + task_id="test_task_id", + app_id="test_app_id", + app_config=app_config, + tenant_id="test_tenant", + app_mode=AppMode.WORKFLOW, + invoke_from=invoke_from, + inputs={"test_input": "test_value"}, + user_id="test_user_id", + stream=True, + files=[], + workflow_execution_id="test_workflow_exec_id", + ) + return entity + + def create_test_user(self) -> Account: + """Create a test user account.""" + account = Account( + name="Test User", + email="test@example.com", + ) + # Manually set the ID for testing purposes + account.id = "test_user_id" + return account + + def create_test_system_variables(self) -> SystemVariable: + """Create test system variables.""" + return SystemVariable() + + def create_test_converter(self, invoke_from: InvokeFrom) -> WorkflowResponseConverter: + """Create WorkflowResponseConverter with specified invoke_from.""" + entity = self.create_test_app_generate_entity(invoke_from) + user = self.create_test_user() + system_variables = self.create_test_system_variables() + + converter = WorkflowResponseConverter( + application_generate_entity=entity, + user=user, + system_variables=system_variables, + ) + # ensure `workflow_run_id` is set. 
+ converter.workflow_start_to_stream_response( + task_id="test-task-id", + workflow_run_id="test-workflow-run-id", + workflow_id="test-workflow-id", + ) + return converter + + @pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="service_api_truncation_disabled", + invoke_from=InvokeFrom.SERVICE_API, + expected_truncation_enabled=False, + description="Service API calls should have truncation disabled", + ), + TestCase( + name="web_app_truncation_enabled", + invoke_from=InvokeFrom.WEB_APP, + expected_truncation_enabled=True, + description="Web app calls should have truncation enabled", + ), + TestCase( + name="debugger_truncation_enabled", + invoke_from=InvokeFrom.DEBUGGER, + expected_truncation_enabled=True, + description="Debugger calls should have truncation enabled", + ), + TestCase( + name="explore_truncation_enabled", + invoke_from=InvokeFrom.EXPLORE, + expected_truncation_enabled=True, + description="Explore calls should have truncation enabled", + ), + TestCase( + name="published_truncation_enabled", + invoke_from=InvokeFrom.PUBLISHED, + expected_truncation_enabled=True, + description="Published app calls should have truncation enabled", + ), + ], + ids=lambda x: x.name, + ) + def test_truncator_selection_based_on_invoke_from(self, test_case: TestCase): + """Test that the correct truncator is selected based on invoke_from.""" + converter = self.create_test_converter(test_case.invoke_from) + + # Test truncation behavior instead of checking private attribute + + # Create a test event with large data + large_value = {"key": ["x"] * 2000} # Large data that would be truncated + + event = QueueNodeSucceededEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + start_at=naive_utc_now(), + inputs=large_value, + process_data=large_value, + outputs=large_value, + error=None, + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + ) + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test_task", + ) + + # Verify response is not None + assert response is not None + + # Verify truncation behavior matches expectations + if test_case.expected_truncation_enabled: + # Truncation should be enabled for non-service-api calls + assert response.data.inputs_truncated + assert response.data.process_data_truncated + assert response.data.outputs_truncated + else: + # SERVICE_API should not truncate + assert not response.data.inputs_truncated + assert not response.data.process_data_truncated + assert not response.data.outputs_truncated + + def test_service_api_truncator_no_op_mapping(self): + """Test that Service API truncator doesn't truncate variable mappings.""" + converter = self.create_test_converter(InvokeFrom.SERVICE_API) + + # Create a test event with large data + large_value: dict[str, Any] = { + "large_string": "x" * 10000, # Large string + "large_list": list(range(2000)), # Large array + "nested_data": {"deep_nested": {"very_deep": {"value": "x" * 5000}}}, + } + + event = QueueNodeSucceededEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + start_at=naive_utc_now(), + inputs=large_value, + process_data=large_value, + outputs=large_value, + error=None, + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + ) + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test_task", + ) + + # Verify response is not None before reading its payload + assert response is not None + data = response.data + assert data.inputs == large_value + assert 
data.process_data == large_value + assert data.outputs == large_value + # Service API should not truncate + assert data.inputs_truncated is False + assert data.process_data_truncated is False + assert data.outputs_truncated is False + + def test_web_app_truncator_works_normally(self): + """Test that web app truncator still works normally.""" + converter = self.create_test_converter(InvokeFrom.WEB_APP) + + # Create a test event with large data + large_value = { + "large_string": "x" * 10000, # Large string + "large_list": list(range(2000)), # Large array + } + + event = QueueNodeSucceededEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + start_at=naive_utc_now(), + inputs=large_value, + process_data=large_value, + outputs=large_value, + error=None, + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + ) + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test_task", + ) + + # Verify response is not None + assert response is not None + + # Web app should truncate + data = response.data + assert data.inputs != large_value + assert data.process_data != large_value + assert data.outputs != large_value + # The exact behavior depends on VariableTruncator implementation + # Just verify that truncation flags are present + assert data.inputs_truncated is True + assert data.process_data_truncated is True + assert data.outputs_truncated is True + + @staticmethod + def _create_event_by_type( + type_: QueueEvent, inputs: Mapping[str, Any], process_data: Mapping[str, Any], outputs: Mapping[str, Any] + ) -> QueueNodeSucceededEvent | QueueNodeFailedEvent | QueueNodeExceptionEvent: + if type_ == QueueEvent.NODE_SUCCEEDED: + return QueueNodeSucceededEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + start_at=naive_utc_now(), + inputs=inputs, + process_data=process_data, + outputs=outputs, + error=None, + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + ) + elif type_ == QueueEvent.NODE_FAILED: + return QueueNodeFailedEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + start_at=naive_utc_now(), + inputs=inputs, + process_data=process_data, + outputs=outputs, + error="oops", + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + ) + elif type_ == QueueEvent.NODE_EXCEPTION: + return QueueNodeExceptionEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + start_at=naive_utc_now(), + inputs=inputs, + process_data=process_data, + outputs=outputs, + error="oops", + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + ) + else: + raise Exception("unknown type.") + + @pytest.mark.parametrize( + "event_type", + [ + QueueEvent.NODE_SUCCEEDED, + QueueEvent.NODE_FAILED, + QueueEvent.NODE_EXCEPTION, + ], + ) + def test_service_api_node_finish_event_no_truncation(self, event_type: QueueEvent): + """Test that Service API doesn't truncate node finish events.""" + converter = self.create_test_converter(InvokeFrom.SERVICE_API) + # Create test event with large data + large_inputs = {"input1": "x" * 5000, "input2": list(range(2000))} + large_process_data = {"process1": "y" * 5000, "process2": {"nested": ["z"] * 2000}} + large_outputs = {"output1": "result" * 1000, "output2": list(range(2000))} + + event = TestWorkflowResponseConverterServiceApiTruncation._create_event_by_type( + event_type, large_inputs, large_process_data, 
large_outputs + ) + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test_task", + ) + + # Verify response is not None + assert response is not None + + # Verify response contains full data (not truncated) + assert response.data.inputs == large_inputs + assert response.data.process_data == large_process_data + assert response.data.outputs == large_outputs + assert not response.data.inputs_truncated + assert not response.data.process_data_truncated + assert not response.data.outputs_truncated + + def test_service_api_node_retry_event_no_truncation(self): + """Test that Service API doesn't truncate node retry events.""" + converter = self.create_test_converter(InvokeFrom.SERVICE_API) + + # Create test event with large data + large_inputs = {"retry_input": "x" * 5000} + large_process_data = {"retry_process": "y" * 5000} + large_outputs = {"retry_output": "z" * 5000} + + # First, we need to store a snapshot by simulating a start event + start_event = QueueNodeStartedEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + node_title="Test Node", + node_run_index=1, + start_at=naive_utc_now(), + in_iteration_id=None, + in_loop_id=None, + agent_strategy=None, + provider_type="plugin", + provider_id="test/test_plugin", + ) + converter.workflow_node_start_to_stream_response(event=start_event, task_id="test_task") + + # Now create retry event + event = QueueNodeRetryEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + node_title="Test Node", + node_run_index=1, + start_at=naive_utc_now(), + inputs=large_inputs, + process_data=large_process_data, + outputs=large_outputs, + error="Retry error", + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + retry_index=1, + provider_type="plugin", + provider_id="test/test_plugin", + ) + + response = converter.workflow_node_retry_to_stream_response( + event=event, + task_id="test_task", + ) + + # Verify response is not None + assert response is not None + + # Verify response contains full data (not truncated) + assert response.data.inputs == large_inputs + assert response.data.process_data == large_process_data + assert response.data.outputs == large_outputs + assert not response.data.inputs_truncated + assert not response.data.process_data_truncated + assert not response.data.outputs_truncated + + def test_service_api_iteration_events_no_truncation(self): + """Test that Service API doesn't truncate iteration events.""" + converter = self.create_test_converter(InvokeFrom.SERVICE_API) + + # Test iteration start event + large_value = {"iteration_input": ["x"] * 2000} + + start_event = QueueIterationStartEvent( + node_execution_id="test_iter_exec_id", + node_id="test_iteration", + node_type=NodeType.ITERATION, + node_title="Test Iteration", + node_run_index=0, + start_at=naive_utc_now(), + inputs=large_value, + metadata={}, + ) + + response = converter.workflow_iteration_start_to_stream_response( + task_id="test_task", + workflow_execution_id="test_workflow_exec_id", + event=start_event, + ) + + assert response is not None + assert response.data.inputs == large_value + assert not response.data.inputs_truncated + + def test_service_api_loop_events_no_truncation(self): + """Test that Service API doesn't truncate loop events.""" + converter = self.create_test_converter(InvokeFrom.SERVICE_API) + + # Test loop start event + large_inputs = {"loop_input": ["x"] * 2000} + + start_event = QueueLoopStartEvent( + 
node_execution_id="test_loop_exec_id", + node_id="test_loop", + node_type=NodeType.LOOP, + node_title="Test Loop", + start_at=naive_utc_now(), + inputs=large_inputs, + metadata={}, + node_run_index=0, + ) + + response = converter.workflow_loop_start_to_stream_response( + task_id="test_task", + workflow_execution_id="test_workflow_exec_id", + event=start_event, + ) + + assert response is not None + assert response.data.inputs == large_inputs + assert not response.data.inputs_truncated + + def test_web_app_node_finish_event_truncation_works(self): + """Test that web app still truncates node finish events.""" + converter = self.create_test_converter(InvokeFrom.WEB_APP) + + # Create test event with large data that should be truncated + large_inputs = {"input1": ["x"] * 2000} + large_process_data = {"process1": ["y"] * 2000} + large_outputs = {"output1": ["z"] * 2000} + + event = QueueNodeSucceededEvent( + node_execution_id="test_node_exec_id", + node_id="test_node", + node_type=NodeType.LLM, + start_at=naive_utc_now(), + inputs=large_inputs, + process_data=large_process_data, + outputs=large_outputs, + error=None, + execution_metadata=None, + in_iteration_id=None, + in_loop_id=None, + ) + + response = converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test_task", + ) + + # Verify response is not None + assert response is not None + + # Verify response contains truncated data + # The exact behavior depends on VariableTruncator implementation + # Just verify truncation flags are set correctly (may or may not be truncated depending on size) + # At minimum, the truncation mechanism should work + assert isinstance(response.data.inputs, dict) + assert response.data.inputs_truncated + assert isinstance(response.data.process_data, dict) + assert response.data.process_data_truncated + assert isinstance(response.data.outputs, dict) + assert response.data.outputs_truncated diff --git a/api/tests/unit_tests/core/mcp/__init__.py b/api/tests/unit_tests/core/mcp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/mcp/auth/__init__.py b/api/tests/unit_tests/core/mcp/auth/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/mcp/auth/test_auth_flow.py b/api/tests/unit_tests/core/mcp/auth/test_auth_flow.py new file mode 100644 index 0000000000..12a9f11205 --- /dev/null +++ b/api/tests/unit_tests/core/mcp/auth/test_auth_flow.py @@ -0,0 +1,740 @@ +"""Unit tests for MCP OAuth authentication flow.""" + +from unittest.mock import Mock, patch + +import pytest + +from core.entities.mcp_provider import MCPProviderEntity +from core.mcp.auth.auth_flow import ( + OAUTH_STATE_EXPIRY_SECONDS, + OAUTH_STATE_REDIS_KEY_PREFIX, + OAuthCallbackState, + _create_secure_redis_state, + _retrieve_redis_state, + auth, + check_support_resource_discovery, + discover_oauth_metadata, + exchange_authorization, + generate_pkce_challenge, + handle_callback, + refresh_authorization, + register_client, + start_authorization, +) +from core.mcp.entities import AuthActionType, AuthResult +from core.mcp.types import ( + OAuthClientInformation, + OAuthClientInformationFull, + OAuthClientMetadata, + OAuthMetadata, + OAuthTokens, +) + + +class TestPKCEGeneration: + """Test PKCE challenge generation.""" + + def test_generate_pkce_challenge(self): + """Test PKCE challenge and verifier generation.""" + code_verifier, code_challenge = generate_pkce_challenge() + + # Verify format - should be URL-safe base64 without padding + assert 
"=" not in code_verifier + assert "+" not in code_verifier + assert "/" not in code_verifier + assert "=" not in code_challenge + assert "+" not in code_challenge + assert "/" not in code_challenge + + # Verify length + assert len(code_verifier) > 40 # Should be around 54 characters + assert len(code_challenge) > 40 # Should be around 43 characters + + def test_generate_pkce_challenge_uniqueness(self): + """Test that PKCE generation produces unique values.""" + results = set() + for _ in range(10): + code_verifier, code_challenge = generate_pkce_challenge() + results.add((code_verifier, code_challenge)) + + # All should be unique + assert len(results) == 10 + + +class TestRedisStateManagement: + """Test Redis state management functions.""" + + @patch("core.mcp.auth.auth_flow.redis_client") + def test_create_secure_redis_state(self, mock_redis): + """Test creating secure Redis state.""" + state_data = OAuthCallbackState( + provider_id="test-provider", + tenant_id="test-tenant", + server_url="https://example.com", + metadata=None, + client_information=OAuthClientInformation(client_id="test-client"), + code_verifier="test-verifier", + redirect_uri="https://redirect.example.com", + ) + + state_key = _create_secure_redis_state(state_data) + + # Verify state key format + assert len(state_key) > 20 # Should be a secure random token + + # Verify Redis call + mock_redis.setex.assert_called_once() + call_args = mock_redis.setex.call_args + assert call_args[0][0].startswith(OAUTH_STATE_REDIS_KEY_PREFIX) + assert call_args[0][1] == OAUTH_STATE_EXPIRY_SECONDS + assert state_data.model_dump_json() in call_args[0][2] + + @patch("core.mcp.auth.auth_flow.redis_client") + def test_retrieve_redis_state_success(self, mock_redis): + """Test retrieving state from Redis.""" + state_data = OAuthCallbackState( + provider_id="test-provider", + tenant_id="test-tenant", + server_url="https://example.com", + metadata=None, + client_information=OAuthClientInformation(client_id="test-client"), + code_verifier="test-verifier", + redirect_uri="https://redirect.example.com", + ) + mock_redis.get.return_value = state_data.model_dump_json() + + result = _retrieve_redis_state("test-state-key") + + # Verify result + assert result.provider_id == "test-provider" + assert result.tenant_id == "test-tenant" + assert result.server_url == "https://example.com" + + # Verify Redis calls + mock_redis.get.assert_called_once_with(f"{OAUTH_STATE_REDIS_KEY_PREFIX}test-state-key") + mock_redis.delete.assert_called_once_with(f"{OAUTH_STATE_REDIS_KEY_PREFIX}test-state-key") + + @patch("core.mcp.auth.auth_flow.redis_client") + def test_retrieve_redis_state_not_found(self, mock_redis): + """Test retrieving non-existent state from Redis.""" + mock_redis.get.return_value = None + + with pytest.raises(ValueError) as exc_info: + _retrieve_redis_state("nonexistent-key") + + assert "State parameter has expired or does not exist" in str(exc_info.value) + + @patch("core.mcp.auth.auth_flow.redis_client") + def test_retrieve_redis_state_invalid_json(self, mock_redis): + """Test retrieving invalid JSON state from Redis.""" + mock_redis.get.return_value = '{"invalid": json}' + + with pytest.raises(ValueError) as exc_info: + _retrieve_redis_state("test-key") + + assert "Invalid state parameter" in str(exc_info.value) + # State should still be deleted + mock_redis.delete.assert_called_once() + + +class TestOAuthDiscovery: + """Test OAuth discovery functions.""" + + @patch("core.helper.ssrf_proxy.get") + def test_check_support_resource_discovery_success(self, 
mock_get): + """Test successful resource discovery check.""" + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"authorization_server_url": ["https://auth.example.com"]} + mock_get.return_value = mock_response + + supported, auth_url = check_support_resource_discovery("https://api.example.com/endpoint") + + assert supported is True + assert auth_url == "https://auth.example.com" + mock_get.assert_called_once_with( + "https://api.example.com/.well-known/oauth-protected-resource", + headers={"MCP-Protocol-Version": "2025-03-26", "User-Agent": "Dify"}, + ) + + @patch("core.helper.ssrf_proxy.get") + def test_check_support_resource_discovery_not_supported(self, mock_get): + """Test resource discovery not supported.""" + mock_response = Mock() + mock_response.status_code = 404 + mock_get.return_value = mock_response + + supported, auth_url = check_support_resource_discovery("https://api.example.com") + + assert supported is False + assert auth_url == "" + + @patch("core.helper.ssrf_proxy.get") + def test_check_support_resource_discovery_with_query_fragment(self, mock_get): + """Test resource discovery with query and fragment.""" + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"authorization_server_url": ["https://auth.example.com"]} + mock_get.return_value = mock_response + + supported, auth_url = check_support_resource_discovery("https://api.example.com/path?query=1#fragment") + + assert supported is True + assert auth_url == "https://auth.example.com" + mock_get.assert_called_once_with( + "https://api.example.com/.well-known/oauth-protected-resource?query=1#fragment", + headers={"MCP-Protocol-Version": "2025-03-26", "User-Agent": "Dify"}, + ) + + @patch("core.helper.ssrf_proxy.get") + def test_discover_oauth_metadata_with_resource_discovery(self, mock_get): + """Test OAuth metadata discovery with resource discovery support.""" + with patch("core.mcp.auth.auth_flow.check_support_resource_discovery") as mock_check: + mock_check.return_value = (True, "https://auth.example.com") + + mock_response = Mock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = { + "authorization_endpoint": "https://auth.example.com/authorize", + "token_endpoint": "https://auth.example.com/token", + "response_types_supported": ["code"], + } + mock_get.return_value = mock_response + + metadata = discover_oauth_metadata("https://api.example.com") + + assert metadata is not None + assert metadata.authorization_endpoint == "https://auth.example.com/authorize" + assert metadata.token_endpoint == "https://auth.example.com/token" + mock_get.assert_called_once_with( + "https://auth.example.com/.well-known/oauth-authorization-server", + headers={"MCP-Protocol-Version": "2025-03-26"}, + ) + + @patch("core.helper.ssrf_proxy.get") + def test_discover_oauth_metadata_without_resource_discovery(self, mock_get): + """Test OAuth metadata discovery without resource discovery.""" + with patch("core.mcp.auth.auth_flow.check_support_resource_discovery") as mock_check: + mock_check.return_value = (False, "") + + mock_response = Mock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = { + "authorization_endpoint": "https://api.example.com/oauth/authorize", + "token_endpoint": "https://api.example.com/oauth/token", + "response_types_supported": ["code"], + } + mock_get.return_value = mock_response + + metadata = 
discover_oauth_metadata("https://api.example.com") + + assert metadata is not None + assert metadata.authorization_endpoint == "https://api.example.com/oauth/authorize" + mock_get.assert_called_once_with( + "https://api.example.com/.well-known/oauth-authorization-server", + headers={"MCP-Protocol-Version": "2025-03-26"}, + ) + + @patch("core.helper.ssrf_proxy.get") + def test_discover_oauth_metadata_not_found(self, mock_get): + """Test OAuth metadata discovery when not found.""" + with patch("core.mcp.auth.auth_flow.check_support_resource_discovery") as mock_check: + mock_check.return_value = (False, "") + + mock_response = Mock() + mock_response.status_code = 404 + mock_get.return_value = mock_response + + metadata = discover_oauth_metadata("https://api.example.com") + + assert metadata is None + + +class TestAuthorizationFlow: + """Test authorization flow functions.""" + + @patch("core.mcp.auth.auth_flow._create_secure_redis_state") + def test_start_authorization_with_metadata(self, mock_create_state): + """Test starting authorization with metadata.""" + mock_create_state.return_value = "secure-state-key" + + metadata = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + code_challenge_methods_supported=["S256"], + ) + client_info = OAuthClientInformation(client_id="test-client-id") + + auth_url, code_verifier = start_authorization( + "https://api.example.com", + metadata, + client_info, + "https://redirect.example.com", + "provider-id", + "tenant-id", + ) + + # Verify URL format + assert auth_url.startswith("https://auth.example.com/authorize?") + assert "response_type=code" in auth_url + assert "client_id=test-client-id" in auth_url + assert "code_challenge=" in auth_url + assert "code_challenge_method=S256" in auth_url + assert "redirect_uri=https%3A%2F%2Fredirect.example.com" in auth_url + assert "state=secure-state-key" in auth_url + + # Verify code verifier + assert len(code_verifier) > 40 + + # Verify state was stored + mock_create_state.assert_called_once() + state_data = mock_create_state.call_args[0][0] + assert state_data.provider_id == "provider-id" + assert state_data.tenant_id == "tenant-id" + assert state_data.code_verifier == code_verifier + + def test_start_authorization_without_metadata(self): + """Test starting authorization without metadata.""" + with patch("core.mcp.auth.auth_flow._create_secure_redis_state") as mock_create_state: + mock_create_state.return_value = "secure-state-key" + + client_info = OAuthClientInformation(client_id="test-client-id") + + auth_url, code_verifier = start_authorization( + "https://api.example.com", + None, + client_info, + "https://redirect.example.com", + "provider-id", + "tenant-id", + ) + + # Should use default authorization endpoint + assert auth_url.startswith("https://api.example.com/authorize?") + + def test_start_authorization_invalid_metadata(self): + """Test starting authorization with invalid metadata.""" + metadata = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["token"], # No "code" support + code_challenge_methods_supported=["plain"], # No "S256" support + ) + client_info = OAuthClientInformation(client_id="test-client-id") + + with pytest.raises(ValueError) as exc_info: + start_authorization( + "https://api.example.com", + metadata, + client_info, + "https://redirect.example.com", + "provider-id", 
+ "tenant-id", + ) + + assert "does not support response type code" in str(exc_info.value) + + @patch("core.helper.ssrf_proxy.post") + def test_exchange_authorization_success(self, mock_post): + """Test successful authorization code exchange.""" + mock_response = Mock() + mock_response.is_success = True + mock_response.json.return_value = { + "access_token": "new-access-token", + "token_type": "Bearer", + "expires_in": 3600, + "refresh_token": "new-refresh-token", + } + mock_post.return_value = mock_response + + metadata = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + grant_types_supported=["authorization_code"], + ) + client_info = OAuthClientInformation(client_id="test-client-id", client_secret="test-secret") + + tokens = exchange_authorization( + "https://api.example.com", + metadata, + client_info, + "auth-code-123", + "code-verifier-xyz", + "https://redirect.example.com", + ) + + assert tokens.access_token == "new-access-token" + assert tokens.token_type == "Bearer" + assert tokens.expires_in == 3600 + assert tokens.refresh_token == "new-refresh-token" + + # Verify request + mock_post.assert_called_once_with( + "https://auth.example.com/token", + data={ + "grant_type": "authorization_code", + "client_id": "test-client-id", + "client_secret": "test-secret", + "code": "auth-code-123", + "code_verifier": "code-verifier-xyz", + "redirect_uri": "https://redirect.example.com", + }, + ) + + @patch("core.helper.ssrf_proxy.post") + def test_exchange_authorization_failure(self, mock_post): + """Test failed authorization code exchange.""" + mock_response = Mock() + mock_response.is_success = False + mock_response.status_code = 400 + mock_post.return_value = mock_response + + client_info = OAuthClientInformation(client_id="test-client-id") + + with pytest.raises(ValueError) as exc_info: + exchange_authorization( + "https://api.example.com", + None, + client_info, + "invalid-code", + "code-verifier", + "https://redirect.example.com", + ) + + assert "Token exchange failed: HTTP 400" in str(exc_info.value) + + @patch("core.helper.ssrf_proxy.post") + def test_refresh_authorization_success(self, mock_post): + """Test successful token refresh.""" + mock_response = Mock() + mock_response.is_success = True + mock_response.json.return_value = { + "access_token": "refreshed-access-token", + "token_type": "Bearer", + "expires_in": 3600, + "refresh_token": "new-refresh-token", + } + mock_post.return_value = mock_response + + metadata = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + grant_types_supported=["refresh_token"], + ) + client_info = OAuthClientInformation(client_id="test-client-id") + + tokens = refresh_authorization("https://api.example.com", metadata, client_info, "old-refresh-token") + + assert tokens.access_token == "refreshed-access-token" + assert tokens.refresh_token == "new-refresh-token" + + # Verify request + mock_post.assert_called_once_with( + "https://auth.example.com/token", + data={ + "grant_type": "refresh_token", + "client_id": "test-client-id", + "refresh_token": "old-refresh-token", + }, + ) + + @patch("core.helper.ssrf_proxy.post") + def test_register_client_success(self, mock_post): + """Test successful client registration.""" + mock_response = Mock() + mock_response.is_success = True + mock_response.json.return_value = { + "client_id": 
"new-client-id", + "client_secret": "new-client-secret", + "client_name": "Dify", + "redirect_uris": ["https://redirect.example.com"], + } + mock_post.return_value = mock_response + + metadata = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + registration_endpoint="https://auth.example.com/register", + response_types_supported=["code"], + ) + client_metadata = OAuthClientMetadata( + client_name="Dify", + redirect_uris=["https://redirect.example.com"], + grant_types=["authorization_code"], + response_types=["code"], + ) + + client_info = register_client("https://api.example.com", metadata, client_metadata) + + assert isinstance(client_info, OAuthClientInformationFull) + assert client_info.client_id == "new-client-id" + assert client_info.client_secret == "new-client-secret" + + # Verify request + mock_post.assert_called_once_with( + "https://auth.example.com/register", + json=client_metadata.model_dump(), + headers={"Content-Type": "application/json"}, + ) + + def test_register_client_no_endpoint(self): + """Test client registration when no endpoint available.""" + metadata = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + registration_endpoint=None, + response_types_supported=["code"], + ) + client_metadata = OAuthClientMetadata(client_name="Dify", redirect_uris=["https://redirect.example.com"]) + + with pytest.raises(ValueError) as exc_info: + register_client("https://api.example.com", metadata, client_metadata) + + assert "does not support dynamic client registration" in str(exc_info.value) + + +class TestCallbackHandling: + """Test OAuth callback handling.""" + + @patch("core.mcp.auth.auth_flow._retrieve_redis_state") + @patch("core.mcp.auth.auth_flow.exchange_authorization") + def test_handle_callback_success(self, mock_exchange, mock_retrieve_state): + """Test successful callback handling.""" + # Setup state + state_data = OAuthCallbackState( + provider_id="test-provider", + tenant_id="test-tenant", + server_url="https://api.example.com", + metadata=None, + client_information=OAuthClientInformation(client_id="test-client"), + code_verifier="test-verifier", + redirect_uri="https://redirect.example.com", + ) + mock_retrieve_state.return_value = state_data + + # Setup token exchange + tokens = OAuthTokens( + access_token="new-token", + token_type="Bearer", + expires_in=3600, + ) + mock_exchange.return_value = tokens + + # Setup service + mock_service = Mock() + + state_result, tokens_result = handle_callback("state-key", "auth-code") + + assert state_result == state_data + assert tokens_result == tokens + + # Verify calls + mock_retrieve_state.assert_called_once_with("state-key") + mock_exchange.assert_called_once_with( + "https://api.example.com", + None, + state_data.client_information, + "auth-code", + "test-verifier", + "https://redirect.example.com", + ) + # Note: handle_callback no longer saves tokens directly, it just returns them + # The caller (e.g., controller) is responsible for saving via execute_auth_actions + + +class TestAuthOrchestration: + """Test the main auth orchestration function.""" + + @pytest.fixture + def mock_provider(self): + """Create a mock provider entity.""" + provider = Mock(spec=MCPProviderEntity) + provider.id = "provider-id" + provider.tenant_id = "tenant-id" + provider.decrypt_server_url.return_value = "https://api.example.com" + provider.client_metadata = OAuthClientMetadata( + 
client_name="Dify", + redirect_uris=["https://redirect.example.com"], + ) + provider.redirect_url = "https://redirect.example.com" + provider.retrieve_client_information.return_value = None + provider.retrieve_tokens.return_value = None + return provider + + @pytest.fixture + def mock_service(self): + """Create a mock MCP service.""" + return Mock() + + @patch("core.mcp.auth.auth_flow.discover_oauth_metadata") + @patch("core.mcp.auth.auth_flow.register_client") + @patch("core.mcp.auth.auth_flow.start_authorization") + def test_auth_new_registration(self, mock_start_auth, mock_register, mock_discover, mock_provider, mock_service): + """Test auth flow for new client registration.""" + # Setup + mock_discover.return_value = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + grant_types_supported=["authorization_code"], + ) + mock_register.return_value = OAuthClientInformationFull( + client_id="new-client-id", + client_name="Dify", + redirect_uris=["https://redirect.example.com"], + ) + mock_start_auth.return_value = ("https://auth.example.com/authorize?...", "code-verifier") + + result = auth(mock_provider) + + # auth() now returns AuthResult + assert isinstance(result, AuthResult) + assert result.response == {"authorization_url": "https://auth.example.com/authorize?..."} + + # Verify that the result contains the correct actions + assert len(result.actions) == 2 + # Check for SAVE_CLIENT_INFO action + client_info_action = next(a for a in result.actions if a.action_type == AuthActionType.SAVE_CLIENT_INFO) + assert client_info_action.data == {"client_information": mock_register.return_value.model_dump()} + assert client_info_action.provider_id == "provider-id" + assert client_info_action.tenant_id == "tenant-id" + + # Check for SAVE_CODE_VERIFIER action + verifier_action = next(a for a in result.actions if a.action_type == AuthActionType.SAVE_CODE_VERIFIER) + assert verifier_action.data == {"code_verifier": "code-verifier"} + assert verifier_action.provider_id == "provider-id" + assert verifier_action.tenant_id == "tenant-id" + + # Verify calls + mock_register.assert_called_once() + + @patch("core.mcp.auth.auth_flow.discover_oauth_metadata") + @patch("core.mcp.auth.auth_flow._retrieve_redis_state") + @patch("core.mcp.auth.auth_flow.exchange_authorization") + def test_auth_exchange_code(self, mock_exchange, mock_retrieve_state, mock_discover, mock_provider, mock_service): + """Test auth flow for exchanging authorization code.""" + # Setup metadata discovery + mock_discover.return_value = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + grant_types_supported=["authorization_code"], + ) + + # Setup existing client + mock_provider.retrieve_client_information.return_value = OAuthClientInformation(client_id="existing-client") + + # Setup state retrieval + state_data = OAuthCallbackState( + provider_id="provider-id", + tenant_id="tenant-id", + server_url="https://api.example.com", + metadata=None, + client_information=OAuthClientInformation(client_id="existing-client"), + code_verifier="test-verifier", + redirect_uri="https://redirect.example.com", + ) + mock_retrieve_state.return_value = state_data + + # Setup token exchange + tokens = OAuthTokens(access_token="new-token", token_type="Bearer", expires_in=3600) + mock_exchange.return_value = tokens + + result = 
auth(mock_provider, authorization_code="auth-code", state_param="state-key") + + # auth() now returns AuthResult, not a dict + assert isinstance(result, AuthResult) + assert result.response == {"result": "success"} + + # Verify that the result contains the correct action + assert len(result.actions) == 1 + assert result.actions[0].action_type == AuthActionType.SAVE_TOKENS + assert result.actions[0].data == tokens.model_dump() + assert result.actions[0].provider_id == "provider-id" + assert result.actions[0].tenant_id == "tenant-id" + + @patch("core.mcp.auth.auth_flow.discover_oauth_metadata") + def test_auth_exchange_code_without_state(self, mock_discover, mock_provider, mock_service): + """Test auth flow fails when exchanging code without state.""" + # Setup metadata discovery + mock_discover.return_value = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + grant_types_supported=["authorization_code"], + ) + + mock_provider.retrieve_client_information.return_value = OAuthClientInformation(client_id="existing-client") + + with pytest.raises(ValueError) as exc_info: + auth(mock_provider, authorization_code="auth-code") + + assert "State parameter is required" in str(exc_info.value) + + @patch("core.mcp.auth.auth_flow.refresh_authorization") + def test_auth_refresh_token(self, mock_refresh, mock_provider, mock_service): + """Test auth flow for refreshing tokens.""" + # Setup existing client and tokens + mock_provider.retrieve_client_information.return_value = OAuthClientInformation(client_id="existing-client") + mock_provider.retrieve_tokens.return_value = OAuthTokens( + access_token="old-token", + token_type="Bearer", + expires_in=0, + refresh_token="refresh-token", + ) + + # Setup refresh + new_tokens = OAuthTokens( + access_token="refreshed-token", + token_type="Bearer", + expires_in=3600, + refresh_token="new-refresh-token", + ) + mock_refresh.return_value = new_tokens + + with patch("core.mcp.auth.auth_flow.discover_oauth_metadata") as mock_discover: + mock_discover.return_value = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + grant_types_supported=["authorization_code"], + ) + + result = auth(mock_provider) + + # auth() now returns AuthResult + assert isinstance(result, AuthResult) + assert result.response == {"result": "success"} + + # Verify that the result contains the correct action + assert len(result.actions) == 1 + assert result.actions[0].action_type == AuthActionType.SAVE_TOKENS + assert result.actions[0].data == new_tokens.model_dump() + assert result.actions[0].provider_id == "provider-id" + assert result.actions[0].tenant_id == "tenant-id" + + # Verify refresh was called + mock_refresh.assert_called_once() + + @patch("core.mcp.auth.auth_flow.discover_oauth_metadata") + def test_auth_registration_fails_with_code(self, mock_discover, mock_provider, mock_service): + """Test auth fails when no client info exists but code is provided.""" + # Setup metadata discovery + mock_discover.return_value = OAuthMetadata( + authorization_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + response_types_supported=["code"], + grant_types_supported=["authorization_code"], + ) + + mock_provider.retrieve_client_information.return_value = None + + with pytest.raises(ValueError) as exc_info: + auth(mock_provider, 
authorization_code="auth-code") + + assert "Existing OAuth client information is required" in str(exc_info.value) diff --git a/api/tests/unit_tests/core/mcp/server/test_streamable_http.py b/api/tests/unit_tests/core/mcp/server/test_streamable_http.py index 895ebdd751..fe9f0935d5 100644 --- a/api/tests/unit_tests/core/mcp/server/test_streamable_http.py +++ b/api/tests/unit_tests/core/mcp/server/test_streamable_http.py @@ -235,7 +235,7 @@ class TestIndividualHandlers: # Type assertion needed due to union type text_content = result.content[0] assert hasattr(text_content, "text") - assert text_content.text == "test answer" # type: ignore[attr-defined] + assert text_content.text == "test answer" def test_handle_call_tool_no_end_user(self): """Test call tool handler without end user""" diff --git a/api/tests/unit_tests/core/mcp/test_auth_client_inheritance.py b/api/tests/unit_tests/core/mcp/test_auth_client_inheritance.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/mcp/test_entities.py b/api/tests/unit_tests/core/mcp/test_entities.py new file mode 100644 index 0000000000..3fede55916 --- /dev/null +++ b/api/tests/unit_tests/core/mcp/test_entities.py @@ -0,0 +1,239 @@ +"""Unit tests for MCP entities module.""" + +from unittest.mock import Mock + +from core.mcp.entities import ( + SUPPORTED_PROTOCOL_VERSIONS, + LifespanContextT, + RequestContext, + SessionT, +) +from core.mcp.session.base_session import BaseSession +from core.mcp.types import LATEST_PROTOCOL_VERSION, RequestParams + + +class TestProtocolVersions: + """Test protocol version constants.""" + + def test_supported_protocol_versions(self): + """Test supported protocol versions list.""" + assert isinstance(SUPPORTED_PROTOCOL_VERSIONS, list) + assert len(SUPPORTED_PROTOCOL_VERSIONS) >= 3 + assert "2024-11-05" in SUPPORTED_PROTOCOL_VERSIONS + assert "2025-03-26" in SUPPORTED_PROTOCOL_VERSIONS + assert LATEST_PROTOCOL_VERSION in SUPPORTED_PROTOCOL_VERSIONS + + def test_latest_protocol_version_is_supported(self): + """Test that latest protocol version is in supported versions.""" + assert LATEST_PROTOCOL_VERSION in SUPPORTED_PROTOCOL_VERSIONS + + +class TestRequestContext: + """Test RequestContext dataclass.""" + + def test_request_context_creation(self): + """Test creating a RequestContext instance.""" + mock_session = Mock(spec=BaseSession) + mock_lifespan = {"key": "value"} + mock_meta = RequestParams.Meta(progressToken="test-token") + + context = RequestContext( + request_id="test-request-123", + meta=mock_meta, + session=mock_session, + lifespan_context=mock_lifespan, + ) + + assert context.request_id == "test-request-123" + assert context.meta == mock_meta + assert context.session == mock_session + assert context.lifespan_context == mock_lifespan + + def test_request_context_with_none_meta(self): + """Test creating RequestContext with None meta.""" + mock_session = Mock(spec=BaseSession) + + context = RequestContext( + request_id=42, # Can be int or string + meta=None, + session=mock_session, + lifespan_context=None, + ) + + assert context.request_id == 42 + assert context.meta is None + assert context.session == mock_session + assert context.lifespan_context is None + + def test_request_context_attributes(self): + """Test RequestContext attributes are accessible.""" + mock_session = Mock(spec=BaseSession) + + context = RequestContext( + request_id="test-123", + meta=None, + session=mock_session, + lifespan_context=None, + ) + + # Verify attributes are accessible + assert hasattr(context, 
"request_id") + assert hasattr(context, "meta") + assert hasattr(context, "session") + assert hasattr(context, "lifespan_context") + + # Verify values + assert context.request_id == "test-123" + assert context.meta is None + assert context.session == mock_session + assert context.lifespan_context is None + + def test_request_context_generic_typing(self): + """Test RequestContext with different generic types.""" + # Create a mock session with specific type + mock_session = Mock(spec=BaseSession) + + # Create context with string lifespan context + context_str = RequestContext[BaseSession, str]( + request_id="test-1", + meta=None, + session=mock_session, + lifespan_context="string-context", + ) + assert isinstance(context_str.lifespan_context, str) + + # Create context with dict lifespan context + context_dict = RequestContext[BaseSession, dict]( + request_id="test-2", + meta=None, + session=mock_session, + lifespan_context={"key": "value"}, + ) + assert isinstance(context_dict.lifespan_context, dict) + + # Create context with custom object lifespan context + class CustomLifespan: + def __init__(self, data): + self.data = data + + custom_lifespan = CustomLifespan("test-data") + context_custom = RequestContext[BaseSession, CustomLifespan]( + request_id="test-3", + meta=None, + session=mock_session, + lifespan_context=custom_lifespan, + ) + assert isinstance(context_custom.lifespan_context, CustomLifespan) + assert context_custom.lifespan_context.data == "test-data" + + def test_request_context_with_progress_meta(self): + """Test RequestContext with progress metadata.""" + mock_session = Mock(spec=BaseSession) + progress_meta = RequestParams.Meta(progressToken="progress-123") + + context = RequestContext( + request_id="req-456", + meta=progress_meta, + session=mock_session, + lifespan_context=None, + ) + + assert context.meta is not None + assert context.meta.progressToken == "progress-123" + + def test_request_context_equality(self): + """Test RequestContext equality comparison.""" + mock_session1 = Mock(spec=BaseSession) + mock_session2 = Mock(spec=BaseSession) + + context1 = RequestContext( + request_id="test-123", + meta=None, + session=mock_session1, + lifespan_context="context", + ) + + context2 = RequestContext( + request_id="test-123", + meta=None, + session=mock_session1, + lifespan_context="context", + ) + + context3 = RequestContext( + request_id="test-456", + meta=None, + session=mock_session1, + lifespan_context="context", + ) + + # Same values should be equal + assert context1 == context2 + + # Different request_id should not be equal + assert context1 != context3 + + # Different session should not be equal + context4 = RequestContext( + request_id="test-123", + meta=None, + session=mock_session2, + lifespan_context="context", + ) + assert context1 != context4 + + def test_request_context_repr(self): + """Test RequestContext string representation.""" + mock_session = Mock(spec=BaseSession) + mock_session.__repr__ = Mock(return_value="") + + context = RequestContext( + request_id="test-123", + meta=None, + session=mock_session, + lifespan_context={"data": "test"}, + ) + + repr_str = repr(context) + assert "RequestContext" in repr_str + assert "test-123" in repr_str + assert "MockSession" in repr_str + + +class TestTypeVariables: + """Test type variables defined in the module.""" + + def test_session_type_var(self): + """Test SessionT type variable.""" + + # Create a custom session class + class CustomSession(BaseSession): + pass + + # Use in generic context + def 
process_session(session: SessionT) -> SessionT: + return session + + mock_session = Mock(spec=CustomSession) + result = process_session(mock_session) + assert result == mock_session + + def test_lifespan_context_type_var(self): + """Test LifespanContextT type variable.""" + + # Use in generic context + def process_lifespan(context: LifespanContextT) -> LifespanContextT: + return context + + # Test with different types + str_context = "string-context" + assert process_lifespan(str_context) == str_context + + dict_context = {"key": "value"} + assert process_lifespan(dict_context) == dict_context + + class CustomContext: + pass + + custom_context = CustomContext() + assert process_lifespan(custom_context) == custom_context diff --git a/api/tests/unit_tests/core/mcp/test_error.py b/api/tests/unit_tests/core/mcp/test_error.py new file mode 100644 index 0000000000..3a95fae673 --- /dev/null +++ b/api/tests/unit_tests/core/mcp/test_error.py @@ -0,0 +1,205 @@ +"""Unit tests for MCP error classes.""" + +import pytest + +from core.mcp.error import MCPAuthError, MCPConnectionError, MCPError + + +class TestMCPError: + """Test MCPError base exception class.""" + + def test_mcp_error_creation(self): + """Test creating MCPError instance.""" + error = MCPError("Test error message") + assert str(error) == "Test error message" + assert isinstance(error, Exception) + + def test_mcp_error_inheritance(self): + """Test MCPError inherits from Exception.""" + error = MCPError() + assert isinstance(error, Exception) + assert type(error).__name__ == "MCPError" + + def test_mcp_error_with_empty_message(self): + """Test MCPError with empty message.""" + error = MCPError() + assert str(error) == "" + + def test_mcp_error_raise(self): + """Test raising MCPError.""" + with pytest.raises(MCPError) as exc_info: + raise MCPError("Something went wrong") + + assert str(exc_info.value) == "Something went wrong" + + +class TestMCPConnectionError: + """Test MCPConnectionError exception class.""" + + def test_mcp_connection_error_creation(self): + """Test creating MCPConnectionError instance.""" + error = MCPConnectionError("Connection failed") + assert str(error) == "Connection failed" + assert isinstance(error, MCPError) + assert isinstance(error, Exception) + + def test_mcp_connection_error_inheritance(self): + """Test MCPConnectionError inheritance chain.""" + error = MCPConnectionError() + assert isinstance(error, MCPConnectionError) + assert isinstance(error, MCPError) + assert isinstance(error, Exception) + + def test_mcp_connection_error_raise(self): + """Test raising MCPConnectionError.""" + with pytest.raises(MCPConnectionError) as exc_info: + raise MCPConnectionError("Unable to connect to server") + + assert str(exc_info.value) == "Unable to connect to server" + + def test_mcp_connection_error_catch_as_mcp_error(self): + """Test catching MCPConnectionError as MCPError.""" + with pytest.raises(MCPError) as exc_info: + raise MCPConnectionError("Connection issue") + + assert isinstance(exc_info.value, MCPConnectionError) + assert str(exc_info.value) == "Connection issue" + + +class TestMCPAuthError: + """Test MCPAuthError exception class.""" + + def test_mcp_auth_error_creation(self): + """Test creating MCPAuthError instance.""" + error = MCPAuthError("Authentication failed") + assert str(error) == "Authentication failed" + assert isinstance(error, MCPConnectionError) + assert isinstance(error, MCPError) + assert isinstance(error, Exception) + + def test_mcp_auth_error_inheritance(self): + """Test MCPAuthError inheritance 
chain.""" + error = MCPAuthError() + assert isinstance(error, MCPAuthError) + assert isinstance(error, MCPConnectionError) + assert isinstance(error, MCPError) + assert isinstance(error, Exception) + + def test_mcp_auth_error_raise(self): + """Test raising MCPAuthError.""" + with pytest.raises(MCPAuthError) as exc_info: + raise MCPAuthError("Invalid credentials") + + assert str(exc_info.value) == "Invalid credentials" + + def test_mcp_auth_error_catch_hierarchy(self): + """Test catching MCPAuthError at different levels.""" + # Catch as MCPAuthError + with pytest.raises(MCPAuthError) as exc_info: + raise MCPAuthError("Auth specific error") + assert str(exc_info.value) == "Auth specific error" + + # Catch as MCPConnectionError + with pytest.raises(MCPConnectionError) as exc_info: + raise MCPAuthError("Auth connection error") + assert isinstance(exc_info.value, MCPAuthError) + assert str(exc_info.value) == "Auth connection error" + + # Catch as MCPError + with pytest.raises(MCPError) as exc_info: + raise MCPAuthError("Auth base error") + assert isinstance(exc_info.value, MCPAuthError) + assert str(exc_info.value) == "Auth base error" + + +class TestErrorHierarchy: + """Test the complete error hierarchy.""" + + def test_exception_hierarchy(self): + """Test the complete exception hierarchy.""" + # Create instances + base_error = MCPError("base") + connection_error = MCPConnectionError("connection") + auth_error = MCPAuthError("auth") + + # Test type relationships + assert not isinstance(base_error, MCPConnectionError) + assert not isinstance(base_error, MCPAuthError) + + assert isinstance(connection_error, MCPError) + assert not isinstance(connection_error, MCPAuthError) + + assert isinstance(auth_error, MCPError) + assert isinstance(auth_error, MCPConnectionError) + + def test_error_handling_patterns(self): + """Test common error handling patterns.""" + + def raise_auth_error(): + raise MCPAuthError("401 Unauthorized") + + def raise_connection_error(): + raise MCPConnectionError("Connection timeout") + + def raise_base_error(): + raise MCPError("Generic error") + + # Pattern 1: Catch specific errors first + errors_caught = [] + + for error_func in [raise_auth_error, raise_connection_error, raise_base_error]: + try: + error_func() + except MCPAuthError: + errors_caught.append("auth") + except MCPConnectionError: + errors_caught.append("connection") + except MCPError: + errors_caught.append("base") + + assert errors_caught == ["auth", "connection", "base"] + + # Pattern 2: Catch all as base error + for error_func in [raise_auth_error, raise_connection_error, raise_base_error]: + with pytest.raises(MCPError) as exc_info: + error_func() + assert isinstance(exc_info.value, MCPError) + + def test_error_with_cause(self): + """Test errors with cause (chained exceptions).""" + original_error = ValueError("Original error") + + def raise_chained_error(): + try: + raise original_error + except ValueError as e: + raise MCPConnectionError("Connection failed") from e + + with pytest.raises(MCPConnectionError) as exc_info: + raise_chained_error() + + assert str(exc_info.value) == "Connection failed" + assert exc_info.value.__cause__ == original_error + + def test_error_comparison(self): + """Test error instance comparison.""" + error1 = MCPError("Test message") + error2 = MCPError("Test message") + error3 = MCPError("Different message") + + # Errors are not equal even with same message (different instances) + assert error1 != error2 + assert error1 != error3 + + # But they have the same type + assert 
+
+        for error_func in [raise_auth_error, raise_connection_error, raise_base_error]:
+            try:
+                error_func()
+            except MCPAuthError:
+                errors_caught.append("auth")
+            except MCPConnectionError:
+                errors_caught.append("connection")
+            except MCPError:
+                errors_caught.append("base")
+
+        assert errors_caught == ["auth", "connection", "base"]
+
+        # Pattern 2: Catch all as base error
+        for error_func in [raise_auth_error, raise_connection_error, raise_base_error]:
+            with pytest.raises(MCPError) as exc_info:
+                error_func()
+            assert isinstance(exc_info.value, MCPError)
+
+    def test_error_with_cause(self):
+        """Test errors with a cause (chained exceptions)."""
+        original_error = ValueError("Original error")
+
+        def raise_chained_error():
+            try:
+                raise original_error
+            except ValueError as e:
+                raise MCPConnectionError("Connection failed") from e
+
+        with pytest.raises(MCPConnectionError) as exc_info:
+            raise_chained_error()
+
+        assert str(exc_info.value) == "Connection failed"
+        assert exc_info.value.__cause__ == original_error
+
+    def test_error_comparison(self):
+        """Test error instance comparison."""
+        error1 = MCPError("Test message")
+        error2 = MCPError("Test message")
+        error3 = MCPError("Different message")
+
+        # Errors are not equal even with the same message (different instances)
+        assert error1 != error2
+        assert error1 != error3
+
+        # But they have the same type
+        assert type(error1) == type(error2) == type(error3)
+
+    def test_error_representation(self):
+        """Test error string representation."""
+        base_error = MCPError("Base error message")
+        connection_error = MCPConnectionError("Connection error message")
+        auth_error = MCPAuthError("Auth error message")
+
+        assert repr(base_error) == "MCPError('Base error message')"
+        assert repr(connection_error) == "MCPConnectionError('Connection error message')"
+        assert repr(auth_error) == "MCPAuthError('Auth error message')"
diff --git a/api/tests/unit_tests/core/mcp/test_mcp_client.py b/api/tests/unit_tests/core/mcp/test_mcp_client.py
new file mode 100644
index 0000000000..c0420d3371
--- /dev/null
+++ b/api/tests/unit_tests/core/mcp/test_mcp_client.py
@@ -0,0 +1,382 @@
+"""Unit tests for MCP client."""
+
+from contextlib import ExitStack
+from types import TracebackType
+from unittest.mock import Mock, patch
+
+import pytest
+
+from core.mcp.error import MCPConnectionError
+from core.mcp.mcp_client import MCPClient
+from core.mcp.types import CallToolResult, ListToolsResult, TextContent, Tool, ToolAnnotations
+
+
+class TestMCPClient:
+    """Test suite for MCPClient."""
+
+    def test_init(self):
+        """Test client initialization."""
+        client = MCPClient(
+            server_url="http://test.example.com/mcp",
+            headers={"Authorization": "Bearer test"},
+            timeout=30.0,
+            sse_read_timeout=60.0,
+        )
+
+        assert client.server_url == "http://test.example.com/mcp"
+        assert client.headers == {"Authorization": "Bearer test"}
+        assert client.timeout == 30.0
+        assert client.sse_read_timeout == 60.0
+        assert client._session is None
+        assert isinstance(client._exit_stack, ExitStack)
+        assert client._initialized is False
+
+    def test_init_defaults(self):
+        """Test client initialization with defaults."""
+        client = MCPClient(server_url="http://test.example.com")
+
+        assert client.server_url == "http://test.example.com"
+        assert client.headers == {}
+        assert client.timeout is None
+        assert client.sse_read_timeout is None
+
+    @patch("core.mcp.mcp_client.streamablehttp_client")
+    @patch("core.mcp.mcp_client.ClientSession")
+    def test_initialize_with_mcp_url(self, mock_client_session, mock_streamable_client):
+        """Test initialization with MCP URL."""
+        # Setup mocks
+        mock_read_stream = Mock()
+        mock_write_stream = Mock()
+        mock_client_context = Mock()
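+        # streamablehttp_client is assumed to yield a (read, write, context)
+        # triple here, while sse_client (exercised below) yields only a
+        # (read, write) pair; the mock return values mirror that difference.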
+        mock_streamable_client.return_value.__enter__.return_value = (
+            mock_read_stream,
+            mock_write_stream,
+            mock_client_context,
+        )
+
+        mock_session = Mock()
+        mock_client_session.return_value.__enter__.return_value = mock_session
+
+        client = MCPClient(server_url="http://test.example.com/mcp")
+        client._initialize()
+
+        # Verify streamable client was called
+        mock_streamable_client.assert_called_once_with(
+            url="http://test.example.com/mcp",
+            headers={},
+            timeout=None,
+            sse_read_timeout=None,
+        )
+
+        # Verify session was created
+        mock_client_session.assert_called_once_with(mock_read_stream, mock_write_stream)
+        mock_session.initialize.assert_called_once()
+        assert client._session == mock_session
+
+    @patch("core.mcp.mcp_client.sse_client")
+    @patch("core.mcp.mcp_client.ClientSession")
+    def test_initialize_with_sse_url(self, mock_client_session, mock_sse_client):
+        """Test initialization with SSE URL."""
+        # Setup mocks
+        mock_read_stream = Mock()
+        mock_write_stream = Mock()
+        mock_sse_client.return_value.__enter__.return_value = (mock_read_stream, mock_write_stream)
+
+        mock_session = Mock()
+        mock_client_session.return_value.__enter__.return_value = mock_session
+
+        client = MCPClient(server_url="http://test.example.com/sse")
+        client._initialize()
+
+        # Verify SSE client was called
+        mock_sse_client.assert_called_once_with(
+            url="http://test.example.com/sse",
+            headers={},
+            timeout=None,
+            sse_read_timeout=None,
+        )
+
+        # Verify session was created
+        mock_client_session.assert_called_once_with(mock_read_stream, mock_write_stream)
+        mock_session.initialize.assert_called_once()
+        assert client._session == mock_session
+
+    @patch("core.mcp.mcp_client.sse_client")
+    @patch("core.mcp.mcp_client.streamablehttp_client")
+    @patch("core.mcp.mcp_client.ClientSession")
+    def test_initialize_with_unknown_method_fallback_to_sse(
+        self, mock_client_session, mock_streamable_client, mock_sse_client
+    ):
+        """Test that initialization with an unknown URL type falls back to SSE."""
+        # Setup mocks
+        mock_read_stream = Mock()
+        mock_write_stream = Mock()
+        mock_sse_client.return_value.__enter__.return_value = (mock_read_stream, mock_write_stream)
+
+        mock_session = Mock()
+        mock_client_session.return_value.__enter__.return_value = mock_session
+
+        client = MCPClient(server_url="http://test.example.com/unknown")
+        client._initialize()
+
+        # Verify SSE client was tried
+        mock_sse_client.assert_called_once()
+        mock_streamable_client.assert_not_called()
+
+        # Verify session was created
+        assert client._session == mock_session
+
+    @patch("core.mcp.mcp_client.sse_client")
+    @patch("core.mcp.mcp_client.streamablehttp_client")
+    @patch("core.mcp.mcp_client.ClientSession")
+    def test_initialize_fallback_from_sse_to_mcp(self, mock_client_session, mock_streamable_client, mock_sse_client):
+        """Test initialization falls back from SSE to MCP on connection error."""
+        # Setup SSE to fail
+        mock_sse_client.side_effect = MCPConnectionError("SSE connection failed")
+
+        # Setup MCP to succeed
+        mock_read_stream = Mock()
+        mock_write_stream = Mock()
+        mock_client_context = Mock()
+        mock_streamable_client.return_value.__enter__.return_value = (
+            mock_read_stream,
+            mock_write_stream,
+            mock_client_context,
+        )
+
+        mock_session = Mock()
+        mock_client_session.return_value.__enter__.return_value = mock_session
+
+        client = MCPClient(server_url="http://test.example.com/unknown")
+        client._initialize()
+
+        # Verify both were tried
+        mock_sse_client.assert_called_once()
+        mock_streamable_client.assert_called_once()
+
+        # Verify session was created with MCP
+        assert client._session == mock_session
+
+    @patch("core.mcp.mcp_client.streamablehttp_client")
+    @patch("core.mcp.mcp_client.ClientSession")
+    def test_connect_server_mcp(self, mock_client_session, mock_streamable_client):
+        """Test connect_server with MCP method."""
+        # Setup mocks
+        mock_read_stream = Mock()
+        mock_write_stream = Mock()
+        mock_client_context = Mock()
+        mock_streamable_client.return_value.__enter__.return_value = (
+            mock_read_stream,
+            mock_write_stream,
+            mock_client_context,
+        )
+
+        mock_session = Mock()
+        mock_client_session.return_value.__enter__.return_value = mock_session
+
+        client = MCPClient(server_url="http://test.example.com")
+        client.connect_server(mock_streamable_client, "mcp")
+
+        # Verify correct streams were passed
+        mock_client_session.assert_called_once_with(mock_read_stream, mock_write_stream)
+        mock_session.initialize.assert_called_once()
+
+    @patch("core.mcp.mcp_client.sse_client")
+    @patch("core.mcp.mcp_client.ClientSession")
+    def test_connect_server_sse(self, mock_client_session, mock_sse_client):
+        """Test connect_server with SSE method."""
+        # Setup mocks
+        mock_read_stream = Mock()
+        mock_write_stream = Mock()
+        mock_sse_client.return_value.__enter__.return_value = (mock_read_stream, mock_write_stream)
+
+        mock_session = Mock()
+        mock_client_session.return_value.__enter__.return_value = mock_session
+
+        client = MCPClient(server_url="http://test.example.com")
+        client.connect_server(mock_sse_client, "sse")
+
+        # Verify correct streams were passed
+        mock_client_session.assert_called_once_with(mock_read_stream, mock_write_stream)
+        mock_session.initialize.assert_called_once()
+
+    def test_context_manager_enter(self):
+        """Test context manager enter."""
+        client = MCPClient(server_url="http://test.example.com")
+
+        with patch.object(client, "_initialize") as mock_initialize:
+            result = client.__enter__()
+
+            assert result == client
+            assert client._initialized is True
+            mock_initialize.assert_called_once()
+
+    def test_context_manager_exit(self):
+        """Test context manager exit."""
+        client = MCPClient(server_url="http://test.example.com")
+
+        with patch.object(client, "cleanup") as mock_cleanup:
+            exc_type: type[BaseException] | None = None
+            exc_val: BaseException | None = None
+            exc_tb: TracebackType | None = None
+            client.__exit__(exc_type, exc_val, exc_tb)
+
+            mock_cleanup.assert_called_once()
+
+    def test_list_tools_not_initialized(self):
+        """Test list_tools when session not initialized."""
+        client = MCPClient(server_url="http://test.example.com")
+
+        with pytest.raises(ValueError) as exc_info:
+            client.list_tools()
+
+        assert "Session not initialized" in str(exc_info.value)
+
+    def test_list_tools_success(self):
+        """Test successful list_tools call."""
+        client = MCPClient(server_url="http://test.example.com")
+
+        # Setup mock session
+        mock_session = Mock()
+        expected_tools = [
+            Tool(
+                name="test-tool",
+                description="A test tool",
+                inputSchema={"type": "object", "properties": {}},
+                annotations=ToolAnnotations(title="Test Tool"),
+            )
+        ]
+        mock_session.list_tools.return_value = ListToolsResult(tools=expected_tools)
+        client._session = mock_session
+
+        result = client.list_tools()
+
+        assert result == expected_tools
+        mock_session.list_tools.assert_called_once()
+
+    def test_invoke_tool_not_initialized(self):
+        """Test invoke_tool when session not initialized."""
+        client = MCPClient(server_url="http://test.example.com")
+
+        with pytest.raises(ValueError) as exc_info:
+            client.invoke_tool("test-tool", {"arg": "value"})
+
+        assert "Session not initialized" in str(exc_info.value)
+
+    def test_invoke_tool_success(self):
+        """Test successful invoke_tool call."""
+        client = MCPClient(server_url="http://test.example.com")
+
+        # Setup mock session
+        mock_session = Mock()
+        expected_result = CallToolResult(
+            content=[TextContent(type="text", text="Tool executed successfully")],
+            isError=False,
+        )
+        mock_session.call_tool.return_value = expected_result
+        client._session = mock_session
+
+        result = client.invoke_tool("test-tool", {"arg": "value"})
+
+        assert result == expected_result
+        mock_session.call_tool.assert_called_once_with("test-tool", {"arg": "value"})
+
+    def test_cleanup(self):
+        """Test cleanup method."""
+        client = MCPClient(server_url="http://test.example.com")
+        mock_exit_stack = Mock(spec=ExitStack)
+        client._exit_stack = mock_exit_stack
+        client._session = Mock()
+        client._initialized = True
+
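+        # cleanup() is expected to close the ExitStack, unwinding whatever
+        # session/transport contexts were registered during initialization.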
+        client.cleanup()
+
+        mock_exit_stack.close.assert_called_once()
+        assert client._session is None
+        assert client._initialized is False
+
+    def test_cleanup_with_error(self):
+        """Test cleanup method with error."""
+        client = MCPClient(server_url="http://test.example.com")
+        mock_exit_stack = Mock(spec=ExitStack)
+        mock_exit_stack.close.side_effect = Exception("Cleanup error")
+        client._exit_stack = mock_exit_stack
+        client._session = Mock()
+        client._initialized = True
+
+        with pytest.raises(ValueError) as exc_info:
+            client.cleanup()
+
+        assert "Error during cleanup: Cleanup error" in str(exc_info.value)
+        assert client._session is None
+        assert client._initialized is False
+
+    @patch("core.mcp.mcp_client.streamablehttp_client")
+    @patch("core.mcp.mcp_client.ClientSession")
+    def test_full_context_manager_flow(self, mock_client_session, mock_streamable_client):
+        """Test full context manager flow."""
+        # Setup mocks
+        mock_read_stream = Mock()
+        mock_write_stream = Mock()
+        mock_client_context = Mock()
+        mock_streamable_client.return_value.__enter__.return_value = (
+            mock_read_stream,
+            mock_write_stream,
+            mock_client_context,
+        )
+
+        mock_session = Mock()
+        mock_client_session.return_value.__enter__.return_value = mock_session
+
+        expected_tools = [Tool(name="test-tool", description="Test", inputSchema={})]
+        mock_session.list_tools.return_value = ListToolsResult(tools=expected_tools)
+
+        with MCPClient(server_url="http://test.example.com/mcp") as client:
+            assert client._initialized is True
+            assert client._session == mock_session
+
+            # Test tool operations
+            tools = client.list_tools()
+            assert tools == expected_tools
+
+        # After exit, should be cleaned up
+        assert client._initialized is False
+        assert client._session is None
+
+    def test_headers_passed_to_clients(self):
+        """Test that headers are properly passed to underlying clients."""
+        custom_headers = {
+            "Authorization": "Bearer test-token",
+            "X-Custom-Header": "test-value",
+        }
+
+        with patch("core.mcp.mcp_client.streamablehttp_client") as mock_streamable_client:
+            with patch("core.mcp.mcp_client.ClientSession") as mock_client_session:
+                # Setup mocks
+                mock_read_stream = Mock()
+                mock_write_stream = Mock()
+                mock_client_context = Mock()
+                mock_streamable_client.return_value.__enter__.return_value = (
+                    mock_read_stream,
+                    mock_write_stream,
+                    mock_client_context,
+                )
+
+                mock_session = Mock()
+                mock_client_session.return_value.__enter__.return_value = mock_session
+
+                client = MCPClient(
+                    server_url="http://test.example.com/mcp",
+                    headers=custom_headers,
+                    timeout=30.0,
+                    sse_read_timeout=60.0,
+                )
+                client._initialize()
+
+                # Verify headers were passed
+                mock_streamable_client.assert_called_once_with(
+                    url="http://test.example.com/mcp",
+                    headers=custom_headers,
+                    timeout=30.0,
+                    sse_read_timeout=60.0,
+                )
diff --git a/api/tests/unit_tests/core/mcp/test_types.py b/api/tests/unit_tests/core/mcp/test_types.py
new file mode 100644
index 0000000000..6d8130bd13
--- /dev/null
+++ b/api/tests/unit_tests/core/mcp/test_types.py
@@ -0,0 +1,492 @@
+"""Unit tests for MCP types module."""
+
+import pytest
+from pydantic import ValidationError
+
+from core.mcp.types import (
+    INTERNAL_ERROR,
+    INVALID_PARAMS,
+    INVALID_REQUEST,
+    LATEST_PROTOCOL_VERSION,
+    METHOD_NOT_FOUND,
+    PARSE_ERROR,
+    SERVER_LATEST_PROTOCOL_VERSION,
+    Annotations,
+    CallToolRequest,
+    CallToolRequestParams,
+    CallToolResult,
+    ClientCapabilities,
+    CompleteRequest,
+    CompleteRequestParams,
+    CompleteResult,
+    Completion,
+    CompletionArgument,
+    CompletionContext,
+    ErrorData,
+    ImageContent,
+    Implementation,
+    InitializeRequest,
+    InitializeRequestParams,
+    InitializeResult,
+    JSONRPCError,
+    JSONRPCMessage,
+    JSONRPCNotification,
+    JSONRPCRequest,
+    JSONRPCResponse,
+    ListToolsRequest,
+    ListToolsResult,
+    OAuthClientInformation,
+    OAuthClientMetadata,
+    OAuthMetadata,
+    OAuthTokens,
+    PingRequest,
+    ProgressNotification,
+    ProgressNotificationParams,
+    PromptReference,
+    RequestParams,
+    ResourceTemplateReference,
+    Result,
+    ServerCapabilities,
+    TextContent,
+    Tool,
+    ToolAnnotations,
+)
+
+
+class TestConstants:
+    """Test module constants."""
+
+    def test_protocol_versions(self):
+        """Test protocol version constants."""
+        assert LATEST_PROTOCOL_VERSION == "2025-03-26"
+        assert SERVER_LATEST_PROTOCOL_VERSION == "2024-11-05"
+
+    def test_error_codes(self):
+        """Test JSON-RPC error code constants."""
+        assert PARSE_ERROR == -32700
+        assert INVALID_REQUEST == -32600
+        assert METHOD_NOT_FOUND == -32601
+        assert INVALID_PARAMS == -32602
+        assert INTERNAL_ERROR == -32603
+
+
+class TestRequestParams:
+    """Test RequestParams and related classes."""
+
+    def test_request_params_basic(self):
+        """Test basic RequestParams creation."""
+        params = RequestParams()
+        assert params.meta is None
+
+    def test_request_params_with_meta(self):
+        """Test RequestParams with meta."""
+        meta = RequestParams.Meta(progressToken="test-token")
+        params = RequestParams(_meta=meta)
+        assert params.meta is not None
+        assert params.meta.progressToken == "test-token"
+
+    def test_request_params_meta_extra_fields(self):
+        """Test RequestParams.Meta allows extra fields."""
+        meta = RequestParams.Meta(progressToken="token", customField="value")
+        assert meta.progressToken == "token"
+        assert meta.customField == "value"  # type: ignore
+
+    def test_request_params_serialization(self):
+        """Test RequestParams serialization with _meta alias."""
+        meta = RequestParams.Meta(progressToken="test")
+        params = RequestParams(_meta=meta)
+
+        # Model dump should use the alias
+        dumped = params.model_dump(by_alias=True)
+        assert "_meta" in dumped
+        assert dumped["_meta"] is not None
+        assert dumped["_meta"]["progressToken"] == "test"
+
+
+class TestJSONRPCMessages:
+    """Test JSON-RPC message types."""
+
+    def test_jsonrpc_request(self):
+        """Test JSONRPCRequest creation and validation."""
+        request = JSONRPCRequest(jsonrpc="2.0", id="test-123", method="test_method", params={"key": "value"})
+
+        assert request.jsonrpc == "2.0"
+        assert request.id == "test-123"
+        assert request.method == "test_method"
+        assert request.params == {"key": "value"}
+
+    def test_jsonrpc_request_numeric_id(self):
+        """Test JSONRPCRequest with numeric ID."""
+        request = JSONRPCRequest(jsonrpc="2.0", id=123, method="test", params=None)
+        assert request.id == 123
+
+    def test_jsonrpc_notification(self):
+        """Test JSONRPCNotification creation."""
+        notification = JSONRPCNotification(jsonrpc="2.0", method="notification_method", params={"data": "test"})
+
+        assert notification.jsonrpc == "2.0"
+        assert notification.method == "notification_method"
+        assert not hasattr(notification, "id")  # Notifications don't have an ID
+
+    def test_jsonrpc_response(self):
+        """Test JSONRPCResponse creation."""
+        response = JSONRPCResponse(jsonrpc="2.0", id="req-123", result={"success": True})
+
+        assert response.jsonrpc == "2.0"
+        assert response.id == "req-123"
+        assert response.result == {"success": True}
+
+    def test_jsonrpc_error(self):
+        """Test JSONRPCError creation."""
+        error_data = ErrorData(code=INVALID_PARAMS, message="Invalid parameters", data={"field": "missing"})
+
+        error = JSONRPCError(jsonrpc="2.0", id="req-123", error=error_data)
+
+        assert error.jsonrpc == "2.0"
+        assert error.id == "req-123"
+        assert error.error.code == INVALID_PARAMS
"Invalid parameters" + assert error.error.data == {"field": "missing"} + + def test_jsonrpc_message_parsing(self): + """Test JSONRPCMessage parsing different message types.""" + # Parse request + request_json = '{"jsonrpc": "2.0", "id": 1, "method": "test", "params": null}' + msg = JSONRPCMessage.model_validate_json(request_json) + assert isinstance(msg.root, JSONRPCRequest) + + # Parse response + response_json = '{"jsonrpc": "2.0", "id": 1, "result": {"data": "test"}}' + msg = JSONRPCMessage.model_validate_json(response_json) + assert isinstance(msg.root, JSONRPCResponse) + + # Parse error + error_json = '{"jsonrpc": "2.0", "id": 1, "error": {"code": -32600, "message": "Invalid Request"}}' + msg = JSONRPCMessage.model_validate_json(error_json) + assert isinstance(msg.root, JSONRPCError) + + +class TestCapabilities: + """Test capability classes.""" + + def test_client_capabilities(self): + """Test ClientCapabilities creation.""" + caps = ClientCapabilities( + experimental={"feature": {"enabled": True}}, + sampling={"model_config": {"extra": "allow"}}, + roots={"listChanged": True}, + ) + + assert caps.experimental == {"feature": {"enabled": True}} + assert caps.sampling is not None + assert caps.roots.listChanged is True # type: ignore + + def test_server_capabilities(self): + """Test ServerCapabilities creation.""" + caps = ServerCapabilities( + tools={"listChanged": True}, + resources={"subscribe": True, "listChanged": False}, + prompts={"listChanged": True}, + logging={}, + completions={}, + ) + + assert caps.tools.listChanged is True # type: ignore + assert caps.resources.subscribe is True # type: ignore + assert caps.resources.listChanged is False # type: ignore + + +class TestInitialization: + """Test initialization request/response types.""" + + def test_initialize_request(self): + """Test InitializeRequest creation.""" + client_info = Implementation(name="test-client", version="1.0.0") + capabilities = ClientCapabilities() + + params = InitializeRequestParams( + protocolVersion=LATEST_PROTOCOL_VERSION, capabilities=capabilities, clientInfo=client_info + ) + + request = InitializeRequest(params=params) + + assert request.method == "initialize" + assert request.params.protocolVersion == LATEST_PROTOCOL_VERSION + assert request.params.clientInfo.name == "test-client" + + def test_initialize_result(self): + """Test InitializeResult creation.""" + server_info = Implementation(name="test-server", version="1.0.0") + capabilities = ServerCapabilities() + + result = InitializeResult( + protocolVersion=LATEST_PROTOCOL_VERSION, + capabilities=capabilities, + serverInfo=server_info, + instructions="Welcome to test server", + ) + + assert result.protocolVersion == LATEST_PROTOCOL_VERSION + assert result.serverInfo.name == "test-server" + assert result.instructions == "Welcome to test server" + + +class TestTools: + """Test tool-related types.""" + + def test_tool_creation(self): + """Test Tool creation with all fields.""" + tool = Tool( + name="test_tool", + title="Test Tool", + description="A tool for testing", + inputSchema={"type": "object", "properties": {"input": {"type": "string"}}, "required": ["input"]}, + outputSchema={"type": "object", "properties": {"result": {"type": "string"}}}, + annotations=ToolAnnotations( + title="Test Tool", readOnlyHint=False, destructiveHint=False, idempotentHint=True + ), + ) + + assert tool.name == "test_tool" + assert tool.title == "Test Tool" + assert tool.description == "A tool for testing" + assert tool.inputSchema["properties"]["input"]["type"] == 
"string" + assert tool.annotations.idempotentHint is True + + def test_call_tool_request(self): + """Test CallToolRequest creation.""" + params = CallToolRequestParams(name="test_tool", arguments={"input": "test value"}) + + request = CallToolRequest(params=params) + + assert request.method == "tools/call" + assert request.params.name == "test_tool" + assert request.params.arguments == {"input": "test value"} + + def test_call_tool_result(self): + """Test CallToolResult creation.""" + result = CallToolResult( + content=[TextContent(type="text", text="Tool executed successfully")], + structuredContent={"status": "success", "data": "test"}, + isError=False, + ) + + assert len(result.content) == 1 + assert result.content[0].text == "Tool executed successfully" # type: ignore + assert result.structuredContent == {"status": "success", "data": "test"} + assert result.isError is False + + def test_list_tools_request(self): + """Test ListToolsRequest creation.""" + request = ListToolsRequest() + assert request.method == "tools/list" + + def test_list_tools_result(self): + """Test ListToolsResult creation.""" + tool1 = Tool(name="tool1", inputSchema={}) + tool2 = Tool(name="tool2", inputSchema={}) + + result = ListToolsResult(tools=[tool1, tool2]) + + assert len(result.tools) == 2 + assert result.tools[0].name == "tool1" + assert result.tools[1].name == "tool2" + + +class TestContent: + """Test content types.""" + + def test_text_content(self): + """Test TextContent creation.""" + annotations = Annotations(audience=["user"], priority=0.8) + content = TextContent(type="text", text="Hello, world!", annotations=annotations) + + assert content.type == "text" + assert content.text == "Hello, world!" + assert content.annotations is not None + assert content.annotations.priority == 0.8 + + def test_image_content(self): + """Test ImageContent creation.""" + content = ImageContent(type="image", data="base64encodeddata", mimeType="image/png") + + assert content.type == "image" + assert content.data == "base64encodeddata" + assert content.mimeType == "image/png" + + +class TestOAuth: + """Test OAuth-related types.""" + + def test_oauth_client_metadata(self): + """Test OAuthClientMetadata creation.""" + metadata = OAuthClientMetadata( + client_name="Test Client", + redirect_uris=["https://example.com/callback"], + grant_types=["authorization_code", "refresh_token"], + response_types=["code"], + token_endpoint_auth_method="none", + client_uri="https://example.com", + scope="read write", + ) + + assert metadata.client_name == "Test Client" + assert len(metadata.redirect_uris) == 1 + assert "authorization_code" in metadata.grant_types + + def test_oauth_client_information(self): + """Test OAuthClientInformation creation.""" + info = OAuthClientInformation(client_id="test-client-id", client_secret="test-secret") + + assert info.client_id == "test-client-id" + assert info.client_secret == "test-secret" + + def test_oauth_client_information_without_secret(self): + """Test OAuthClientInformation without secret.""" + info = OAuthClientInformation(client_id="public-client") + + assert info.client_id == "public-client" + assert info.client_secret is None + + def test_oauth_tokens(self): + """Test OAuthTokens creation.""" + tokens = OAuthTokens( + access_token="access-token-123", + token_type="Bearer", + expires_in=3600, + refresh_token="refresh-token-456", + scope="read write", + ) + + assert tokens.access_token == "access-token-123" + assert tokens.token_type == "Bearer" + assert tokens.expires_in == 3600 + assert 
+        assert tokens.refresh_token == "refresh-token-456"
+        assert tokens.scope == "read write"
+
+    def test_oauth_metadata(self):
+        """Test OAuthMetadata creation."""
+        metadata = OAuthMetadata(
+            authorization_endpoint="https://auth.example.com/authorize",
+            token_endpoint="https://auth.example.com/token",
+            registration_endpoint="https://auth.example.com/register",
+            response_types_supported=["code", "token"],
+            grant_types_supported=["authorization_code", "refresh_token"],
+            code_challenge_methods_supported=["plain", "S256"],
+        )
+
+        assert metadata.authorization_endpoint == "https://auth.example.com/authorize"
+        assert "code" in metadata.response_types_supported
+        assert "S256" in metadata.code_challenge_methods_supported
+
+
+class TestNotifications:
+    """Test notification types."""
+
+    def test_progress_notification(self):
+        """Test ProgressNotification creation."""
+        params = ProgressNotificationParams(
+            progressToken="progress-123", progress=50.0, total=100.0, message="Processing... 50%"
+        )
+
+        notification = ProgressNotification(params=params)
+
+        assert notification.method == "notifications/progress"
+        assert notification.params.progressToken == "progress-123"
+        assert notification.params.progress == 50.0
+        assert notification.params.total == 100.0
+        assert notification.params.message == "Processing... 50%"
+
+    def test_ping_request(self):
+        """Test PingRequest creation."""
+        request = PingRequest()
+        assert request.method == "ping"
+        assert request.params is None
+
+
+class TestCompletion:
+    """Test completion-related types."""
+
+    def test_completion_context(self):
+        """Test CompletionContext creation."""
+        context = CompletionContext(arguments={"template_var": "value"})
+        assert context.arguments == {"template_var": "value"}
+
+    def test_resource_template_reference(self):
+        """Test ResourceTemplateReference creation."""
+        ref = ResourceTemplateReference(type="ref/resource", uri="file:///path/to/{filename}")
+        assert ref.type == "ref/resource"
+        assert ref.uri == "file:///path/to/{filename}"
+
+    def test_prompt_reference(self):
+        """Test PromptReference creation."""
+        ref = PromptReference(type="ref/prompt", name="test_prompt")
+        assert ref.type == "ref/prompt"
+        assert ref.name == "test_prompt"
+
+    def test_complete_request(self):
+        """Test CompleteRequest creation."""
+        ref = PromptReference(type="ref/prompt", name="test_prompt")
+        arg = CompletionArgument(name="arg1", value="val")
+
+        params = CompleteRequestParams(ref=ref, argument=arg, context=CompletionContext(arguments={"key": "value"}))
+
+        request = CompleteRequest(params=params)
+
+        assert request.method == "completion/complete"
+        assert request.params.ref.name == "test_prompt"  # type: ignore
+        assert request.params.argument.name == "arg1"
+
+    def test_complete_result(self):
+        """Test CompleteResult creation."""
+        completion = Completion(values=["option1", "option2", "option3"], total=10, hasMore=True)
+
+        result = CompleteResult(completion=completion)
+
+        assert len(result.completion.values) == 3
+        assert result.completion.total == 10
+        assert result.completion.hasMore is True
+
+
+class TestValidation:
+    """Test validation of various types."""
+
+    def test_invalid_jsonrpc_version(self):
+        """Test invalid JSON-RPC version validation."""
+        with pytest.raises(ValidationError):
+            JSONRPCRequest(
+                jsonrpc="1.0",  # Invalid version
+                id=1,
+                method="test",
+            )
+
+    def test_tool_annotations_validation(self):
+        """Test ToolAnnotations with valid values."""
+        # Valid annotations
+        annotations = ToolAnnotations(
+            title="Test", readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False
+        )
+        assert annotations.title == "Test"
+
+    def test_extra_fields_allowed(self):
+        """Test that extra fields are allowed in models."""
+        # Most models should allow extra fields
+        tool = Tool(
+            name="test",
+            inputSchema={},
+            customField="allowed",  # type: ignore
+        )
+        assert tool.customField == "allowed"  # type: ignore
+
+    def test_result_meta_alias(self):
+        """Test Result model with _meta alias."""
+        # Create with the field name (not alias)
+        result = Result(_meta={"key": "value"})
+
+        # Verify the field is set correctly
+        assert result.meta == {"key": "value"}
+
+        # Dump with alias
+        dumped = result.model_dump(by_alias=True)
+        assert "_meta" in dumped
+        assert dumped["_meta"] == {"key": "value"}
diff --git a/api/tests/unit_tests/core/mcp/test_utils.py b/api/tests/unit_tests/core/mcp/test_utils.py
new file mode 100644
index 0000000000..ca41d5f4c1
--- /dev/null
+++ b/api/tests/unit_tests/core/mcp/test_utils.py
@@ -0,0 +1,355 @@
+"""Unit tests for MCP utils module."""
+
+import json
+from collections.abc import Generator
+from unittest.mock import MagicMock, Mock, patch
+
+import httpx
+import httpx_sse
+import pytest
+
+from core.mcp.utils import (
+    STATUS_FORCELIST,
+    create_mcp_error_response,
+    create_ssrf_proxy_mcp_http_client,
+    ssrf_proxy_sse_connect,
+)
+
+
+class TestConstants:
+    """Test module constants."""
+
+    def test_status_forcelist(self):
+        """Test STATUS_FORCELIST contains expected HTTP status codes."""
+        assert STATUS_FORCELIST == [429, 500, 502, 503, 504]
+        assert 429 in STATUS_FORCELIST  # Too Many Requests
+        assert 500 in STATUS_FORCELIST  # Internal Server Error
+        assert 502 in STATUS_FORCELIST  # Bad Gateway
+        assert 503 in STATUS_FORCELIST  # Service Unavailable
+        assert 504 in STATUS_FORCELIST  # Gateway Timeout
+
+
+class TestCreateSSRFProxyMCPHTTPClient:
+    """Test create_ssrf_proxy_mcp_http_client function."""
+
+    @patch("core.mcp.utils.dify_config")
+    def test_create_client_with_all_url_proxy(self, mock_config):
+        """Test client creation with SSRF_PROXY_ALL_URL configured."""
+        mock_config.SSRF_PROXY_ALL_URL = "http://proxy.example.com:8080"
+        mock_config.HTTP_REQUEST_NODE_SSL_VERIFY = True
+
+        client = create_ssrf_proxy_mcp_http_client(
+            headers={"Authorization": "Bearer token"}, timeout=httpx.Timeout(30.0)
+        )
+
+        assert isinstance(client, httpx.Client)
+        assert client.headers["Authorization"] == "Bearer token"
+        assert client.timeout.connect == 30.0
+        assert client.follow_redirects is True
+
+        # Clean up
+        client.close()
+
+    @patch("core.mcp.utils.dify_config")
+    def test_create_client_with_http_https_proxies(self, mock_config):
+        """Test client creation with separate HTTP/HTTPS proxies."""
+        mock_config.SSRF_PROXY_ALL_URL = None
+        mock_config.SSRF_PROXY_HTTP_URL = "http://http-proxy.example.com:8080"
+        mock_config.SSRF_PROXY_HTTPS_URL = "http://https-proxy.example.com:8443"
+        mock_config.HTTP_REQUEST_NODE_SSL_VERIFY = False
+
+        client = create_ssrf_proxy_mcp_http_client()
+
+        assert isinstance(client, httpx.Client)
+        assert client.follow_redirects is True
+
+        # Clean up
+        client.close()
+
+    @patch("core.mcp.utils.dify_config")
+    def test_create_client_without_proxy(self, mock_config):
+        """Test client creation without proxy configuration."""
+        mock_config.SSRF_PROXY_ALL_URL = None
+        mock_config.SSRF_PROXY_HTTP_URL = None
+        mock_config.SSRF_PROXY_HTTPS_URL = None
+        mock_config.HTTP_REQUEST_NODE_SSL_VERIFY = True
+
+        headers = {"X-Custom-Header": "value"}
+        timeout = httpx.Timeout(timeout=30.0, connect=5.0, read=10.0, write=30.0)
+
+        client = create_ssrf_proxy_mcp_http_client(headers=headers, timeout=timeout)
+
+        assert isinstance(client, httpx.Client)
+        assert client.headers["X-Custom-Header"] == "value"
+        assert client.timeout.connect == 5.0
+        assert client.timeout.read == 10.0
+        assert client.follow_redirects is True
+
+        # Clean up
+        client.close()
+
+    @patch("core.mcp.utils.dify_config")
+    def test_create_client_default_params(self, mock_config):
+        """Test client creation with default parameters."""
+        mock_config.SSRF_PROXY_ALL_URL = None
+        mock_config.SSRF_PROXY_HTTP_URL = None
+        mock_config.SSRF_PROXY_HTTPS_URL = None
+        mock_config.HTTP_REQUEST_NODE_SSL_VERIFY = True
+
+        client = create_ssrf_proxy_mcp_http_client()
+
+        assert isinstance(client, httpx.Client)
+        # httpx.Client adds default headers, so we just check it's a Headers object
+        assert isinstance(client.headers, httpx.Headers)
+        # When no timeout is provided, httpx uses its default timeout
+        assert client.timeout is not None
+
+        # Clean up
+        client.close()
+
+
+class TestSSRFProxySSEConnect:
+    """Test ssrf_proxy_sse_connect function."""
+
+    @patch("core.mcp.utils.connect_sse")
+    @patch("core.mcp.utils.create_ssrf_proxy_mcp_http_client")
+    def test_sse_connect_with_provided_client(self, mock_create_client, mock_connect_sse):
+        """Test SSE connection with pre-configured client."""
+        # Setup mocks
+        mock_client = Mock(spec=httpx.Client)
+        mock_event_source = Mock(spec=httpx_sse.EventSource)
+        mock_context = MagicMock()
+        mock_context.__enter__.return_value = mock_event_source
+        mock_connect_sse.return_value = mock_context
+
+        # Call with provided client
+        result = ssrf_proxy_sse_connect(
+            "http://example.com/sse", client=mock_client, method="POST", headers={"Authorization": "Bearer token"}
+        )
+
+        # Verify client creation was not called
+        mock_create_client.assert_not_called()
+
+        # Verify connect_sse was called correctly
+        mock_connect_sse.assert_called_once_with(
+            mock_client, "POST", "http://example.com/sse", headers={"Authorization": "Bearer token"}
+        )
+
+        # Verify result
+        assert result == mock_context
+
+    @patch("core.mcp.utils.connect_sse")
+    @patch("core.mcp.utils.create_ssrf_proxy_mcp_http_client")
+    @patch("core.mcp.utils.dify_config")
+    def test_sse_connect_without_client(self, mock_config, mock_create_client, mock_connect_sse):
+        """Test SSE connection without pre-configured client."""
+        # Setup config
+        mock_config.SSRF_DEFAULT_TIME_OUT = 30.0
+        mock_config.SSRF_DEFAULT_CONNECT_TIME_OUT = 10.0
+        mock_config.SSRF_DEFAULT_READ_TIME_OUT = 60.0
+        mock_config.SSRF_DEFAULT_WRITE_TIME_OUT = 30.0
+
+        # Setup mocks
+        mock_client = Mock(spec=httpx.Client)
+        mock_create_client.return_value = mock_client
+
+        mock_event_source = Mock(spec=httpx_sse.EventSource)
+        mock_context = MagicMock()
+        mock_context.__enter__.return_value = mock_event_source
+        mock_connect_sse.return_value = mock_context
+
+        # Call without client
+        result = ssrf_proxy_sse_connect("http://example.com/sse", headers={"X-Custom": "value"})
+
+        # Verify client was created
+        mock_create_client.assert_called_once()
+        call_args = mock_create_client.call_args
+        assert call_args[1]["headers"] == {"X-Custom": "value"}
+
+        timeout = call_args[1]["timeout"]
+        # httpx.Timeout object has these attributes
+        assert isinstance(timeout, httpx.Timeout)
+        assert timeout.connect == 10.0
+        assert timeout.read == 60.0
+        assert timeout.write == 30.0
+
+        # Verify connect_sse was called
+        mock_connect_sse.assert_called_once_with(
+            mock_client,
+            "GET",  # Default method
+            "http://example.com/sse",
+        )
+
+        # Verify result
+        assert result == mock_context
+
+    @patch("core.mcp.utils.connect_sse")
+    @patch("core.mcp.utils.create_ssrf_proxy_mcp_http_client")
+    def test_sse_connect_with_custom_timeout(self, mock_create_client, mock_connect_sse):
+        """Test SSE connection with custom timeout."""
+        # Setup mocks
+        mock_client = Mock(spec=httpx.Client)
+        mock_create_client.return_value = mock_client
+
+        mock_event_source = Mock(spec=httpx_sse.EventSource)
+        mock_context = MagicMock()
+        mock_context.__enter__.return_value = mock_event_source
+        mock_connect_sse.return_value = mock_context
+
+        custom_timeout = httpx.Timeout(timeout=60.0, read=120.0)
+
+        # Call with custom timeout
+        result = ssrf_proxy_sse_connect("http://example.com/sse", timeout=custom_timeout)
+
+        # Verify client was created with custom timeout
+        mock_create_client.assert_called_once()
+        call_args = mock_create_client.call_args
+        assert call_args[1]["timeout"] == custom_timeout
+
+        # Verify result
+        assert result == mock_context
+
+    @patch("core.mcp.utils.connect_sse")
+    @patch("core.mcp.utils.create_ssrf_proxy_mcp_http_client")
+    def test_sse_connect_error_cleanup(self, mock_create_client, mock_connect_sse):
+        """Test SSE connection cleans up the client on error."""
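+        # Ownership rule under test: a client created internally is closed on
+        # failure, while a caller-provided client (next test) is left open.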
message="Invalid params", + data=error_data, + ) + + response_bytes = next(generator) + response_json = json.loads(response_bytes.decode("utf-8")) + + assert response_json["id"] == 456 + assert response_json["error"]["code"] == -32602 + assert response_json["error"]["message"] == "Invalid params" + assert response_json["error"]["data"] == error_data + + def test_create_error_response_without_request_id(self): + """Test creating error response without request ID.""" + generator = create_mcp_error_response(request_id=None, code=-32700, message="Parse error") + + response_bytes = next(generator) + response_json = json.loads(response_bytes.decode("utf-8")) + + # Should default to ID 1 + assert response_json["id"] == 1 + assert response_json["error"]["code"] == -32700 + assert response_json["error"]["message"] == "Parse error" + + def test_create_error_response_with_complex_data(self): + """Test creating error response with complex error data.""" + complex_data = { + "errors": [{"field": "name", "message": "Too short"}, {"field": "email", "message": "Invalid format"}], + "timestamp": "2024-01-01T00:00:00Z", + } + + generator = create_mcp_error_response( + request_id="complex-req", code=-32602, message="Validation failed", data=complex_data + ) + + response_bytes = next(generator) + response_json = json.loads(response_bytes.decode("utf-8")) + + assert response_json["error"]["data"] == complex_data + assert len(response_json["error"]["data"]["errors"]) == 2 + + def test_create_error_response_encoding(self): + """Test error response with non-ASCII characters.""" + generator = create_mcp_error_response( + request_id="unicode-req", + code=-32603, + message="内部错误", # Chinese characters + data={"details": "エラー詳細"}, # Japanese characters + ) + + response_bytes = next(generator) + + # Should be valid UTF-8 + response_str = response_bytes.decode("utf-8") + response_json = json.loads(response_str) + + assert response_json["error"]["message"] == "内部错误" + assert response_json["error"]["data"]["details"] == "エラー詳細" + + def test_create_error_response_yields_once(self): + """Test that error response generator yields exactly once.""" + generator = create_mcp_error_response(request_id="test", code=-32600, message="Test") + + # First yield should work + first_yield = next(generator) + assert isinstance(first_yield, bytes) + + # Second yield should raise StopIteration + with pytest.raises(StopIteration): + next(generator) + + # Subsequent calls should also raise + with pytest.raises(StopIteration): + next(generator) diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py b/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py new file mode 100644 index 0000000000..8ccd739e64 --- /dev/null +++ b/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py @@ -0,0 +1,733 @@ +import json +import unittest +from unittest.mock import MagicMock, patch + +import pytest + +from core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector import ( + AlibabaCloudMySQLVector, + AlibabaCloudMySQLVectorConfig, +) +from core.rag.models.document import Document + +try: + from mysql.connector import Error as MySQLError +except ImportError: + # Fallback for testing environments where mysql-connector-python might not be installed + class MySQLError(Exception): + def __init__(self, errno, msg): + self.errno = errno + self.msg = msg + super().__init__(msg) + + +class 
+class TestAlibabaCloudMySQLVector(unittest.TestCase):
+    def setUp(self):
+        self.config = AlibabaCloudMySQLVectorConfig(
+            host="localhost",
+            port=3306,
+            user="test_user",
+            password="test_password",
+            database="test_db",
+            max_connection=5,
+            charset="utf8mb4",
+        )
+        self.collection_name = "test_collection"
+
+        # Sample documents for testing
+        self.sample_documents = [
+            Document(
+                page_content="This is a test document about AI.",
+                metadata={"doc_id": "doc1", "document_id": "dataset1", "source": "test"},
+            ),
+            Document(
+                page_content="Another document about machine learning.",
+                metadata={"doc_id": "doc2", "document_id": "dataset1", "source": "test"},
+            ),
+        ]
+
+        # Sample embeddings
+        self.sample_embeddings = [[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]
+
+    @patch(
+        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
+    )
+    def test_init(self, mock_pool_class):
+        """Test AlibabaCloudMySQLVector initialization."""
+        # Mock the connection pool
+        mock_pool = MagicMock()
+        mock_pool_class.return_value = mock_pool
+
+        # Mock connection and cursor for vector support check
+        mock_conn = MagicMock()
+        mock_cursor = MagicMock()
+        mock_pool.get_connection.return_value = mock_conn
+        mock_conn.cursor.return_value = mock_cursor
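+        # Constructing the store consumes two fetchone() results during
+        # __init__: the MySQL version check, then the vector-support probe.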
+        mock_cursor.fetchone.side_effect = [
+            {"VERSION()": "8.0.36"},  # Version check
+            {"vector_support": True},  # Vector support check
+        ]
+
+        alibabacloud_mysql_vector = AlibabaCloudMySQLVector(self.collection_name, self.config)
+
+        assert alibabacloud_mysql_vector.collection_name == self.collection_name
+        assert alibabacloud_mysql_vector.table_name == self.collection_name.lower()
+        assert alibabacloud_mysql_vector.get_type() == "alibabacloud_mysql"
+        assert alibabacloud_mysql_vector.distance_function == "cosine"
+        assert alibabacloud_mysql_vector.pool is not None
+
+    @patch(
+        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
+    )
+    @patch("core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.redis_client")
+    def test_create_collection(self, mock_redis, mock_pool_class):
+        """Test collection creation."""
+        # Mock Redis operations
+        mock_redis.lock.return_value.__enter__ = MagicMock()
+        mock_redis.lock.return_value.__exit__ = MagicMock()
+        mock_redis.get.return_value = None
+        mock_redis.set.return_value = None
+
+        # Mock the connection pool
+        mock_pool = MagicMock()
+        mock_pool_class.return_value = mock_pool
+
+        # Mock connection and cursor
+        mock_conn = MagicMock()
+        mock_cursor = MagicMock()
+        mock_pool.get_connection.return_value = mock_conn
+        mock_conn.cursor.return_value = mock_cursor
+        mock_cursor.fetchone.side_effect = [
+            {"VERSION()": "8.0.36"},  # Version check
+            {"vector_support": True},  # Vector support check
+        ]
+
+        alibabacloud_mysql_vector = AlibabaCloudMySQLVector(self.collection_name, self.config)
+        alibabacloud_mysql_vector._create_collection(768)
+
+        # Verify SQL execution calls - should include table creation and index creation
+        assert mock_cursor.execute.called
+        assert mock_cursor.execute.call_count >= 3  # CREATE TABLE + 2 indexes
+        mock_redis.set.assert_called_once()
+
+    @patch(
+        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
+    )
+    def test_vector_support_check_success(self, mock_pool_class):
+        """Test successful vector support check."""
+        # Mock the connection pool
+        mock_pool = MagicMock()
+        mock_pool_class.return_value = mock_pool
+
+        mock_conn = MagicMock()
+        mock_cursor = MagicMock()
+        mock_pool.get_connection.return_value = mock_conn
+        mock_conn.cursor.return_value = mock_cursor
+        mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}]
+
+        # Should not raise an exception
+        vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config)
+        assert vector_store is not None
+
+    @patch(
+        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
+    )
+    def test_vector_support_check_failure(self, mock_pool_class):
+        """Test vector support check failure."""
+        # Mock the connection pool
+        mock_pool = MagicMock()
+        mock_pool_class.return_value = mock_pool
+
+        mock_conn = MagicMock()
+        mock_cursor = MagicMock()
+        mock_pool.get_connection.return_value = mock_conn
+        mock_conn.cursor.return_value = mock_cursor
+        mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.35"}, {"vector_support": False}]
+
+        with pytest.raises(ValueError) as context:
+            AlibabaCloudMySQLVector(self.collection_name, self.config)
+
+        assert "RDS MySQL Vector functions are not available" in str(context.value)
+
+    @patch(
+        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
+    )
+    def test_vector_support_check_function_error(self, mock_pool_class):
+        """Test vector support check with function not found error."""
+        # Mock the connection pool
+        mock_pool = MagicMock()
+        mock_pool_class.return_value = mock_pool
+
+        mock_conn = MagicMock()
+        mock_cursor = MagicMock()
+        mock_pool.get_connection.return_value = mock_conn
+        mock_conn.cursor.return_value = mock_cursor
+        mock_cursor.fetchone.return_value = {"VERSION()": "8.0.36"}
+        mock_cursor.execute.side_effect = [None, MySQLError(errno=1305, msg="FUNCTION VEC_FromText does not exist")]
+
+        with pytest.raises(ValueError) as context:
+            AlibabaCloudMySQLVector(self.collection_name, self.config)
+
+        assert "RDS MySQL Vector functions are not available" in str(context.value)
+
+    @patch(
+        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
+    )
+    @patch("core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.redis_client")
+    def test_create_documents(self, mock_redis, mock_pool_class):
+        """Test creating documents with embeddings."""
+        # Setup mocks
+        self._setup_mocks(mock_redis, mock_pool_class)
+
+        vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config)
+        result = vector_store.create(self.sample_documents, self.sample_embeddings)
+
+        assert len(result) == 2
+        assert "doc1" in result
+        assert "doc2" in result
+
+    @patch(
+        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
+    )
+    def test_add_texts(self, mock_pool_class):
+        """Test adding texts to the vector store."""
+        # Mock the connection pool
+        mock_pool = MagicMock()
+        mock_pool_class.return_value = mock_pool
+
+        mock_conn = MagicMock()
+        mock_cursor = MagicMock()
+        mock_pool.get_connection.return_value = mock_conn
+        mock_conn.cursor.return_value = mock_cursor
+        mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}]
+
+        vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config)
+        result = vector_store.add_texts(self.sample_documents, self.sample_embeddings)
+
+        assert len(result) == 2
+        mock_cursor.executemany.assert_called_once()
+
"core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_text_exists(self, mock_pool_class): + """Test checking if text exists.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [ + {"VERSION()": "8.0.36"}, + {"vector_support": True}, + {"id": "doc1"}, # Text exists + ] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + exists = vector_store.text_exists("doc1") + + assert exists + # Check that the correct SQL was executed (last call after init) + execute_calls = mock_cursor.execute.call_args_list + last_call = execute_calls[-1] + assert "SELECT id FROM" in last_call[0][0] + assert last_call[0][1] == ("doc1",) + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_text_not_exists(self, mock_pool_class): + """Test checking if text does not exist.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [ + {"VERSION()": "8.0.36"}, + {"vector_support": True}, + None, # Text does not exist + ] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + exists = vector_store.text_exists("nonexistent") + + assert not exists + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_get_by_ids(self, mock_pool_class): + """Test getting documents by IDs.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [ + {"meta": json.dumps({"doc_id": "doc1", "source": "test"}), "text": "Test document 1"}, + {"meta": json.dumps({"doc_id": "doc2", "source": "test"}), "text": "Test document 2"}, + ] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.get_by_ids(["doc1", "doc2"]) + + assert len(docs) == 2 + assert docs[0].page_content == "Test document 1" + assert docs[1].page_content == "Test document 2" + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_get_by_ids_empty_list(self, mock_pool_class): + """Test getting documents with empty ID list.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.get_by_ids([]) + + assert len(docs) == 0 + + @patch( + 
"core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_ids(self, mock_pool_class): + """Test deleting documents by IDs.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + vector_store.delete_by_ids(["doc1", "doc2"]) + + # Check that delete SQL was executed + execute_calls = mock_cursor.execute.call_args_list + delete_calls = [call for call in execute_calls if "DELETE" in str(call)] + assert len(delete_calls) == 1 + delete_call = delete_calls[0] + assert "DELETE FROM" in delete_call[0][0] + assert delete_call[0][1] == ["doc1", "doc2"] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_ids_empty_list(self, mock_pool_class): + """Test deleting with empty ID list.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + vector_store.delete_by_ids([]) # Should not raise an exception + + # Verify no delete SQL was executed + execute_calls = mock_cursor.execute.call_args_list + delete_calls = [call for call in execute_calls if "DELETE" in str(call)] + assert len(delete_calls) == 0 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_ids_table_not_exists(self, mock_pool_class): + """Test deleting when table doesn't exist.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + # Simulate table doesn't exist error on delete + + def execute_side_effect(*args, **kwargs): + if "DELETE" in args[0]: + raise MySQLError(errno=1146, msg="Table doesn't exist") + + mock_cursor.execute.side_effect = execute_side_effect + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + # Should not raise an exception + vector_store.delete_by_ids(["doc1"]) + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_metadata_field(self, mock_pool_class): + """Test deleting documents by metadata field.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, 
self.config) + vector_store.delete_by_metadata_field("document_id", "dataset1") + + # Check that the correct SQL was executed + execute_calls = mock_cursor.execute.call_args_list + delete_calls = [call for call in execute_calls if "DELETE" in str(call)] + assert len(delete_calls) == 1 + delete_call = delete_calls[0] + assert "JSON_UNQUOTE(JSON_EXTRACT(meta" in delete_call[0][0] + assert delete_call[0][1] == ("$.document_id", "dataset1") + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_cosine(self, mock_pool_class): + """Test vector search with cosine distance.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [{"meta": json.dumps({"doc_id": "doc1", "source": "test"}), "text": "Test document 1", "distance": 0.1}] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = vector_store.search_by_vector(query_vector, top_k=5) + + assert len(docs) == 1 + assert docs[0].page_content == "Test document 1" + assert abs(docs[0].metadata["score"] - 0.9) < 0.1 # 1 - 0.1 = 0.9 + assert docs[0].metadata["distance"] == 0.1 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_euclidean(self, mock_pool_class): + """Test vector search with euclidean distance.""" + config = AlibabaCloudMySQLVectorConfig( + host="localhost", + port=3306, + user="test_user", + password="test_password", + database="test_db", + max_connection=5, + distance_function="euclidean", + ) + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [{"meta": json.dumps({"doc_id": "doc1", "source": "test"}), "text": "Test document 1", "distance": 2.0}] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = vector_store.search_by_vector(query_vector, top_k=5) + + assert len(docs) == 1 + assert abs(docs[0].metadata["score"] - 1.0 / 3.0) < 0.01 # 1/(1+2) = 1/3 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_with_filter(self, mock_pool_class): + """Test vector search with document ID filter.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter([]) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = 
vector_store.search_by_vector(query_vector, top_k=5, document_ids_filter=["dataset1"]) + + # Verify the SQL contains the WHERE clause for filtering + execute_calls = mock_cursor.execute.call_args_list + search_calls = [call for call in execute_calls if "VEC_DISTANCE" in str(call)] + assert len(search_calls) > 0 + search_call = search_calls[0] + assert "WHERE JSON_UNQUOTE" in search_call[0][0] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_with_score_threshold(self, mock_pool_class): + """Test vector search with score threshold.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [ + { + "meta": json.dumps({"doc_id": "doc1", "source": "test"}), + "text": "High similarity document", + "distance": 0.1, # High similarity (score = 0.9) + }, + { + "meta": json.dumps({"doc_id": "doc2", "source": "test"}), + "text": "Low similarity document", + "distance": 0.8, # Low similarity (score = 0.2) + }, + ] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = vector_store.search_by_vector(query_vector, top_k=5, score_threshold=0.5) + + # Only the high similarity document should be returned + assert len(docs) == 1 + assert docs[0].page_content == "High similarity document" + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_invalid_top_k(self, mock_pool_class): + """Test vector search with invalid top_k.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + + with pytest.raises(ValueError): + vector_store.search_by_vector(query_vector, top_k=0) + + with pytest.raises(ValueError): + vector_store.search_by_vector(query_vector, top_k="invalid") + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_full_text(self, mock_pool_class): + """Test full-text search.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [ + { + "meta": {"doc_id": "doc1", "source": "test"}, + "text": "This document contains machine learning content", + "score": 1.5, + } + ] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.search_by_full_text("machine learning", top_k=5) + + assert len(docs) == 1 + assert docs[0].page_content == 
"This document contains machine learning content" + assert docs[0].metadata["score"] == 1.5 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_full_text_with_filter(self, mock_pool_class): + """Test full-text search with document ID filter.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter([]) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.search_by_full_text("machine learning", top_k=5, document_ids_filter=["dataset1"]) + + # Verify the SQL contains the AND clause for filtering + execute_calls = mock_cursor.execute.call_args_list + search_calls = [call for call in execute_calls if "MATCH" in str(call)] + assert len(search_calls) > 0 + search_call = search_calls[0] + assert "AND JSON_UNQUOTE" in search_call[0][0] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_full_text_invalid_top_k(self, mock_pool_class): + """Test full-text search with invalid top_k.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + + with pytest.raises(ValueError): + vector_store.search_by_full_text("test", top_k=0) + + with pytest.raises(ValueError): + vector_store.search_by_full_text("test", top_k="invalid") + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_collection(self, mock_pool_class): + """Test deleting the entire collection.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + vector_store.delete() + + # Check that DROP TABLE SQL was executed + execute_calls = mock_cursor.execute.call_args_list + drop_calls = [call for call in execute_calls if "DROP TABLE" in str(call)] + assert len(drop_calls) == 1 + drop_call = drop_calls[0] + assert f"DROP TABLE IF EXISTS {self.collection_name.lower()}" in drop_call[0][0] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_unsupported_distance_function(self, mock_pool_class): + """Test that Pydantic validation rejects unsupported distance functions.""" + # Test that creating config with unsupported distance function raises ValidationError + with pytest.raises(ValueError) as context: + AlibabaCloudMySQLVectorConfig( + host="localhost", + port=3306, + 
user="test_user", + password="test_password", + database="test_db", + max_connection=5, + distance_function="manhattan", # Unsupported - not in Literal["cosine", "euclidean"] + ) + + # The error should be related to validation + assert "Input should be 'cosine' or 'euclidean'" in str(context.value) or "manhattan" in str(context.value) + + def _setup_mocks(self, mock_redis, mock_pool_class): + """Helper method to setup common mocks.""" + # Mock Redis operations + mock_redis.lock.return_value.__enter__ = MagicMock() + mock_redis.lock.return_value.__exit__ = MagicMock() + mock_redis.get.return_value = None + mock_redis.set.return_value = None + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + +@pytest.mark.parametrize( + "invalid_config_override", + [ + {"host": ""}, # Test empty host + {"port": 0}, # Test invalid port + {"max_connection": 0}, # Test invalid max_connection + ], +) +def test_config_validation_parametrized(invalid_config_override): + """Test configuration validation for various invalid inputs using parametrize.""" + config = { + "host": "localhost", + "port": 3306, + "user": "test", + "password": "test", + "database": "test", + "max_connection": 5, + } + config.update(invalid_config_override) + + with pytest.raises(ValueError): + AlibabaCloudMySQLVectorConfig(**config) + + +if __name__ == "__main__": + unittest.main() diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py b/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py index 48cc8a7e1c..fb2ddfe162 100644 --- a/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py +++ b/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py @@ -11,8 +11,8 @@ def test_default_value(): config = valid_config.copy() del config[key] with pytest.raises(ValidationError) as e: - MilvusConfig(**config) + MilvusConfig.model_validate(config) assert e.value.errors()[0]["msg"] == f"Value error, config MILVUS_{key.upper()} is required" - config = MilvusConfig(**valid_config) + config = MilvusConfig.model_validate(valid_config) assert config.database == "default" diff --git a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py index 6689e13b96..b4ee1b91b4 100644 --- a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py +++ b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py @@ -1,10 +1,12 @@ import os +from pytest_mock import MockerFixture + from core.rag.extractor.firecrawl.firecrawl_app import FirecrawlApp from tests.unit_tests.core.rag.extractor.test_notion_extractor import _mock_response -def test_firecrawl_web_extractor_crawl_mode(mocker): +def test_firecrawl_web_extractor_crawl_mode(mocker: MockerFixture): url = "https://firecrawl.dev" api_key = os.getenv("FIRECRAWL_API_KEY") or "fc-" base_url = "https://api.firecrawl.dev" @@ -18,7 +20,7 @@ def test_firecrawl_web_extractor_crawl_mode(mocker): mocked_firecrawl = { "id": "test", } - mocker.patch("requests.post", return_value=_mock_response(mocked_firecrawl)) + mocker.patch("httpx.post", return_value=_mock_response(mocked_firecrawl)) job_id = firecrawl_app.crawl_url(url, params) assert job_id is not 
None diff --git a/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py b/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py index eea584a2f8..58bec7d19e 100644 --- a/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py +++ b/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py @@ -1,5 +1,7 @@ from unittest import mock +from pytest_mock import MockerFixture + from core.rag.extractor import notion_extractor user_id = "user1" @@ -57,7 +59,7 @@ def _remove_multiple_new_lines(text): return text.strip() -def test_notion_page(mocker): +def test_notion_page(mocker: MockerFixture): texts = ["Head 1", "1.1", "paragraph 1", "1.1.1"] mocked_notion_page = { "object": "list", @@ -69,7 +71,7 @@ def test_notion_page(mocker): ], "next_cursor": None, } - mocker.patch("requests.request", return_value=_mock_response(mocked_notion_page)) + mocker.patch("httpx.request", return_value=_mock_response(mocked_notion_page)) page_docs = extractor._load_data_as_documents(page_id, "page") assert len(page_docs) == 1 @@ -77,14 +79,14 @@ def test_notion_page(mocker): assert content == "# Head 1\n## 1.1\nparagraph 1\n### 1.1.1" -def test_notion_database(mocker): +def test_notion_database(mocker: MockerFixture): page_title_list = ["page1", "page2", "page3"] mocked_notion_database = { "object": "list", "results": [_generate_page(i) for i in page_title_list], "next_cursor": None, } - mocker.patch("requests.post", return_value=_mock_response(mocked_notion_database)) + mocker.patch("httpx.post", return_value=_mock_response(mocked_notion_database)) database_docs = extractor._load_data_as_documents(database_id, "database") assert len(database_docs) == 1 content = _remove_multiple_new_lines(database_docs[0].page_content) diff --git a/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py b/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py index e7733b2317..e6d0371cd5 100644 --- a/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py +++ b/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py @@ -140,7 +140,7 @@ class TestCeleryWorkflowExecutionRepository: assert call_args["execution_data"] == sample_workflow_execution.model_dump() assert call_args["tenant_id"] == mock_account.current_tenant_id assert call_args["app_id"] == "test-app" - assert call_args["triggered_from"] == WorkflowRunTriggeredFrom.APP_RUN.value + assert call_args["triggered_from"] == WorkflowRunTriggeredFrom.APP_RUN assert call_args["creator_user_id"] == mock_account.id # Verify no task tracking occurs (no _pending_saves attribute) diff --git a/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py b/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py index 3abe20fca1..f6211f4cca 100644 --- a/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py +++ b/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py @@ -149,7 +149,7 @@ class TestCeleryWorkflowNodeExecutionRepository: assert call_args["execution_data"] == sample_workflow_node_execution.model_dump() assert call_args["tenant_id"] == mock_account.current_tenant_id assert call_args["app_id"] == "test-app" - assert call_args["triggered_from"] == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value + assert call_args["triggered_from"] == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN assert 
call_args["creator_user_id"] == mock_account.id # Verify execution is cached diff --git a/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py b/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py index 36f7d3ef55..485be90eae 100644 --- a/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py +++ b/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py @@ -145,12 +145,12 @@ class TestSQLAlchemyWorkflowNodeExecutionRepositoryTruncation: db_model.index = 1 db_model.predecessor_node_id = None db_model.node_id = "node-id" - db_model.node_type = NodeType.LLM.value + db_model.node_type = NodeType.LLM db_model.title = "Test Node" db_model.inputs = json.dumps({"value": "inputs"}) db_model.process_data = json.dumps({"value": "process_data"}) db_model.outputs = json.dumps({"value": "outputs"}) - db_model.status = WorkflowNodeExecutionStatus.SUCCEEDED.value + db_model.status = WorkflowNodeExecutionStatus.SUCCEEDED db_model.error = None db_model.elapsed_time = 1.0 db_model.execution_metadata = "{}" diff --git a/api/tests/unit_tests/core/test_model_manager.py b/api/tests/unit_tests/core/test_model_manager.py index d98e9f6bad..5a7547e85c 100644 --- a/api/tests/unit_tests/core/test_model_manager.py +++ b/api/tests/unit_tests/core/test_model_manager.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch import pytest import redis +from pytest_mock import MockerFixture from core.entities.provider_entities import ModelLoadBalancingConfiguration from core.model_manager import LBModelManager @@ -39,7 +40,7 @@ def lb_model_manager(): return lb_model_manager -def test_lb_model_manager_fetch_next(mocker, lb_model_manager): +def test_lb_model_manager_fetch_next(mocker: MockerFixture, lb_model_manager: LBModelManager): # initialize redis client redis_client.initialize(redis.Redis()) diff --git a/api/tests/unit_tests/core/test_provider_configuration.py b/api/tests/unit_tests/core/test_provider_configuration.py index 75621ecb6a..9060cf7b6c 100644 --- a/api/tests/unit_tests/core/test_provider_configuration.py +++ b/api/tests/unit_tests/core/test_provider_configuration.py @@ -14,7 +14,13 @@ from core.entities.provider_entities import ( ) from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.entities.provider_entities import ConfigurateMethod, ProviderEntity +from core.model_runtime.entities.provider_entities import ( + ConfigurateMethod, + CredentialFormSchema, + FormOption, + FormType, + ProviderEntity, +) from models.provider import Provider, ProviderType @@ -306,3 +312,174 @@ class TestProviderConfiguration: # Assert assert credentials == {"openai_api_key": "test_key"} + + def test_extract_secret_variables_with_secret_input(self, provider_configuration): + """Test extracting secret variables from credential form schemas""" + # Arrange + credential_form_schemas = [ + CredentialFormSchema( + variable="api_key", + label=I18nObject(en_US="API Key", zh_Hans="API 密钥"), + type=FormType.SECRET_INPUT, + required=True, + ), + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="secret_token", + label=I18nObject(en_US="Secret Token", zh_Hans="密钥令牌"), + type=FormType.SECRET_INPUT, + required=False, + ), + ] + + # Act + secret_variables = 
provider_configuration.extract_secret_variables(credential_form_schemas) + + # Assert + assert len(secret_variables) == 2 + assert "api_key" in secret_variables + assert "secret_token" in secret_variables + assert "model_name" not in secret_variables + + def test_extract_secret_variables_no_secret_input(self, provider_configuration): + """Test extracting secret variables when no secret input fields exist""" + # Arrange + credential_form_schemas = [ + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=FormType.SELECT, + required=True, + options=[FormOption(label=I18nObject(en_US="0.1", zh_Hans="0.1"), value="0.1")], + ), + ] + + # Act + secret_variables = provider_configuration.extract_secret_variables(credential_form_schemas) + + # Assert + assert len(secret_variables) == 0 + + def test_extract_secret_variables_empty_list(self, provider_configuration): + """Test extracting secret variables from empty credential form schemas""" + # Arrange + credential_form_schemas = [] + + # Act + secret_variables = provider_configuration.extract_secret_variables(credential_form_schemas) + + # Assert + assert len(secret_variables) == 0 + + @patch("core.entities.provider_configuration.encrypter") + def test_obfuscated_credentials_with_secret_variables(self, mock_encrypter, provider_configuration): + """Test obfuscating credentials with secret variables""" + # Arrange + credentials = { + "api_key": "sk-1234567890abcdef", + "model_name": "gpt-4", + "secret_token": "secret_value_123", + "temperature": "0.7", + } + + credential_form_schemas = [ + CredentialFormSchema( + variable="api_key", + label=I18nObject(en_US="API Key", zh_Hans="API 密钥"), + type=FormType.SECRET_INPUT, + required=True, + ), + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="secret_token", + label=I18nObject(en_US="Secret Token", zh_Hans="密钥令牌"), + type=FormType.SECRET_INPUT, + required=False, + ), + CredentialFormSchema( + variable="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=FormType.TEXT_INPUT, + required=True, + ), + ] + + mock_encrypter.obfuscated_token.side_effect = lambda x: f"***{x[-4:]}" + + # Act + obfuscated = provider_configuration.obfuscated_credentials(credentials, credential_form_schemas) + + # Assert + assert obfuscated["api_key"] == "***cdef" + assert obfuscated["model_name"] == "gpt-4" # Not obfuscated + assert obfuscated["secret_token"] == "***_123" + assert obfuscated["temperature"] == "0.7" # Not obfuscated + + # Verify encrypter was called for secret fields only + assert mock_encrypter.obfuscated_token.call_count == 2 + mock_encrypter.obfuscated_token.assert_any_call("sk-1234567890abcdef") + mock_encrypter.obfuscated_token.assert_any_call("secret_value_123") + + def test_obfuscated_credentials_no_secret_variables(self, provider_configuration): + """Test obfuscating credentials when no secret variables exist""" + # Arrange + credentials = { + "model_name": "gpt-4", + "temperature": "0.7", + "max_tokens": "1000", + } + + credential_form_schemas = [ + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + 
variable="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="max_tokens", + label=I18nObject(en_US="Max Tokens", zh_Hans="最大令牌数"), + type=FormType.TEXT_INPUT, + required=True, + ), + ] + + # Act + obfuscated = provider_configuration.obfuscated_credentials(credentials, credential_form_schemas) + + # Assert + assert obfuscated == credentials # No changes expected + + def test_obfuscated_credentials_empty_credentials(self, provider_configuration): + """Test obfuscating empty credentials""" + # Arrange + credentials = {} + credential_form_schemas = [] + + # Act + obfuscated = provider_configuration.obfuscated_credentials(credentials, credential_form_schemas) + + # Assert + assert obfuscated == {} diff --git a/api/tests/unit_tests/core/test_provider_manager.py b/api/tests/unit_tests/core/test_provider_manager.py index 2dab394029..0c3887beab 100644 --- a/api/tests/unit_tests/core/test_provider_manager.py +++ b/api/tests/unit_tests/core/test_provider_manager.py @@ -1,4 +1,5 @@ import pytest +from pytest_mock import MockerFixture from core.entities.provider_entities import ModelSettings from core.model_runtime.entities.model_entities import ModelType @@ -7,19 +8,25 @@ from models.provider import LoadBalancingModelConfig, ProviderModelSetting @pytest.fixture -def mock_provider_entity(mocker): +def mock_provider_entity(mocker: MockerFixture): mock_entity = mocker.Mock() mock_entity.provider = "openai" mock_entity.configurate_methods = ["predefined-model"] mock_entity.supported_model_types = [ModelType.LLM] - mock_entity.model_credential_schema = mocker.Mock() - mock_entity.model_credential_schema.credential_form_schemas = [] + # Use PropertyMock to ensure credential_form_schemas is iterable + provider_credential_schema = mocker.Mock() + type(provider_credential_schema).credential_form_schemas = mocker.PropertyMock(return_value=[]) + mock_entity.provider_credential_schema = provider_credential_schema + + model_credential_schema = mocker.Mock() + type(model_credential_schema).credential_form_schemas = mocker.PropertyMock(return_value=[]) + mock_entity.model_credential_schema = model_credential_schema return mock_entity -def test__to_model_settings(mocker, mock_provider_entity): +def test__to_model_settings(mocker: MockerFixture, mock_provider_entity): # Mocking the inputs provider_model_settings = [ ProviderModelSetting( @@ -79,7 +86,7 @@ def test__to_model_settings(mocker, mock_provider_entity): assert result[0].load_balancing_configs[1].name == "first" -def test__to_model_settings_only_one_lb(mocker, mock_provider_entity): +def test__to_model_settings_only_one_lb(mocker: MockerFixture, mock_provider_entity): # Mocking the inputs provider_model_settings = [ ProviderModelSetting( @@ -127,7 +134,7 @@ def test__to_model_settings_only_one_lb(mocker, mock_provider_entity): assert len(result[0].load_balancing_configs) == 0 -def test__to_model_settings_lb_disabled(mocker, mock_provider_entity): +def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_entity): # Mocking the inputs provider_model_settings = [ ProviderModelSetting( diff --git a/api/tests/unit_tests/core/tools/test_tool_entities.py b/api/tests/unit_tests/core/tools/test_tool_entities.py new file mode 100644 index 0000000000..a5b7e8a9a3 --- /dev/null +++ b/api/tests/unit_tests/core/tools/test_tool_entities.py @@ -0,0 +1,29 @@ +from core.tools.entities.common_entities import I18nObject +from 
core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage + + +def _make_identity() -> ToolIdentity: + return ToolIdentity( + author="author", + name="tool", + label=I18nObject(en_US="Label"), + provider="builtin", + ) + + +def test_log_message_metadata_none_defaults_to_empty_dict(): + log_message = ToolInvokeMessage.LogMessage( + id="log-1", + label="Log entry", + status=ToolInvokeMessage.LogMessage.LogStatus.START, + data={}, + metadata=None, + ) + + assert log_message.metadata == {} + + +def test_tool_entity_output_schema_none_defaults_to_empty_dict(): + entity = ToolEntity(identity=_make_identity(), output_schema=None) + + assert entity.output_schema == {} diff --git a/api/tests/unit_tests/core/tools/utils/test_parser.py b/api/tests/unit_tests/core/tools/utils/test_parser.py index e1eab21ca4..f39158aa59 100644 --- a/api/tests/unit_tests/core/tools/utils/test_parser.py +++ b/api/tests/unit_tests/core/tools/utils/test_parser.py @@ -109,3 +109,83 @@ def test_parse_openapi_to_tool_bundle_properties_all_of(app): assert tool_bundles[0].parameters[0].llm_description == "desc prop1" # TODO: support enum in OpenAPI # assert set(tool_bundles[0].parameters[0].options) == {"option1", "option2", "option3"} + + +def test_parse_openapi_to_tool_bundle_default_value_type_casting(app): + """ + Test that default values are properly cast to match parameter types. + This addresses the issue where array default values like [] cause validation errors + when parameter type is inferred as string/number/boolean. + """ + openapi = { + "openapi": "3.0.0", + "info": {"title": "Test API", "version": "1.0.0"}, + "servers": [{"url": "https://example.com"}], + "paths": { + "/product/create": { + "post": { + "operationId": "createProduct", + "summary": "Create a product", + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "categories": { + "description": "List of category identifiers", + "default": [], + "type": "array", + "items": {"type": "string"}, + }, + "name": { + "description": "Product name", + "default": "Default Product", + "type": "string", + }, + "price": {"description": "Product price", "default": 0.0, "type": "number"}, + "available": { + "description": "Product availability", + "default": True, + "type": "boolean", + }, + }, + } + } + } + }, + "responses": {"200": {"description": "Default Response"}}, + } + } + }, + } + + with app.test_request_context(): + tool_bundles = ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi) + + assert len(tool_bundles) == 1 + bundle = tool_bundles[0] + assert len(bundle.parameters) == 4 + + # Find parameters by name + params_by_name = {param.name: param for param in bundle.parameters} + + # Check categories parameter (array type with [] default) + categories_param = params_by_name["categories"] + assert categories_param.type == "array" # Will be detected by _get_tool_parameter_type + assert categories_param.default is None # Array default [] is converted to None + + # Check name parameter (string type with string default) + name_param = params_by_name["name"] + assert name_param.type == "string" + assert name_param.default == "Default Product" + + # Check price parameter (number type with number default) + price_param = params_by_name["price"] + assert price_param.type == "number" + assert price_param.default == 0.0 + + # Check available parameter (boolean type with boolean default) + available_param = params_by_name["available"] + assert available_param.type == "boolean" + 
assert available_param.default is True diff --git a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py index 17e3ebeea0..c68aad0b22 100644 --- a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py +++ b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py @@ -34,12 +34,17 @@ def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_fiel monkeypatch.setattr(tool, "_get_app", lambda *args, **kwargs: None) monkeypatch.setattr(tool, "_get_workflow", lambda *args, **kwargs: None) + # Mock user resolution to avoid database access + from unittest.mock import Mock + + mock_user = Mock() + monkeypatch.setattr(tool, "_resolve_user", lambda *args, **kwargs: mock_user) + # replace `WorkflowAppGenerator.generate` 's return value. monkeypatch.setattr( "core.app.apps.workflow.app_generator.WorkflowAppGenerator.generate", lambda *args, **kwargs: {"data": {"error": "oops"}}, ) - monkeypatch.setattr("libs.login.current_user", lambda *args, **kwargs: None) with pytest.raises(ToolInvokeError) as exc_info: # WorkflowTool always returns a generator, so we need to iterate to diff --git a/api/tests/unit_tests/core/variables/test_segment.py b/api/tests/unit_tests/core/variables/test_segment.py index 5cd595088a..af4f96ba23 100644 --- a/api/tests/unit_tests/core/variables/test_segment.py +++ b/api/tests/unit_tests/core/variables/test_segment.py @@ -37,7 +37,7 @@ from core.variables.variables import ( Variable, VariableUnion, ) -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable diff --git a/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py b/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py index 2614424dc7..deff06fc5d 100644 --- a/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py +++ b/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py @@ -1,9 +1,23 @@ +import json from time import time +from unittest.mock import MagicMock, patch import pytest -from core.workflow.entities.graph_runtime_state import GraphRuntimeState -from core.workflow.entities.variable_pool import VariablePool +from core.model_runtime.entities.llm_entities import LLMUsage +from core.workflow.runtime import GraphRuntimeState, ReadOnlyGraphRuntimeStateWrapper, VariablePool + + +class StubCoordinator: + def __init__(self) -> None: + self.state = "initial" + + def dumps(self) -> str: + return json.dumps({"state": self.state}) + + def loads(self, data: str) -> None: + payload = json.loads(data) + self.state = payload["state"] class TestGraphRuntimeState: @@ -95,3 +109,173 @@ class TestGraphRuntimeState: # Test add_tokens validation with pytest.raises(ValueError): state.add_tokens(-1) + + def test_ready_queue_default_instantiation(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + + queue = state.ready_queue + + from core.workflow.graph_engine.ready_queue import InMemoryReadyQueue + + assert isinstance(queue, InMemoryReadyQueue) + assert state.ready_queue is queue + + def test_graph_execution_lazy_instantiation(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + + execution = state.graph_execution + + from core.workflow.graph_engine.domain.graph_execution import GraphExecution + + assert isinstance(execution, GraphExecution) + assert execution.workflow_id == "" + assert state.graph_execution is 
execution + + def test_response_coordinator_configuration(self): + variable_pool = VariablePool() + state = GraphRuntimeState(variable_pool=variable_pool, start_at=time()) + + with pytest.raises(ValueError): + _ = state.response_coordinator + + mock_graph = MagicMock() + with patch("core.workflow.graph_engine.response_coordinator.ResponseStreamCoordinator") as coordinator_cls: + coordinator_instance = MagicMock() + coordinator_cls.return_value = coordinator_instance + + state.configure(graph=mock_graph) + + assert state.response_coordinator is coordinator_instance + coordinator_cls.assert_called_once_with(variable_pool=variable_pool, graph=mock_graph) + + # Configure again with same graph should be idempotent + state.configure(graph=mock_graph) + + other_graph = MagicMock() + with pytest.raises(ValueError): + state.attach_graph(other_graph) + + def test_read_only_wrapper_exposes_additional_state(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + state.configure() + + wrapper = ReadOnlyGraphRuntimeStateWrapper(state) + + assert wrapper.ready_queue_size == 0 + assert wrapper.exceptions_count == 0 + + def test_read_only_wrapper_serializes_runtime_state(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + state.total_tokens = 5 + state.set_output("result", {"success": True}) + state.ready_queue.put("node-1") + + wrapper = ReadOnlyGraphRuntimeStateWrapper(state) + + wrapper_snapshot = json.loads(wrapper.dumps()) + state_snapshot = json.loads(state.dumps()) + + assert wrapper_snapshot == state_snapshot + + def test_dumps_and_loads_roundtrip_with_response_coordinator(self): + variable_pool = VariablePool() + variable_pool.add(("node1", "value"), "payload") + + state = GraphRuntimeState(variable_pool=variable_pool, start_at=time()) + state.total_tokens = 10 + state.node_run_steps = 3 + state.set_output("final", {"result": True}) + usage = LLMUsage.from_metadata( + { + "prompt_tokens": 2, + "completion_tokens": 3, + "total_tokens": 5, + "total_price": "1.23", + "currency": "USD", + "latency": 0.5, + } + ) + state.llm_usage = usage + state.ready_queue.put("node-A") + + graph_execution = state.graph_execution + graph_execution.workflow_id = "wf-123" + graph_execution.exceptions_count = 4 + graph_execution.started = True + + mock_graph = MagicMock() + stub = StubCoordinator() + with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=stub): + state.attach_graph(mock_graph) + + stub.state = "configured" + + snapshot = state.dumps() + + restored = GraphRuntimeState.from_snapshot(snapshot) + + assert restored.total_tokens == 10 + assert restored.node_run_steps == 3 + assert restored.get_output("final") == {"result": True} + assert restored.llm_usage.total_tokens == usage.total_tokens + assert restored.ready_queue.qsize() == 1 + assert restored.ready_queue.get(timeout=0.01) == "node-A" + + restored_segment = restored.variable_pool.get(("node1", "value")) + assert restored_segment is not None + assert restored_segment.value == "payload" + + restored_execution = restored.graph_execution + assert restored_execution.workflow_id == "wf-123" + assert restored_execution.exceptions_count == 4 + assert restored_execution.started is True + + new_stub = StubCoordinator() + with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=new_stub): + restored.attach_graph(mock_graph) + + assert new_stub.state == "configured" + + def test_loads_rehydrates_existing_instance(self): + variable_pool = VariablePool() + 
variable_pool.add(("node", "key"), "value") + + state = GraphRuntimeState(variable_pool=variable_pool, start_at=time()) + state.total_tokens = 7 + state.node_run_steps = 2 + state.set_output("foo", "bar") + state.ready_queue.put("node-1") + + execution = state.graph_execution + execution.workflow_id = "wf-456" + execution.started = True + + mock_graph = MagicMock() + original_stub = StubCoordinator() + with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=original_stub): + state.attach_graph(mock_graph) + + original_stub.state = "configured" + snapshot = state.dumps() + + new_stub = StubCoordinator() + with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=new_stub): + restored = GraphRuntimeState(variable_pool=VariablePool(), start_at=0.0) + restored.attach_graph(mock_graph) + restored.loads(snapshot) + + assert restored.total_tokens == 7 + assert restored.node_run_steps == 2 + assert restored.get_output("foo") == "bar" + assert restored.ready_queue.qsize() == 1 + assert restored.ready_queue.get(timeout=0.01) == "node-1" + + restored_segment = restored.variable_pool.get(("node", "key")) + assert restored_segment is not None + assert restored_segment.value == "value" + + restored_execution = restored.graph_execution + assert restored_execution.workflow_id == "wf-456" + assert restored_execution.started is True + + assert new_stub.state == "configured" diff --git a/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py new file mode 100644 index 0000000000..f9de456b19 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py @@ -0,0 +1,113 @@ +from core.variables.segments import ( + BooleanSegment, + IntegerSegment, + NoneSegment, + StringSegment, +) +from core.workflow.runtime import VariablePool + + +class TestVariablePoolGetAndNestedAttribute: + # + # _get_nested_attribute tests + # + def test__get_nested_attribute_existing_key(self): + pool = VariablePool.empty() + obj = {"a": 123} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert segment.value == 123 + + def test__get_nested_attribute_missing_key(self): + pool = VariablePool.empty() + obj = {"a": 123} + segment = pool._get_nested_attribute(obj, "b") + assert segment is None + + def test__get_nested_attribute_non_dict(self): + pool = VariablePool.empty() + obj = ["not", "a", "dict"] + segment = pool._get_nested_attribute(obj, "a") + assert segment is None + + def test__get_nested_attribute_with_none_value(self): + pool = VariablePool.empty() + obj = {"a": None} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert isinstance(segment, NoneSegment) + + def test__get_nested_attribute_with_empty_string(self): + pool = VariablePool.empty() + obj = {"a": ""} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert isinstance(segment, StringSegment) + assert segment.value == "" + + # + # get tests + # + def test_get_simple_variable(self): + pool = VariablePool.empty() + pool.add(("node1", "var1"), "value1") + segment = pool.get(("node1", "var1")) + assert segment is not None + assert segment.value == "value1" + + def test_get_missing_variable(self): + pool = VariablePool.empty() + result = pool.get(("node1", "unknown")) + assert result is None + + def test_get_with_too_short_selector(self): + pool = VariablePool.empty() + result = pool.get(("only_node",)) + assert result is None + + def 
test_get_nested_object_attribute(self): + pool = VariablePool.empty() + obj_value = {"inner": "hello"} + pool.add(("node1", "obj"), obj_value) + + # simulate selector with nested attr + segment = pool.get(("node1", "obj", "inner")) + assert segment is not None + assert segment.value == "hello" + + def test_get_nested_object_missing_attribute(self): + pool = VariablePool.empty() + obj_value = {"inner": "hello"} + pool.add(("node1", "obj"), obj_value) + + result = pool.get(("node1", "obj", "not_exist")) + assert result is None + + def test_get_nested_object_attribute_with_falsy_values(self): + pool = VariablePool.empty() + obj_value = { + "inner_none": None, + "inner_empty": "", + "inner_zero": 0, + "inner_false": False, + } + pool.add(("node1", "obj"), obj_value) + + segment_none = pool.get(("node1", "obj", "inner_none")) + assert segment_none is not None + assert isinstance(segment_none, NoneSegment) + + segment_empty = pool.get(("node1", "obj", "inner_empty")) + assert segment_empty is not None + assert isinstance(segment_empty, StringSegment) + assert segment_empty.value == "" + + segment_zero = pool.get(("node1", "obj", "inner_zero")) + assert segment_zero is not None + assert isinstance(segment_zero, IntegerSegment) + assert segment_zero.value == 0 + + segment_false = pool.get(("node1", "obj", "inner_false")) + assert segment_false is not None + assert isinstance(segment_false, BooleanSegment) + assert segment_false.value is False diff --git a/api/tests/unit_tests/core/workflow/graph/test_graph_builder.py b/api/tests/unit_tests/core/workflow/graph/test_graph_builder.py new file mode 100644 index 0000000000..15d1dcb48d --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph/test_graph_builder.py @@ -0,0 +1,59 @@ +from unittest.mock import MagicMock + +import pytest + +from core.workflow.enums import NodeType +from core.workflow.graph import Graph +from core.workflow.nodes.base.node import Node + + +def _make_node(node_id: str, node_type: NodeType = NodeType.START) -> Node: + node = MagicMock(spec=Node) + node.id = node_id + node.node_type = node_type + node.execution_type = None # attribute not used in builder path + return node + + +def test_graph_builder_creates_linear_graph(): + builder = Graph.new() + root = _make_node("root", NodeType.START) + mid = _make_node("mid", NodeType.LLM) + end = _make_node("end", NodeType.END) + + graph = builder.add_root(root).add_node(mid).add_node(end).build() + + assert graph.root_node is root + assert graph.nodes == {"root": root, "mid": mid, "end": end} + assert len(graph.edges) == 2 + first_edge = next(iter(graph.edges.values())) + assert first_edge.tail == "root" + assert first_edge.head == "mid" + assert graph.out_edges["mid"] == [edge_id for edge_id, edge in graph.edges.items() if edge.tail == "mid"] + + +def test_graph_builder_supports_custom_predecessor(): + builder = Graph.new() + root = _make_node("root") + branch = _make_node("branch") + other = _make_node("other") + + graph = builder.add_root(root).add_node(branch).add_node(other, from_node_id="root").build() + + outgoing_root = graph.out_edges["root"] + assert len(outgoing_root) == 2 + edge_targets = {graph.edges[eid].head for eid in outgoing_root} + assert edge_targets == {"branch", "other"} + + +def test_graph_builder_validates_usage(): + builder = Graph.new() + node = _make_node("node") + + with pytest.raises(ValueError, match="Root node"): + builder.add_node(node) + + builder.add_root(node) + duplicate = _make_node("node") + with pytest.raises(ValueError, match="Duplicate"): + 
builder.add_node(duplicate) diff --git a/api/tests/unit_tests/core/workflow/graph/test_graph_validation.py b/api/tests/unit_tests/core/workflow/graph/test_graph_validation.py new file mode 100644 index 0000000000..b55d4998c4 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph/test_graph_validation.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +import time +from collections.abc import Mapping +from dataclasses import dataclass +from typing import Any + +import pytest + +from core.app.entities.app_invoke_entities import InvokeFrom +from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType +from core.workflow.graph import Graph +from core.workflow.graph.validation import GraphValidationError +from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig +from core.workflow.nodes.base.node import Node +from core.workflow.system_variable import SystemVariable +from models.enums import UserFrom + + +class _TestNode(Node): + node_type = NodeType.ANSWER + execution_type = NodeExecutionType.EXECUTABLE + + @classmethod + def version(cls) -> str: + return "test" + + def __init__( + self, + *, + id: str, + config: Mapping[str, object], + graph_init_params: GraphInitParams, + graph_runtime_state: GraphRuntimeState, + ) -> None: + super().__init__( + id=id, + config=config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + data = config.get("data", {}) + if isinstance(data, Mapping): + execution_type = data.get("execution_type") + if isinstance(execution_type, str): + self.execution_type = NodeExecutionType(execution_type) + self._base_node_data = BaseNodeData(title=str(data.get("title", self.id))) + self.data: dict[str, object] = {} + + def init_node_data(self, data: Mapping[str, object]) -> None: + title = str(data.get("title", self.id)) + desc = data.get("description") + error_strategy_value = data.get("error_strategy") + error_strategy: ErrorStrategy | None = None + if isinstance(error_strategy_value, ErrorStrategy): + error_strategy = error_strategy_value + elif isinstance(error_strategy_value, str): + error_strategy = ErrorStrategy(error_strategy_value) + self._base_node_data = BaseNodeData( + title=title, + desc=str(desc) if desc is not None else None, + error_strategy=error_strategy, + ) + self.data = dict(data) + + def _run(self): + raise NotImplementedError + + def _get_error_strategy(self) -> ErrorStrategy | None: + return self._base_node_data.error_strategy + + def _get_retry_config(self) -> RetryConfig: + return self._base_node_data.retry_config + + def _get_title(self) -> str: + return self._base_node_data.title + + def _get_description(self) -> str | None: + return self._base_node_data.desc + + def _get_default_value_dict(self) -> dict[str, Any]: + return self._base_node_data.default_value_dict + + def get_base_node_data(self) -> BaseNodeData: + return self._base_node_data + + +@dataclass(slots=True) +class _SimpleNodeFactory: + graph_init_params: GraphInitParams + graph_runtime_state: GraphRuntimeState + + def create_node(self, node_config: Mapping[str, object]) -> _TestNode: + node_id = str(node_config["id"]) + node = _TestNode( + id=node_id, + config=node_config, + graph_init_params=self.graph_init_params, + graph_runtime_state=self.graph_runtime_state, + ) + node.init_node_data(node_config.get("data", {})) + return node + + +@pytest.fixture +def graph_init_dependencies() -> tuple[_SimpleNodeFactory, dict[str, 
object]]: + graph_config: dict[str, object] = {"edges": [], "nodes": []} + init_params = GraphInitParams( + tenant_id="tenant", + app_id="app", + workflow_id="workflow", + graph_config=graph_config, + user_id="user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, + call_depth=0, + ) + variable_pool = VariablePool(system_variables=SystemVariable(user_id="user", files=[]), user_inputs={}) + runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) + factory = _SimpleNodeFactory(graph_init_params=init_params, graph_runtime_state=runtime_state) + return factory, graph_config + + +def test_graph_initialization_runs_default_validators( + graph_init_dependencies: tuple[_SimpleNodeFactory, dict[str, object]], +): + node_factory, graph_config = graph_init_dependencies + graph_config["nodes"] = [ + {"id": "start", "data": {"type": NodeType.START, "title": "Start", "execution_type": NodeExecutionType.ROOT}}, + {"id": "answer", "data": {"type": NodeType.ANSWER, "title": "Answer"}}, + ] + graph_config["edges"] = [ + {"source": "start", "target": "answer", "sourceHandle": "success"}, + ] + + graph = Graph.init(graph_config=graph_config, node_factory=node_factory) + + assert graph.root_node.id == "start" + assert "answer" in graph.nodes + + +def test_graph_validation_fails_for_unknown_edge_targets( + graph_init_dependencies: tuple[_SimpleNodeFactory, dict[str, object]], +) -> None: + node_factory, graph_config = graph_init_dependencies + graph_config["nodes"] = [ + {"id": "start", "data": {"type": NodeType.START, "title": "Start", "execution_type": NodeExecutionType.ROOT}}, + ] + graph_config["edges"] = [ + {"source": "start", "target": "missing", "sourceHandle": "success"}, + ] + + with pytest.raises(GraphValidationError) as exc: + Graph.init(graph_config=graph_config, node_factory=node_factory) + + assert any(issue.code == "MISSING_NODE" for issue in exc.value.issues) + + +def test_graph_promotes_fail_branch_nodes_to_branch_execution_type( + graph_init_dependencies: tuple[_SimpleNodeFactory, dict[str, object]], +) -> None: + node_factory, graph_config = graph_init_dependencies + graph_config["nodes"] = [ + {"id": "start", "data": {"type": NodeType.START, "title": "Start", "execution_type": NodeExecutionType.ROOT}}, + { + "id": "branch", + "data": { + "type": NodeType.IF_ELSE, + "title": "Branch", + "error_strategy": ErrorStrategy.FAIL_BRANCH, + }, + }, + ] + graph_config["edges"] = [ + {"source": "start", "target": "branch", "sourceHandle": "success"}, + ] + + graph = Graph.init(graph_config=graph_config, node_factory=node_factory) + + assert graph.nodes["branch"].execution_type == NodeExecutionType.BRANCH diff --git a/api/tests/unit_tests/core/workflow/graph_engine/README.md b/api/tests/unit_tests/core/workflow/graph_engine/README.md index bff82b3ac4..3fff4cf6a9 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/README.md +++ b/api/tests/unit_tests/core/workflow/graph_engine/README.md @@ -20,9 +20,6 @@ The TableTestRunner (`test_table_runner.py`) provides a robust table-driven test - **Mock configuration** - Seamless integration with the auto-mock system - **Performance metrics** - Track execution times and bottlenecks - **Detailed error reporting** - Comprehensive failure diagnostics -- **Test tagging** - Organize and filter tests by tags -- **Retry mechanism** - Handle flaky tests gracefully -- **Custom validators** - Define custom validation logic ### Basic Usage @@ -68,49 +65,6 @@ suite_result = runner.run_table_tests( 
print(f"Success rate: {suite_result.success_rate:.1f}%") ``` -#### Test Tagging and Filtering - -```python -test_case = WorkflowTestCase( - fixture_path="workflow", - inputs={}, - expected_outputs={}, - tags=["smoke", "critical"], -) - -# Run only tests with specific tags -suite_result = runner.run_table_tests( - test_cases, - tags_filter=["smoke"] -) -``` - -#### Retry Mechanism - -```python -test_case = WorkflowTestCase( - fixture_path="flaky_workflow", - inputs={}, - expected_outputs={}, - retry_count=2, # Retry up to 2 times on failure -) -``` - -#### Custom Validators - -```python -def custom_validator(outputs: dict) -> bool: - # Custom validation logic - return "error" not in outputs.get("status", "") - -test_case = WorkflowTestCase( - fixture_path="workflow", - inputs={}, - expected_outputs={"status": "success"}, - custom_validator=custom_validator, -) -``` - #### Event Sequence Validation ```python diff --git a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py index 2c08fff27b..8677325d4e 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py @@ -35,11 +35,15 @@ class TestRedisChannel: """Test sending a command to Redis.""" mock_redis = MagicMock() mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + context = MagicMock() + context.__enter__.return_value = mock_pipe + context.__exit__.return_value = None + mock_redis.pipeline.return_value = context channel = RedisChannel(mock_redis, "test:key", 3600) + pending_key = "test:key:pending" + # Create a test command command = GraphEngineCommand(command_type=CommandType.ABORT) @@ -55,6 +59,7 @@ class TestRedisChannel: # Verify expire was set mock_pipe.expire.assert_called_once_with("test:key", 3600) + mock_pipe.set.assert_called_once_with(pending_key, "1", ex=3600) # Verify execute was called mock_pipe.execute.assert_called_once() @@ -62,33 +67,48 @@ class TestRedisChannel: def test_fetch_commands_empty(self): """Test fetching commands when Redis list is empty.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context] - # Simulate empty list - mock_pipe.execute.return_value = [[], 1] # Empty list, delete successful + # No pending marker + pending_pipe.execute.return_value = [None, 0] + mock_redis.llen.return_value = 0 channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() assert commands == [] - mock_pipe.lrange.assert_called_once_with("test:key", 0, -1) - mock_pipe.delete.assert_called_once_with("test:key") + mock_redis.pipeline.assert_called_once() + fetch_pipe.lrange.assert_not_called() + fetch_pipe.delete.assert_not_called() def test_fetch_commands_with_abort_command(self): """Test fetching abort commands 
from Redis.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Create abort command data abort_command = AbortCommand() command_json = json.dumps(abort_command.model_dump()) # Simulate Redis returning one command - mock_pipe.execute.return_value = [[command_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -100,9 +120,15 @@ class TestRedisChannel: def test_fetch_commands_multiple(self): """Test fetching multiple commands from Redis.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Create multiple commands command1 = GraphEngineCommand(command_type=CommandType.ABORT) @@ -112,7 +138,8 @@ class TestRedisChannel: command2_json = json.dumps(command2.model_dump()) # Simulate Redis returning multiple commands - mock_pipe.execute.return_value = [[command1_json.encode(), command2_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command1_json.encode(), command2_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -124,9 +151,15 @@ class TestRedisChannel: def test_fetch_commands_skips_invalid_json(self): """Test that invalid JSON commands are skipped.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mix valid and invalid JSON valid_command = AbortCommand() @@ -134,7 +167,8 @@ class TestRedisChannel: invalid_json = b"invalid json {" # Simulate Redis returning mixed valid/invalid commands - mock_pipe.execute.return_value = [[invalid_json, valid_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[invalid_json, valid_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -147,7 +181,7 @@ class TestRedisChannel: 
"""Test deserializing an abort command.""" channel = RedisChannel(MagicMock(), "test:key") - abort_data = {"command_type": CommandType.ABORT.value} + abort_data = {"command_type": CommandType.ABORT} command = channel._deserialize_command(abort_data) assert isinstance(command, AbortCommand) @@ -158,7 +192,7 @@ class TestRedisChannel: channel = RedisChannel(MagicMock(), "test:key") # For now, only ABORT is supported, but test generic handling - generic_data = {"command_type": CommandType.ABORT.value} + generic_data = {"command_type": CommandType.ABORT} command = channel._deserialize_command(generic_data) assert command is not None @@ -187,13 +221,20 @@ class TestRedisChannel: def test_atomic_fetch_and_clear(self): """Test that fetch_commands atomically fetches and clears the list.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] command = AbortCommand() command_json = json.dumps(command.model_dump()) - mock_pipe.execute.return_value = [[command_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") @@ -202,7 +243,29 @@ class TestRedisChannel: assert len(commands) == 1 # Verify both lrange and delete were called in the pipeline - assert mock_pipe.lrange.call_count == 1 - assert mock_pipe.delete.call_count == 1 - mock_pipe.lrange.assert_called_with("test:key", 0, -1) - mock_pipe.delete.assert_called_with("test:key") + assert fetch_pipe.lrange.call_count == 1 + assert fetch_pipe.delete.call_count == 1 + fetch_pipe.lrange.assert_called_with("test:key", 0, -1) + fetch_pipe.delete.assert_called_with("test:key") + + def test_fetch_commands_without_pending_marker_returns_empty(self): + """Ensure we avoid unnecessary list reads when pending flag is missing.""" + mock_redis = MagicMock() + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] + + # Pending flag absent + pending_pipe.execute.return_value = [None, 0] + channel = RedisChannel(mock_redis, "test:key") + commands = channel.fetch_commands() + + assert commands == [] + mock_redis.llen.assert_not_called() + assert mock_redis.pipeline.call_count == 1 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py b/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py index d556bb138e..2b8f04979d 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import datetime -from 
core.workflow.entities import GraphRuntimeState, VariablePool from core.workflow.enums import NodeExecutionType, NodeState, NodeType, WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.graph_engine.domain.graph_execution import GraphExecution @@ -16,6 +15,7 @@ from core.workflow.graph_engine.response_coordinator.coordinator import Response from core.workflow.graph_events import NodeRunRetryEvent, NodeRunStartedEvent from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.entities import RetryConfig +from core.workflow.runtime import GraphRuntimeState, VariablePool class _StubEdgeProcessor: diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py b/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py index 9fec855a93..d451e7e608 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py @@ -3,12 +3,12 @@ import time from unittest.mock import MagicMock -from core.workflow.entities import GraphRuntimeState, VariablePool from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel -from core.workflow.graph_engine.entities.commands import AbortCommand -from core.workflow.graph_events import GraphRunAbortedEvent, GraphRunStartedEvent +from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, PauseCommand +from core.workflow.graph_events import GraphRunAbortedEvent, GraphRunPausedEvent, GraphRunStartedEvent +from core.workflow.runtime import GraphRuntimeState, VariablePool def test_abort_command(): @@ -100,8 +100,57 @@ def test_redis_channel_serialization(): assert command_data["command_type"] == "abort" assert command_data["reason"] == "Test abort" + # Test pause command serialization + pause_command = PauseCommand(reason="User requested pause") + channel.send_command(pause_command) -if __name__ == "__main__": - test_abort_command() - test_redis_channel_serialization() - print("All tests passed!") + assert len(mock_pipeline.rpush.call_args_list) == 2 + second_call_args = mock_pipeline.rpush.call_args_list[1] + pause_command_json = second_call_args[0][1] + pause_command_data = json.loads(pause_command_json) + assert pause_command_data["command_type"] == CommandType.PAUSE.value + assert pause_command_data["reason"] == "User requested pause" + + +def test_pause_command(): + """Test that GraphEngine properly handles pause commands.""" + + shared_runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + mock_graph.root_node.id = "start" + + mock_start_node = MagicMock() + mock_start_node.state = None + mock_start_node.id = "start" + mock_start_node.graph_runtime_state = shared_runtime_state + mock_graph.nodes["start"] = mock_start_node + + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + command_channel = InMemoryChannel() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=shared_runtime_state, + command_channel=command_channel, + ) + + pause_command = PauseCommand(reason="User requested pause") + command_channel.send_command(pause_command) + + events = list(engine.run()) + + assert 
any(isinstance(e, GraphRunStartedEvent) for e in events) + pause_events = [e for e in events if isinstance(e, GraphRunPausedEvent)] + assert len(pause_events) == 1 + assert pause_events[0].reason == "User requested pause" + + graph_execution = engine.graph_runtime_state.graph_execution + assert graph_execution.is_paused + assert graph_execution.pause_reason == "User requested pause" diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py b/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py index fc38393e75..96926797ec 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py @@ -7,14 +7,11 @@ This test suite validates the behavior of a workflow that: 3. Handles multiple answer nodes with different outputs """ -import pytest - from core.workflow.graph_events import ( GraphRunStartedEvent, GraphRunSucceededEvent, NodeRunStartedEvent, NodeRunStreamChunkEvent, - NodeRunSucceededEvent, ) from .test_mock_config import MockConfigBuilder @@ -29,7 +26,6 @@ class TestComplexBranchWorkflow: self.runner = TableTestRunner() self.fixture_path = "test_complex_branch" - @pytest.mark.skip(reason="output in this workflow can be random") def test_hello_branch_with_llm(self): """ Test when query contains 'hello' - should trigger true branch. @@ -41,42 +37,17 @@ class TestComplexBranchWorkflow: fixture_path=self.fixture_path, query="hello world", expected_outputs={ - "answer": f"{mock_text_1}contains 'hello'", + "answer": f"contains 'hello'{mock_text_1}", }, description="Basic hello case with parallel LLM execution", use_auto_mock=True, mock_config=(MockConfigBuilder().with_node_output("1755502777322", {"text": mock_text_1}).build()), - expected_event_sequence=[ - GraphRunStartedEvent, - # Start - NodeRunStartedEvent, - NodeRunSucceededEvent, - # If/Else (no streaming) - NodeRunStartedEvent, - NodeRunSucceededEvent, - # LLM (with streaming) - NodeRunStartedEvent, - ] - # LLM - + [NodeRunStreamChunkEvent] * (mock_text_1.count(" ") + 2) - + [ - # Answer's text - NodeRunStreamChunkEvent, - NodeRunSucceededEvent, - # Answer - NodeRunStartedEvent, - NodeRunSucceededEvent, - # Answer 2 - NodeRunStartedEvent, - NodeRunSucceededEvent, - GraphRunSucceededEvent, - ], ), WorkflowTestCase( fixture_path=self.fixture_path, query="say hello to everyone", expected_outputs={ - "answer": "Mocked response for greetingcontains 'hello'", + "answer": "contains 'hello'Mocked response for greeting", }, description="Hello in middle of sentence", use_auto_mock=True, @@ -93,6 +64,35 @@ class TestComplexBranchWorkflow: for result in suite_result.results: assert result.success, f"Test '{result.test_case.description}' failed: {result.error}" assert result.actual_outputs + assert any(isinstance(event, GraphRunStartedEvent) for event in result.events) + assert any(isinstance(event, GraphRunSucceededEvent) for event in result.events) + + start_index = next( + idx for idx, event in enumerate(result.events) if isinstance(event, GraphRunStartedEvent) + ) + success_index = max( + idx for idx, event in enumerate(result.events) if isinstance(event, GraphRunSucceededEvent) + ) + assert start_index < success_index + + started_node_ids = {event.node_id for event in result.events if isinstance(event, NodeRunStartedEvent)} + assert {"1755502773326", "1755502777322"}.issubset(started_node_ids), ( + f"Branch or LLM nodes missing in events: {started_node_ids}" + ) + + 
assert any(isinstance(event, NodeRunStreamChunkEvent) for event in result.events), ( + "Expected streaming chunks from LLM execution" + ) + + llm_start_index = next( + idx + for idx, event in enumerate(result.events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == "1755502777322" + ) + assert any( + idx > llm_start_index and isinstance(event, NodeRunStreamChunkEvent) + for idx, event in enumerate(result.events) + ), "Streaming chunks should follow LLM node start" def test_non_hello_branch_with_llm(self): """ diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py b/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py new file mode 100644 index 0000000000..3fe4ce3400 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py @@ -0,0 +1,109 @@ +"""Tests for dispatcher command checking behavior.""" + +from __future__ import annotations + +import queue +from datetime import datetime + +from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus +from core.workflow.graph_engine.event_management.event_manager import EventManager +from core.workflow.graph_engine.orchestration.dispatcher import Dispatcher +from core.workflow.graph_events import NodeRunStartedEvent, NodeRunSucceededEvent +from core.workflow.node_events import NodeRunResult + + +class _StubExecutionCoordinator: + """Stub execution coordinator that tracks command checks.""" + + def __init__(self) -> None: + self.command_checks = 0 + self.scaling_checks = 0 + self._execution_complete = False + self.mark_complete_called = False + self.failed = False + self._paused = False + + def check_commands(self) -> None: + self.command_checks += 1 + + def check_scaling(self) -> None: + self.scaling_checks += 1 + + @property + def is_paused(self) -> bool: + return self._paused + + def is_execution_complete(self) -> bool: + return self._execution_complete + + def mark_complete(self) -> None: + self.mark_complete_called = True + + def mark_failed(self, error: Exception) -> None: # pragma: no cover - defensive, not triggered in tests + self.failed = True + + def set_execution_complete(self) -> None: + self._execution_complete = True + + +class _StubEventHandler: + """Minimal event handler that marks execution complete after handling an event.""" + + def __init__(self, coordinator: _StubExecutionCoordinator) -> None: + self._coordinator = coordinator + self.events = [] + + def dispatch(self, event) -> None: + self.events.append(event) + self._coordinator.set_execution_complete() + + +def _run_dispatcher_for_event(event) -> int: + """Run the dispatcher loop for a single event and return command check count.""" + event_queue: queue.Queue = queue.Queue() + event_queue.put(event) + + coordinator = _StubExecutionCoordinator() + event_handler = _StubEventHandler(coordinator) + event_manager = EventManager() + + dispatcher = Dispatcher( + event_queue=event_queue, + event_handler=event_handler, + event_collector=event_manager, + execution_coordinator=coordinator, + ) + + dispatcher._dispatcher_loop() + + return coordinator.command_checks + + +def _make_started_event() -> NodeRunStartedEvent: + return NodeRunStartedEvent( + id="start-event", + node_id="node-1", + node_type=NodeType.CODE, + node_title="Test Node", + start_at=datetime.utcnow(), + ) + + +def _make_succeeded_event() -> NodeRunSucceededEvent: + return NodeRunSucceededEvent( + id="success-event", + node_id="node-1", + node_type=NodeType.CODE, + node_title="Test Node", + start_at=datetime.utcnow(), + 
node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED), + ) + + +def test_dispatcher_checks_commands_during_idle_and_on_completion() -> None: + """Dispatcher polls commands when idle and after completion events.""" + started_checks = _run_dispatcher_for_event(_make_started_event()) + succeeded_checks = _run_dispatcher_for_event(_make_succeeded_event()) + + assert started_checks == 1 + assert succeeded_checks == 2 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_execution_coordinator.py b/api/tests/unit_tests/core/workflow/graph_engine/test_execution_coordinator.py new file mode 100644 index 0000000000..025393e435 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_execution_coordinator.py @@ -0,0 +1,62 @@ +"""Unit tests for the execution coordinator orchestration logic.""" + +from unittest.mock import MagicMock + +from core.workflow.graph_engine.command_processing.command_processor import CommandProcessor +from core.workflow.graph_engine.domain.graph_execution import GraphExecution +from core.workflow.graph_engine.graph_state_manager import GraphStateManager +from core.workflow.graph_engine.orchestration.execution_coordinator import ExecutionCoordinator +from core.workflow.graph_engine.worker_management.worker_pool import WorkerPool + + +def _build_coordinator(graph_execution: GraphExecution) -> tuple[ExecutionCoordinator, MagicMock, MagicMock]: + command_processor = MagicMock(spec=CommandProcessor) + state_manager = MagicMock(spec=GraphStateManager) + worker_pool = MagicMock(spec=WorkerPool) + + coordinator = ExecutionCoordinator( + graph_execution=graph_execution, + state_manager=state_manager, + command_processor=command_processor, + worker_pool=worker_pool, + ) + return coordinator, state_manager, worker_pool + + +def test_handle_pause_stops_workers_and_clears_state() -> None: + """Paused execution should stop workers and clear executing state.""" + graph_execution = GraphExecution(workflow_id="workflow") + graph_execution.start() + graph_execution.pause("Awaiting human input") + + coordinator, state_manager, worker_pool = _build_coordinator(graph_execution) + + coordinator.handle_pause_if_needed() + + worker_pool.stop.assert_called_once_with() + state_manager.clear_executing.assert_called_once_with() + + +def test_handle_pause_noop_when_execution_running() -> None: + """Running execution should not trigger pause handling.""" + graph_execution = GraphExecution(workflow_id="workflow") + graph_execution.start() + + coordinator, state_manager, worker_pool = _build_coordinator(graph_execution) + + coordinator.handle_pause_if_needed() + + worker_pool.stop.assert_not_called() + state_manager.clear_executing.assert_not_called() + + +def test_is_execution_complete_when_paused() -> None: + """Paused execution should be treated as complete.""" + graph_execution = GraphExecution(workflow_id="workflow") + graph_execution.start() + graph_execution.pause("Awaiting input") + + coordinator, state_manager, _worker_pool = _build_coordinator(graph_execution) + state_manager.is_execution_complete.return_value = False + + assert coordinator.is_execution_complete() diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_multi_branch.py b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_multi_branch.py new file mode 100644 index 0000000000..c9e7e31e52 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_multi_branch.py @@ -0,0 +1,341 @@ +import time +from 
collections.abc import Iterable + +from core.model_runtime.entities.llm_entities import LLMMode +from core.model_runtime.entities.message_entities import PromptMessageRole +from core.workflow.entities import GraphInitParams +from core.workflow.graph import Graph +from core.workflow.graph_events import ( + GraphRunPausedEvent, + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunPauseRequestedEvent, + NodeRunStartedEvent, + NodeRunStreamChunkEvent, + NodeRunSucceededEvent, +) +from core.workflow.nodes.base.entities import VariableSelector +from core.workflow.nodes.end.end_node import EndNode +from core.workflow.nodes.end.entities import EndNodeData +from core.workflow.nodes.human_input import HumanInputNode +from core.workflow.nodes.human_input.entities import HumanInputNodeData +from core.workflow.nodes.llm.entities import ( + ContextConfig, + LLMNodeChatModelMessage, + LLMNodeData, + ModelConfig, + VisionConfig, +) +from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.nodes.start.start_node import StartNode +from core.workflow.runtime import GraphRuntimeState, VariablePool +from core.workflow.system_variable import SystemVariable + +from .test_mock_config import MockConfig +from .test_mock_nodes import MockLLMNode +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def _build_branching_graph(mock_config: MockConfig) -> tuple[Graph, GraphRuntimeState]: + graph_config: dict[str, object] = {"nodes": [], "edges": []} + graph_init_params = GraphInitParams( + tenant_id="tenant", + app_id="app", + workflow_id="workflow", + graph_config=graph_config, + user_id="user", + user_from="account", + invoke_from="debugger", + call_depth=0, + ) + + variable_pool = VariablePool( + system_variables=SystemVariable(user_id="user", app_id="app", workflow_id="workflow"), + user_inputs={}, + conversation_variables=[], + ) + graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) + + start_config = {"id": "start", "data": StartNodeData(title="Start", variables=[]).model_dump()} + start_node = StartNode( + id=start_config["id"], + config=start_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + start_node.init_node_data(start_config["data"]) + + def _create_llm_node(node_id: str, title: str, prompt_text: str) -> MockLLMNode: + llm_data = LLMNodeData( + title=title, + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + prompt_template=[ + LLMNodeChatModelMessage( + text=prompt_text, + role=PromptMessageRole.USER, + edition_type="basic", + ) + ], + context=ContextConfig(enabled=False, variable_selector=None), + vision=VisionConfig(enabled=False), + reasoning_format="tagged", + ) + llm_config = {"id": node_id, "data": llm_data.model_dump()} + llm_node = MockLLMNode( + id=node_id, + config=llm_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + mock_config=mock_config, + ) + llm_node.init_node_data(llm_config["data"]) + return llm_node + + llm_initial = _create_llm_node("llm_initial", "Initial LLM", "Initial stream") + + human_data = HumanInputNodeData( + title="Human Input", + required_variables=["human.input_ready"], + pause_reason="Awaiting human input", + ) + human_config = {"id": "human", "data": human_data.model_dump()} + human_node = HumanInputNode( + id=human_config["id"], + config=human_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + 
human_node.init_node_data(human_config["data"]) + + llm_primary = _create_llm_node("llm_primary", "Primary LLM", "Primary stream output") + llm_secondary = _create_llm_node("llm_secondary", "Secondary LLM", "Secondary") + + end_primary_data = EndNodeData( + title="End Primary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="primary_text", value_selector=["llm_primary", "text"]), + ], + desc=None, + ) + end_primary_config = {"id": "end_primary", "data": end_primary_data.model_dump()} + end_primary = EndNode( + id=end_primary_config["id"], + config=end_primary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_primary.init_node_data(end_primary_config["data"]) + + end_secondary_data = EndNodeData( + title="End Secondary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="secondary_text", value_selector=["llm_secondary", "text"]), + ], + desc=None, + ) + end_secondary_config = {"id": "end_secondary", "data": end_secondary_data.model_dump()} + end_secondary = EndNode( + id=end_secondary_config["id"], + config=end_secondary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_secondary.init_node_data(end_secondary_config["data"]) + + graph = ( + Graph.new() + .add_root(start_node) + .add_node(llm_initial) + .add_node(human_node) + .add_node(llm_primary, from_node_id="human", source_handle="primary") + .add_node(end_primary, from_node_id="llm_primary") + .add_node(llm_secondary, from_node_id="human", source_handle="secondary") + .add_node(end_secondary, from_node_id="llm_secondary") + .build() + ) + return graph, graph_runtime_state + + +def _expected_mock_llm_chunks(text: str) -> list[str]: + chunks: list[str] = [] + for index, word in enumerate(text.split(" ")): + chunk = word if index == 0 else f" {word}" + chunks.append(chunk) + chunks.append("") + return chunks + + +def _assert_stream_chunk_sequence( + chunk_events: Iterable[NodeRunStreamChunkEvent], + expected_nodes: list[str], + expected_chunks: list[str], +) -> None: + actual_nodes = [event.node_id for event in chunk_events] + actual_chunks = [event.chunk for event in chunk_events] + assert actual_nodes == expected_nodes + assert actual_chunks == expected_chunks + + +def test_human_input_llm_streaming_across_multiple_branches() -> None: + mock_config = MockConfig() + mock_config.set_node_outputs("llm_initial", {"text": "Initial stream"}) + mock_config.set_node_outputs("llm_primary", {"text": "Primary stream output"}) + mock_config.set_node_outputs("llm_secondary", {"text": "Secondary"}) + + branch_scenarios = [ + { + "handle": "primary", + "resume_llm": "llm_primary", + "end_node": "end_primary", + "expected_pre_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), # cached output before branch completes + ("end_primary", ["\n"]), # literal segment emitted when end_primary session activates + ], + "expected_post_chunks": [ + ("llm_primary", _expected_mock_llm_chunks("Primary stream output")), # live stream from chosen branch + ], + }, + { + "handle": "secondary", + "resume_llm": "llm_secondary", + "end_node": "end_secondary", + "expected_pre_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), # cached output before branch completes + ("end_secondary", ["\n"]), # literal segment emitted when end_secondary session activates + ], + 
"expected_post_chunks": [ + ("llm_secondary", _expected_mock_llm_chunks("Secondary")), # live stream from chosen branch + ], + }, + ] + + for scenario in branch_scenarios: + runner = TableTestRunner() + + def initial_graph_factory() -> tuple[Graph, GraphRuntimeState]: + return _build_branching_graph(mock_config) + + initial_case = WorkflowTestCase( + description="HumanInput pause before branching decision", + graph_factory=initial_graph_factory, + expected_event_sequence=[ + GraphRunStartedEvent, # initial run: graph execution starts + NodeRunStartedEvent, # start node begins execution + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial starts streaming + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # human node begins and issues pause + NodeRunPauseRequestedEvent, # human node requests pause awaiting input + GraphRunPausedEvent, # graph run pauses awaiting resume + ], + ) + + initial_result = runner.run_test_case(initial_case) + + assert initial_result.success, initial_result.event_mismatch_details + assert not any(isinstance(event, NodeRunStreamChunkEvent) for event in initial_result.events) + + graph_runtime_state = initial_result.graph_runtime_state + graph = initial_result.graph + assert graph_runtime_state is not None + assert graph is not None + + graph_runtime_state.variable_pool.add(("human", "input_ready"), True) + graph_runtime_state.variable_pool.add(("human", "edge_source_handle"), scenario["handle"]) + graph_runtime_state.graph_execution.pause_reason = None + + pre_chunk_count = sum(len(chunks) for _, chunks in scenario["expected_pre_chunks"]) + post_chunk_count = sum(len(chunks) for _, chunks in scenario["expected_post_chunks"]) + + expected_resume_sequence: list[type] = ( + [ + GraphRunStartedEvent, + NodeRunStartedEvent, + ] + + [NodeRunStreamChunkEvent] * pre_chunk_count + + [ + NodeRunSucceededEvent, + NodeRunStartedEvent, + ] + + [NodeRunStreamChunkEvent] * post_chunk_count + + [ + NodeRunSucceededEvent, + NodeRunStartedEvent, + NodeRunSucceededEvent, + GraphRunSucceededEvent, + ] + ) + + def resume_graph_factory( + graph_snapshot: Graph = graph, + state_snapshot: GraphRuntimeState = graph_runtime_state, + ) -> tuple[Graph, GraphRuntimeState]: + return graph_snapshot, state_snapshot + + resume_case = WorkflowTestCase( + description=f"HumanInput resumes via {scenario['handle']} branch", + graph_factory=resume_graph_factory, + expected_event_sequence=expected_resume_sequence, + ) + + resume_result = runner.run_test_case(resume_case) + + assert resume_result.success, resume_result.event_mismatch_details + + resume_events = resume_result.events + + chunk_events = [event for event in resume_events if isinstance(event, NodeRunStreamChunkEvent)] + assert len(chunk_events) == pre_chunk_count + post_chunk_count + + pre_chunk_events = chunk_events[:pre_chunk_count] + post_chunk_events = chunk_events[pre_chunk_count:] + + expected_pre_nodes: list[str] = [] + expected_pre_chunks: list[str] = [] + for node_id, chunks in scenario["expected_pre_chunks"]: + expected_pre_nodes.extend([node_id] * len(chunks)) + expected_pre_chunks.extend(chunks) + _assert_stream_chunk_sequence(pre_chunk_events, expected_pre_nodes, expected_pre_chunks) + + expected_post_nodes: list[str] = [] + expected_post_chunks: list[str] = [] + for node_id, chunks in scenario["expected_post_chunks"]: + expected_post_nodes.extend([node_id] * len(chunks)) + expected_post_chunks.extend(chunks) + _assert_stream_chunk_sequence(post_chunk_events, 
expected_post_nodes, expected_post_chunks) + + human_success_index = next( + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "human" + ) + pre_indices = [ + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and index < human_success_index + ] + assert pre_indices == list(range(2, 2 + pre_chunk_count)) + + resume_chunk_indices = [ + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and event.node_id == scenario["resume_llm"] + ] + assert resume_chunk_indices, "Expected streaming output from the selected branch" + resume_start_index = next( + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == scenario["resume_llm"] + ) + resume_success_index = next( + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == scenario["resume_llm"] + ) + assert resume_start_index < min(resume_chunk_indices) + assert max(resume_chunk_indices) < resume_success_index + + started_nodes = [event.node_id for event in resume_events if isinstance(event, NodeRunStartedEvent)] + assert started_nodes == ["human", scenario["resume_llm"], scenario["end_node"]] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_single_branch.py b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_single_branch.py new file mode 100644 index 0000000000..27d264365d --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_single_branch.py @@ -0,0 +1,297 @@ +import time + +from core.model_runtime.entities.llm_entities import LLMMode +from core.model_runtime.entities.message_entities import PromptMessageRole +from core.workflow.entities import GraphInitParams +from core.workflow.graph import Graph +from core.workflow.graph_events import ( + GraphRunPausedEvent, + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunPauseRequestedEvent, + NodeRunStartedEvent, + NodeRunStreamChunkEvent, + NodeRunSucceededEvent, +) +from core.workflow.nodes.base.entities import VariableSelector +from core.workflow.nodes.end.end_node import EndNode +from core.workflow.nodes.end.entities import EndNodeData +from core.workflow.nodes.human_input import HumanInputNode +from core.workflow.nodes.human_input.entities import HumanInputNodeData +from core.workflow.nodes.llm.entities import ( + ContextConfig, + LLMNodeChatModelMessage, + LLMNodeData, + ModelConfig, + VisionConfig, +) +from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.nodes.start.start_node import StartNode +from core.workflow.runtime import GraphRuntimeState, VariablePool +from core.workflow.system_variable import SystemVariable + +from .test_mock_config import MockConfig +from .test_mock_nodes import MockLLMNode +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def _build_llm_human_llm_graph(mock_config: MockConfig) -> tuple[Graph, GraphRuntimeState]: + graph_config: dict[str, object] = {"nodes": [], "edges": []} + graph_init_params = GraphInitParams( + tenant_id="tenant", + app_id="app", + workflow_id="workflow", + graph_config=graph_config, + user_id="user", + user_from="account", + invoke_from="debugger", + call_depth=0, + ) + + variable_pool = VariablePool( + system_variables=SystemVariable(user_id="user", app_id="app", workflow_id="workflow"), + user_inputs={}, + 
conversation_variables=[], + ) + graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) + + start_config = {"id": "start", "data": StartNodeData(title="Start", variables=[]).model_dump()} + start_node = StartNode( + id=start_config["id"], + config=start_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + start_node.init_node_data(start_config["data"]) + + def _create_llm_node(node_id: str, title: str, prompt_text: str) -> MockLLMNode: + llm_data = LLMNodeData( + title=title, + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + prompt_template=[ + LLMNodeChatModelMessage( + text=prompt_text, + role=PromptMessageRole.USER, + edition_type="basic", + ) + ], + context=ContextConfig(enabled=False, variable_selector=None), + vision=VisionConfig(enabled=False), + reasoning_format="tagged", + ) + llm_config = {"id": node_id, "data": llm_data.model_dump()} + llm_node = MockLLMNode( + id=node_id, + config=llm_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + mock_config=mock_config, + ) + llm_node.init_node_data(llm_config["data"]) + return llm_node + + llm_first = _create_llm_node("llm_initial", "Initial LLM", "Initial prompt") + + human_data = HumanInputNodeData( + title="Human Input", + required_variables=["human.input_ready"], + pause_reason="Awaiting human input", + ) + human_config = {"id": "human", "data": human_data.model_dump()} + human_node = HumanInputNode( + id=human_config["id"], + config=human_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + human_node.init_node_data(human_config["data"]) + + llm_second = _create_llm_node("llm_resume", "Follow-up LLM", "Follow-up prompt") + + end_data = EndNodeData( + title="End", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="resume_text", value_selector=["llm_resume", "text"]), + ], + desc=None, + ) + end_config = {"id": "end", "data": end_data.model_dump()} + end_node = EndNode( + id=end_config["id"], + config=end_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_node.init_node_data(end_config["data"]) + + graph = ( + Graph.new() + .add_root(start_node) + .add_node(llm_first) + .add_node(human_node) + .add_node(llm_second) + .add_node(end_node) + .build() + ) + return graph, graph_runtime_state + + +def _expected_mock_llm_chunks(text: str) -> list[str]: + chunks: list[str] = [] + for index, word in enumerate(text.split(" ")): + chunk = word if index == 0 else f" {word}" + chunks.append(chunk) + chunks.append("") + return chunks + + +def test_human_input_llm_streaming_order_across_pause() -> None: + runner = TableTestRunner() + + initial_text = "Hello, pause" + resume_text = "Welcome back!" 
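+    # Chunk expectations below assume MockLLMNode streams word-by-word plus a final
+    # empty chunk, mirroring _expected_mock_llm_chunks above.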
+ + mock_config = MockConfig() + mock_config.set_node_outputs("llm_initial", {"text": initial_text}) + mock_config.set_node_outputs("llm_resume", {"text": resume_text}) + + expected_initial_sequence: list[type] = [ + GraphRunStartedEvent, # graph run begins + NodeRunStartedEvent, # start node begins + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial begins streaming + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # human node begins and requests pause + NodeRunPauseRequestedEvent, # human node pause requested + GraphRunPausedEvent, # graph run pauses awaiting resume + ] + + def graph_factory() -> tuple[Graph, GraphRuntimeState]: + return _build_llm_human_llm_graph(mock_config) + + initial_case = WorkflowTestCase( + description="HumanInput pause preserves LLM streaming order", + graph_factory=graph_factory, + expected_event_sequence=expected_initial_sequence, + ) + + initial_result = runner.run_test_case(initial_case) + + assert initial_result.success, initial_result.event_mismatch_details + + initial_events = initial_result.events + initial_chunks = _expected_mock_llm_chunks(initial_text) + + initial_stream_chunk_events = [event for event in initial_events if isinstance(event, NodeRunStreamChunkEvent)] + assert initial_stream_chunk_events == [] + + pause_index = next(i for i, event in enumerate(initial_events) if isinstance(event, GraphRunPausedEvent)) + llm_succeeded_index = next( + i + for i, event in enumerate(initial_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "llm_initial" + ) + assert llm_succeeded_index < pause_index + + graph_runtime_state = initial_result.graph_runtime_state + graph = initial_result.graph + assert graph_runtime_state is not None + assert graph is not None + + coordinator = graph_runtime_state.response_coordinator + stream_buffers = coordinator._stream_buffers # Tests may access internals for assertions + assert ("llm_initial", "text") in stream_buffers + initial_stream_chunks = [event.chunk for event in stream_buffers[("llm_initial", "text")]] + assert initial_stream_chunks == initial_chunks + assert ("llm_resume", "text") not in stream_buffers + + resume_chunks = _expected_mock_llm_chunks(resume_text) + expected_resume_sequence: list[type] = [ + GraphRunStartedEvent, # resumed graph run begins + NodeRunStartedEvent, # human node restarts + NodeRunStreamChunkEvent, # cached llm_initial chunk 1 + NodeRunStreamChunkEvent, # cached llm_initial chunk 2 + NodeRunStreamChunkEvent, # cached llm_initial final chunk + NodeRunStreamChunkEvent, # end node emits combined template separator + NodeRunSucceededEvent, # human node finishes instantly after input + NodeRunStartedEvent, # llm_resume begins streaming + NodeRunStreamChunkEvent, # llm_resume chunk 1 + NodeRunStreamChunkEvent, # llm_resume chunk 2 + NodeRunStreamChunkEvent, # llm_resume final chunk + NodeRunSucceededEvent, # llm_resume completes streaming + NodeRunStartedEvent, # end node starts + NodeRunSucceededEvent, # end node finishes + GraphRunSucceededEvent, # graph run succeeds after resume + ] + + def resume_graph_factory() -> tuple[Graph, GraphRuntimeState]: + assert graph_runtime_state is not None + assert graph is not None + graph_runtime_state.variable_pool.add(("human", "input_ready"), True) + graph_runtime_state.graph_execution.pause_reason = None + return graph, graph_runtime_state + + resume_case = WorkflowTestCase( + description="HumanInput resume continues LLM streaming order", + 
graph_factory=resume_graph_factory, + expected_event_sequence=expected_resume_sequence, + ) + + resume_result = runner.run_test_case(resume_case) + + assert resume_result.success, resume_result.event_mismatch_details + + resume_events = resume_result.events + + success_index = next(i for i, event in enumerate(resume_events) if isinstance(event, GraphRunSucceededEvent)) + llm_resume_succeeded_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "llm_resume" + ) + assert llm_resume_succeeded_index < success_index + + resume_chunk_events = [event for event in resume_events if isinstance(event, NodeRunStreamChunkEvent)] + assert [event.node_id for event in resume_chunk_events[:3]] == ["llm_initial"] * 3 + assert [event.chunk for event in resume_chunk_events[:3]] == initial_chunks + assert resume_chunk_events[3].node_id == "end" + assert resume_chunk_events[3].chunk == "\n" + assert [event.node_id for event in resume_chunk_events[4:]] == ["llm_resume"] * 3 + assert [event.chunk for event in resume_chunk_events[4:]] == resume_chunks + + human_success_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "human" + ) + cached_chunk_indices = [ + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and event.node_id in {"llm_initial", "end"} + ] + assert all(index < human_success_index for index in cached_chunk_indices) + + llm_resume_start_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == "llm_resume" + ) + llm_resume_success_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "llm_resume" + ) + llm_resume_chunk_indices = [ + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and event.node_id == "llm_resume" + ] + assert llm_resume_chunk_indices + first_resume_chunk_index = min(llm_resume_chunk_indices) + last_resume_chunk_index = max(llm_resume_chunk_indices) + assert llm_resume_start_index < first_resume_chunk_index + assert last_resume_chunk_index < llm_resume_success_index + + started_nodes = [event.node_id for event in resume_events if isinstance(event, NodeRunStartedEvent)] + assert started_nodes == ["human", "llm_resume", "end"] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_if_else_streaming.py b/api/tests/unit_tests/core/workflow/graph_engine/test_if_else_streaming.py new file mode 100644 index 0000000000..dfd33f135f --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_if_else_streaming.py @@ -0,0 +1,321 @@ +import time + +from core.model_runtime.entities.llm_entities import LLMMode +from core.model_runtime.entities.message_entities import PromptMessageRole +from core.workflow.entities import GraphInitParams +from core.workflow.graph import Graph +from core.workflow.graph_events import ( + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunStartedEvent, + NodeRunStreamChunkEvent, + NodeRunSucceededEvent, +) +from core.workflow.nodes.base.entities import VariableSelector +from core.workflow.nodes.end.end_node import EndNode +from core.workflow.nodes.end.entities import EndNodeData +from core.workflow.nodes.if_else.entities import IfElseNodeData +from core.workflow.nodes.if_else.if_else_node import IfElseNode +from core.workflow.nodes.llm.entities import ( + 
ContextConfig, + LLMNodeChatModelMessage, + LLMNodeData, + ModelConfig, + VisionConfig, +) +from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.nodes.start.start_node import StartNode +from core.workflow.runtime import GraphRuntimeState, VariablePool +from core.workflow.system_variable import SystemVariable +from core.workflow.utils.condition.entities import Condition + +from .test_mock_config import MockConfig +from .test_mock_nodes import MockLLMNode +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def _build_if_else_graph(branch_value: str, mock_config: MockConfig) -> tuple[Graph, GraphRuntimeState]: + graph_config: dict[str, object] = {"nodes": [], "edges": []} + graph_init_params = GraphInitParams( + tenant_id="tenant", + app_id="app", + workflow_id="workflow", + graph_config=graph_config, + user_id="user", + user_from="account", + invoke_from="debugger", + call_depth=0, + ) + + variable_pool = VariablePool( + system_variables=SystemVariable(user_id="user", app_id="app", workflow_id="workflow"), + user_inputs={}, + conversation_variables=[], + ) + variable_pool.add(("branch", "value"), branch_value) + graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) + + start_config = {"id": "start", "data": StartNodeData(title="Start", variables=[]).model_dump()} + start_node = StartNode( + id=start_config["id"], + config=start_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + start_node.init_node_data(start_config["data"]) + + def _create_llm_node(node_id: str, title: str, prompt_text: str) -> MockLLMNode: + llm_data = LLMNodeData( + title=title, + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + prompt_template=[ + LLMNodeChatModelMessage( + text=prompt_text, + role=PromptMessageRole.USER, + edition_type="basic", + ) + ], + context=ContextConfig(enabled=False, variable_selector=None), + vision=VisionConfig(enabled=False), + reasoning_format="tagged", + ) + llm_config = {"id": node_id, "data": llm_data.model_dump()} + llm_node = MockLLMNode( + id=node_id, + config=llm_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + mock_config=mock_config, + ) + llm_node.init_node_data(llm_config["data"]) + return llm_node + + llm_initial = _create_llm_node("llm_initial", "Initial LLM", "Initial stream") + + if_else_data = IfElseNodeData( + title="IfElse", + cases=[ + IfElseNodeData.Case( + case_id="primary", + logical_operator="and", + conditions=[ + Condition(variable_selector=["branch", "value"], comparison_operator="is", value="primary") + ], + ), + IfElseNodeData.Case( + case_id="secondary", + logical_operator="and", + conditions=[ + Condition(variable_selector=["branch", "value"], comparison_operator="is", value="secondary") + ], + ), + ], + ) + if_else_config = {"id": "if_else", "data": if_else_data.model_dump()} + if_else_node = IfElseNode( + id=if_else_config["id"], + config=if_else_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + if_else_node.init_node_data(if_else_config["data"]) + + llm_primary = _create_llm_node("llm_primary", "Primary LLM", "Primary stream output") + llm_secondary = _create_llm_node("llm_secondary", "Secondary LLM", "Secondary") + + end_primary_data = EndNodeData( + title="End Primary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + 
VariableSelector(variable="primary_text", value_selector=["llm_primary", "text"]), + ], + desc=None, + ) + end_primary_config = {"id": "end_primary", "data": end_primary_data.model_dump()} + end_primary = EndNode( + id=end_primary_config["id"], + config=end_primary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_primary.init_node_data(end_primary_config["data"]) + + end_secondary_data = EndNodeData( + title="End Secondary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="secondary_text", value_selector=["llm_secondary", "text"]), + ], + desc=None, + ) + end_secondary_config = {"id": "end_secondary", "data": end_secondary_data.model_dump()} + end_secondary = EndNode( + id=end_secondary_config["id"], + config=end_secondary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_secondary.init_node_data(end_secondary_config["data"]) + + graph = ( + Graph.new() + .add_root(start_node) + .add_node(llm_initial) + .add_node(if_else_node) + .add_node(llm_primary, from_node_id="if_else", source_handle="primary") + .add_node(end_primary, from_node_id="llm_primary") + .add_node(llm_secondary, from_node_id="if_else", source_handle="secondary") + .add_node(end_secondary, from_node_id="llm_secondary") + .build() + ) + return graph, graph_runtime_state + + +def _expected_mock_llm_chunks(text: str) -> list[str]: + chunks: list[str] = [] + for index, word in enumerate(text.split(" ")): + chunk = word if index == 0 else f" {word}" + chunks.append(chunk) + chunks.append("") + return chunks + + +def test_if_else_llm_streaming_order() -> None: + mock_config = MockConfig() + mock_config.set_node_outputs("llm_initial", {"text": "Initial stream"}) + mock_config.set_node_outputs("llm_primary", {"text": "Primary stream output"}) + mock_config.set_node_outputs("llm_secondary", {"text": "Secondary"}) + + scenarios = [ + { + "branch": "primary", + "resume_llm": "llm_primary", + "end_node": "end_primary", + "expected_sequence": [ + GraphRunStartedEvent, # graph run begins + NodeRunStartedEvent, # start node begins execution + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial starts and streams + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # if_else evaluates conditions + NodeRunStreamChunkEvent, # cached llm_initial chunk 1 flushed + NodeRunStreamChunkEvent, # cached llm_initial chunk 2 flushed + NodeRunStreamChunkEvent, # cached llm_initial final chunk flushed + NodeRunStreamChunkEvent, # template literal newline emitted + NodeRunSucceededEvent, # if_else completes branch selection + NodeRunStartedEvent, # llm_primary begins streaming + NodeRunStreamChunkEvent, # llm_primary chunk 1 + NodeRunStreamChunkEvent, # llm_primary chunk 2 + NodeRunStreamChunkEvent, # llm_primary chunk 3 + NodeRunStreamChunkEvent, # llm_primary final chunk + NodeRunSucceededEvent, # llm_primary completes streaming + NodeRunStartedEvent, # end_primary node starts + NodeRunSucceededEvent, # end_primary finishes aggregation + GraphRunSucceededEvent, # graph run succeeds + ], + "expected_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), + ("end_primary", ["\n"]), + ("llm_primary", _expected_mock_llm_chunks("Primary stream output")), + ], + }, + { + "branch": "secondary", + "resume_llm": "llm_secondary", + "end_node": "end_secondary", + "expected_sequence": [ + GraphRunStartedEvent, # 
graph run begins + NodeRunStartedEvent, # start node begins execution + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial starts and streams + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # if_else evaluates conditions + NodeRunStreamChunkEvent, # cached llm_initial chunk 1 flushed + NodeRunStreamChunkEvent, # cached llm_initial chunk 2 flushed + NodeRunStreamChunkEvent, # cached llm_initial final chunk flushed + NodeRunStreamChunkEvent, # template literal newline emitted + NodeRunSucceededEvent, # if_else completes branch selection + NodeRunStartedEvent, # llm_secondary begins streaming + NodeRunStreamChunkEvent, # llm_secondary chunk 1 + NodeRunStreamChunkEvent, # llm_secondary final chunk + NodeRunSucceededEvent, # llm_secondary completes + NodeRunStartedEvent, # end_secondary node starts + NodeRunSucceededEvent, # end_secondary finishes aggregation + GraphRunSucceededEvent, # graph run succeeds + ], + "expected_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), + ("end_secondary", ["\n"]), + ("llm_secondary", _expected_mock_llm_chunks("Secondary")), + ], + }, + ] + + for scenario in scenarios: + runner = TableTestRunner() + + def graph_factory( + branch_value: str = scenario["branch"], + cfg: MockConfig = mock_config, + ) -> tuple[Graph, GraphRuntimeState]: + return _build_if_else_graph(branch_value, cfg) + + test_case = WorkflowTestCase( + description=f"IfElse streaming via {scenario['branch']} branch", + graph_factory=graph_factory, + expected_event_sequence=scenario["expected_sequence"], + ) + + result = runner.run_test_case(test_case) + + assert result.success, result.event_mismatch_details + + chunk_events = [event for event in result.events if isinstance(event, NodeRunStreamChunkEvent)] + expected_nodes: list[str] = [] + expected_chunks: list[str] = [] + for node_id, chunks in scenario["expected_chunks"]: + expected_nodes.extend([node_id] * len(chunks)) + expected_chunks.extend(chunks) + assert [event.node_id for event in chunk_events] == expected_nodes + assert [event.chunk for event in chunk_events] == expected_chunks + + branch_node_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == "if_else" + ) + branch_success_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "if_else" + ) + pre_branch_chunk_indices = [ + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStreamChunkEvent) and index < branch_success_index + ] + assert len(pre_branch_chunk_indices) == len(_expected_mock_llm_chunks("Initial stream")) + 1 + assert min(pre_branch_chunk_indices) == branch_node_index + 1 + assert max(pre_branch_chunk_indices) < branch_success_index + + resume_chunk_indices = [ + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStreamChunkEvent) and event.node_id == scenario["resume_llm"] + ] + assert resume_chunk_indices + resume_start_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == scenario["resume_llm"] + ) + resume_success_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == scenario["resume_llm"] + ) + assert resume_start_index < min(resume_chunk_indices) + assert max(resume_chunk_indices) < 
resume_success_index + + started_nodes = [event.node_id for event in result.events if isinstance(event, NodeRunStartedEvent)] + assert started_nodes == ["start", "llm_initial", "if_else", scenario["resume_llm"], scenario["end_node"]] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_iteration_flatten_output.py b/api/tests/unit_tests/core/workflow/graph_engine/test_iteration_flatten_output.py new file mode 100644 index 0000000000..f2095a8a70 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_iteration_flatten_output.py @@ -0,0 +1,96 @@ +""" +Test cases for the Iteration node's flatten_output functionality. + +This module tests the iteration node's ability to: +1. Flatten array outputs when flatten_output=True (default) +2. Preserve nested array structure when flatten_output=False +""" + +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def test_iteration_with_flatten_output_enabled(): + """ + Test iteration node with flatten_output=True (default behavior). + + The fixture implements an iteration that: + 1. Iterates over [1, 2, 3] + 2. For each item, outputs [item, item*2] + 3. With flatten_output=True, should output [1, 2, 2, 4, 3, 6] + """ + runner = TableTestRunner() + + test_case = WorkflowTestCase( + fixture_path="iteration_flatten_output_enabled_workflow", + inputs={}, + expected_outputs={"output": [1, 2, 2, 4, 3, 6]}, + description="Iteration with flatten_output=True flattens nested arrays", + use_auto_mock=False, # Run code nodes directly + ) + + result = runner.run_test_case(test_case) + + assert result.success, f"Test failed: {result.error}" + assert result.actual_outputs is not None, "Should have outputs" + assert result.actual_outputs == {"output": [1, 2, 2, 4, 3, 6]}, ( + f"Expected flattened output [1, 2, 2, 4, 3, 6], got {result.actual_outputs}" + ) + + +def test_iteration_with_flatten_output_disabled(): + """ + Test iteration node with flatten_output=False. + + The fixture implements an iteration that: + 1. Iterates over [1, 2, 3] + 2. For each item, outputs [item, item*2] + 3. With flatten_output=False, should output [[1, 2], [2, 4], [3, 6]] + """ + runner = TableTestRunner() + + test_case = WorkflowTestCase( + fixture_path="iteration_flatten_output_disabled_workflow", + inputs={}, + expected_outputs={"output": [[1, 2], [2, 4], [3, 6]]}, + description="Iteration with flatten_output=False preserves nested structure", + use_auto_mock=False, # Run code nodes directly + ) + + result = runner.run_test_case(test_case) + + assert result.success, f"Test failed: {result.error}" + assert result.actual_outputs is not None, "Should have outputs" + assert result.actual_outputs == {"output": [[1, 2], [2, 4], [3, 6]]}, ( + f"Expected nested output [[1, 2], [2, 4], [3, 6]], got {result.actual_outputs}" + ) + + +def test_iteration_flatten_output_comparison(): + """ + Run both flatten_output configurations in parallel to verify the difference. 
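+    Both fixtures iterate over [1, 2, 3] and emit [item, item * 2] per step; only the
+    flatten_output flag differs, so the two expected outputs diverge as asserted below.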
+ """ + runner = TableTestRunner() + + test_cases = [ + WorkflowTestCase( + fixture_path="iteration_flatten_output_enabled_workflow", + inputs={}, + expected_outputs={"output": [1, 2, 2, 4, 3, 6]}, + description="flatten_output=True: Flattened output", + use_auto_mock=False, # Run code nodes directly + ), + WorkflowTestCase( + fixture_path="iteration_flatten_output_disabled_workflow", + inputs={}, + expected_outputs={"output": [[1, 2], [2, 4], [3, 6]]}, + description="flatten_output=False: Nested output", + use_auto_mock=False, # Run code nodes directly + ), + ] + + suite_result = runner.run_table_tests(test_cases, parallel=True) + + # Assert all tests passed + assert suite_result.passed_tests == 2, f"Expected 2 passed tests, got {suite_result.passed_tests}" + assert suite_result.failed_tests == 0, f"Expected 0 failed tests, got {suite_result.failed_tests}" + assert suite_result.success_rate == 100.0, f"Expected 100% success rate, got {suite_result.success_rate}" diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py index 7f802effa6..03de984bd1 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py @@ -27,7 +27,8 @@ from .test_mock_nodes import ( ) if TYPE_CHECKING: - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState from .test_mock_config import MockConfig diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py index 6a9bfbdcc3..48fa00f105 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py @@ -42,7 +42,8 @@ def test_mock_iteration_node_preserves_config(): """Test that MockIterationNode preserves mock configuration.""" from core.app.entities.app_invoke_entities import InvokeFrom - from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool from models.enums import UserFrom from tests.unit_tests.core.workflow.graph_engine.test_mock_nodes import MockIterationNode @@ -56,8 +57,8 @@ def test_mock_iteration_node_preserves_config(): workflow_id="test", graph_config={"nodes": [], "edges": []}, user_id="test", - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) @@ -103,7 +104,8 @@ def test_mock_loop_node_preserves_config(): """Test that MockLoopNode preserves mock configuration.""" from core.app.entities.app_invoke_entities import InvokeFrom - from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool from models.enums import UserFrom from tests.unit_tests.core.workflow.graph_engine.test_mock_nodes import MockLoopNode @@ -117,8 +119,8 @@ def test_mock_loop_node_preserves_config(): workflow_id="test", graph_config={"nodes": [], "edges": []}, user_id="test", - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + 
user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py index e5ae32bbff..68f57ee9fb 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py @@ -24,7 +24,8 @@ from core.workflow.nodes.template_transform import TemplateTransformNode from core.workflow.nodes.tool import ToolNode if TYPE_CHECKING: - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState from .test_mock_config import MockConfig @@ -561,10 +562,11 @@ class MockIterationNode(MockNodeMixin, IterationNode): def _create_graph_engine(self, index: int, item: Any): """Create a graph engine with MockNodeFactory instead of DifyNodeFactory.""" # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel + from core.workflow.runtime import GraphRuntimeState # Import our MockNodeFactory instead of DifyNodeFactory from .test_mock_factory import MockNodeFactory @@ -635,10 +637,11 @@ class MockLoopNode(MockNodeMixin, LoopNode): def _create_graph_engine(self, start_at, root_node_id: str): """Create a graph engine with MockNodeFactory instead of DifyNodeFactory.""" # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel + from core.workflow.runtime import GraphRuntimeState # Import our MockNodeFactory instead of DifyNodeFactory from .test_mock_factory import MockNodeFactory diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py index 394addd5c2..23274f5981 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py @@ -16,8 +16,8 @@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_default_output(self): """Test that MockTemplateTransformNode processes templates with Jinja2.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -76,8 +76,8 @@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_custom_output(self): """Test that MockTemplateTransformNode returns custom configured output.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -137,8 +137,8 
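# --- Illustrative sketch (not part of the diff) -----------------------------
# The ``.value`` removals above pass enum members where their string values
# were passed before, consistent with GraphInitParams accepting the enum
# types directly. One reason member-passing is generally safe: enums that
# mix in ``str`` compare and serialize as their values. ``Mode`` below is a
# stand-in class, not one from the codebase.
import json
from enum import Enum

class Mode(str, Enum):
    OVER_WRITE = "over-write"

assert Mode.OVER_WRITE == "over-write"  # a str-enum member compares as its value
assert json.dumps({"m": Mode.OVER_WRITE}) == '{"m": "over-write"}'  # and serializes as plain str
# -----------------------------------------------------------------------------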
@@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_error_simulation(self): """Test that MockTemplateTransformNode can simulate errors.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -196,8 +196,8 @@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_with_variables(self): """Test that MockTemplateTransformNode processes templates with variables.""" from core.variables import StringVariable - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -262,8 +262,8 @@ class TestMockCodeNode: def test_mock_code_node_default_output(self): """Test that MockCodeNode returns default output.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -323,8 +323,8 @@ class TestMockCodeNode: def test_mock_code_node_with_output_schema(self): """Test that MockCodeNode generates outputs based on schema.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -392,8 +392,8 @@ class TestMockCodeNode: def test_mock_code_node_custom_output(self): """Test that MockCodeNode returns custom configured output.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -463,8 +463,8 @@ class TestMockNodeFactory: def test_code_and_template_nodes_mocked_by_default(self): """Test that CODE and TEMPLATE_TRANSFORM nodes are mocked by default (they require SSRF proxy).""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -504,8 +504,8 @@ class TestMockNodeFactory: def test_factory_creates_mock_template_transform_node(self): """Test that MockNodeFactory creates MockTemplateTransformNode for template-transform type.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -555,8 +555,8 @@ class TestMockNodeFactory: def 
test_factory_creates_mock_code_node(self): """Test that MockNodeFactory creates MockCodeNode for code type.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py b/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py index d1f1f53b78..b76fe42fce 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py @@ -13,7 +13,7 @@ from unittest.mock import patch from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine @@ -27,6 +27,7 @@ from core.workflow.graph_events import ( from core.workflow.node_events import NodeRunResult, StreamCompletedEvent from core.workflow.nodes.llm.node import LLMNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py b/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py index b286d99f70..f1a495d20a 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py @@ -13,7 +13,7 @@ import redis from core.app.apps.base_app_queue_manager import AppQueueManager from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel -from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType +from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, PauseCommand from core.workflow.graph_engine.manager import GraphEngineManager @@ -49,9 +49,32 @@ class TestRedisStopIntegration: # Verify the command data command_json = calls[0][0][1] command_data = json.loads(command_json) - assert command_data["command_type"] == CommandType.ABORT.value + assert command_data["command_type"] == CommandType.ABORT assert command_data["reason"] == "Test stop" + def test_graph_engine_manager_sends_pause_command(self): + """Test that GraphEngineManager correctly sends pause command through Redis.""" + task_id = "test-task-pause-123" + expected_channel_key = f"workflow:{task_id}:commands" + + mock_redis = MagicMock() + mock_pipeline = MagicMock() + mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) + mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + + with patch("core.workflow.graph_engine.manager.redis_client", mock_redis): + GraphEngineManager.send_pause_command(task_id, reason="Awaiting resources") + + mock_redis.pipeline.assert_called_once() + calls = mock_pipeline.rpush.call_args_list + assert len(calls) == 1 + assert calls[0][0][0] == expected_channel_key + + 
command_json = calls[0][0][1] + command_data = json.loads(command_json) + assert command_data["command_type"] == CommandType.PAUSE.value + assert command_data["reason"] == "Awaiting resources" + def test_graph_engine_manager_handles_redis_failure_gracefully(self): """Test that GraphEngineManager handles Redis failures without raising exceptions.""" task_id = "test-task-456" @@ -105,45 +128,64 @@ class TestRedisStopIntegration: channel_key = "workflow:test:commands" channel = RedisChannel(mock_redis, channel_key) - # Create abort command + # Create commands abort_command = AbortCommand(reason="User requested stop") + pause_command = PauseCommand(reason="User requested pause") # Execute channel.send_command(abort_command) + channel.send_command(pause_command) # Verify - mock_redis.pipeline.assert_called_once() + mock_redis.pipeline.assert_called() # Check rpush was called calls = mock_pipeline.rpush.call_args_list - assert len(calls) == 1 + assert len(calls) == 2 assert calls[0][0][0] == channel_key + assert calls[1][0][0] == channel_key - # Verify serialized command - command_json = calls[0][0][1] - command_data = json.loads(command_json) - assert command_data["command_type"] == CommandType.ABORT.value - assert command_data["reason"] == "User requested stop" + # Verify serialized commands + abort_command_json = calls[0][0][1] + abort_command_data = json.loads(abort_command_json) + assert abort_command_data["command_type"] == CommandType.ABORT.value + assert abort_command_data["reason"] == "User requested stop" - # Check expire was set - mock_pipeline.expire.assert_called_once_with(channel_key, 3600) + pause_command_json = calls[1][0][1] + pause_command_data = json.loads(pause_command_json) + assert pause_command_data["command_type"] == CommandType.PAUSE.value + assert pause_command_data["reason"] == "User requested pause" + + # Check expire was set for each + assert mock_pipeline.expire.call_count == 2 + mock_pipeline.expire.assert_any_call(channel_key, 3600) def test_redis_channel_fetch_commands(self): """Test RedisChannel correctly fetches and deserializes commands.""" # Setup mock_redis = MagicMock() - mock_pipeline = MagicMock() - mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) - mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mock command data abort_command_json = json.dumps( {"command_type": CommandType.ABORT.value, "reason": "Test abort", "payload": None} ) + pause_command_json = json.dumps( + {"command_type": CommandType.PAUSE.value, "reason": "Pause requested", "payload": None} + ) # Mock pipeline execute to return commands - mock_pipeline.execute.return_value = [ - [abort_command_json.encode()], # lrange result + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [ + [abort_command_json.encode(), pause_command_json.encode()], # lrange result True, # delete result ] @@ -154,25 +196,38 @@ class TestRedisStopIntegration: commands = channel.fetch_commands() # Verify - assert len(commands) == 1 + assert len(commands) == 2 assert isinstance(commands[0], AbortCommand) assert commands[0].command_type == CommandType.ABORT assert 
commands[0].reason == "Test abort" + assert isinstance(commands[1], PauseCommand) + assert commands[1].command_type == CommandType.PAUSE + assert commands[1].reason == "Pause requested" # Verify Redis operations - mock_pipeline.lrange.assert_called_once_with(channel_key, 0, -1) - mock_pipeline.delete.assert_called_once_with(channel_key) + pending_pipe.get.assert_called_once_with(f"{channel_key}:pending") + pending_pipe.delete.assert_called_once_with(f"{channel_key}:pending") + fetch_pipe.lrange.assert_called_once_with(channel_key, 0, -1) + fetch_pipe.delete.assert_called_once_with(channel_key) + assert mock_redis.pipeline.call_count == 2 def test_redis_channel_fetch_commands_handles_invalid_json(self): """Test RedisChannel gracefully handles invalid JSON in commands.""" # Setup mock_redis = MagicMock() - mock_pipeline = MagicMock() - mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) - mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mock invalid command data - mock_pipeline.execute.return_value = [ + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [ [b"invalid json", b'{"command_type": "invalid_type"}'], # lrange result True, # delete result ] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py b/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py index 0f3a142b1a..08f7b00a33 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py @@ -29,7 +29,6 @@ from core.variables import ( ObjectVariable, StringVariable, ) -from core.workflow.entities import GraphRuntimeState, VariablePool from core.workflow.entities.graph_init_params import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine @@ -40,6 +39,7 @@ from core.workflow.graph_events import ( GraphRunSucceededEvent, ) from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from .test_mock_config import MockConfig @@ -52,8 +52,8 @@ logger = logging.getLogger(__name__) class WorkflowTestCase: """Represents a single test case for table-driven testing.""" - fixture_path: str - expected_outputs: dict[str, Any] + fixture_path: str = "" + expected_outputs: dict[str, Any] = field(default_factory=dict) inputs: dict[str, Any] = field(default_factory=dict) query: str = "" description: str = "" @@ -61,11 +61,7 @@ class WorkflowTestCase: mock_config: MockConfig | None = None use_auto_mock: bool = False expected_event_sequence: Sequence[type[GraphEngineEvent]] | None = None - tags: list[str] = field(default_factory=list) - skip: bool = False - skip_reason: str = "" - retry_count: int = 0 - custom_validator: Callable[[dict[str, Any]], bool] | None = None + graph_factory: Callable[[], tuple[Graph, GraphRuntimeState]] | None = None @dataclass @@ -80,7 +76,8 @@ class WorkflowTestResult: event_sequence_match: bool | None = None event_mismatch_details: str | None = None events: 
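# --- Illustrative sketch (not part of the diff) -----------------------------
# The fetch path these tests mock appears to work in two pipeline round
# trips: check-and-clear a "<key>:pending" counter, then drain the command
# list atomically. A hedged reconstruction from the test doubles above, not
# the actual RedisChannel code; it returns raw dicts instead of command
# objects and silently skips invalid JSON, as the last test expects.
import json

def fetch_commands_sketch(redis_client, key: str) -> list[dict]:
    with redis_client.pipeline() as pipe:
        pipe.get(f"{key}:pending")     # how many commands were enqueued
        pipe.delete(f"{key}:pending")  # clear the counter
        pending, _ = pipe.execute()
    if not pending:
        return []  # nothing queued; skip the second round trip
    with redis_client.pipeline() as pipe:
        pipe.lrange(key, 0, -1)  # read every queued command
        pipe.delete(key)         # then drop the list
        raw_commands, _ = pipe.execute()
    commands = []
    for raw in raw_commands:
        try:
            commands.append(json.loads(raw))
        except json.JSONDecodeError:
            continue  # malformed entries are ignored, not fatal
    return commands
# -----------------------------------------------------------------------------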
list[GraphEngineEvent] = field(default_factory=list) - retry_attempts: int = 0 + graph: Graph | None = None + graph_runtime_state: GraphRuntimeState | None = None validation_details: str | None = None @@ -91,7 +88,6 @@ class TestSuiteResult: total_tests: int passed_tests: int failed_tests: int - skipped_tests: int total_execution_time: float results: list[WorkflowTestResult] @@ -106,10 +102,6 @@ class TestSuiteResult: """Get all failed test results.""" return [r for r in self.results if not r.success] - def get_results_by_tag(self, tag: str) -> list[WorkflowTestResult]: - """Get test results filtered by tag.""" - return [r for r in self.results if tag in r.test_case.tags] - class WorkflowRunner: """Core workflow execution engine for tests.""" @@ -286,90 +278,30 @@ class TableTestRunner: Returns: WorkflowTestResult with execution details """ - if test_case.skip: - self.logger.info("Skipping test: %s - %s", test_case.description, test_case.skip_reason) - return WorkflowTestResult( - test_case=test_case, - success=True, - execution_time=0.0, - validation_details=f"Skipped: {test_case.skip_reason}", - ) - - retry_attempts = 0 - last_result = None - last_error = None start_time = time.perf_counter() - for attempt in range(test_case.retry_count + 1): - start_time = time.perf_counter() - - try: - result = self._execute_test_case(test_case) - last_result = result # Save the last result - - if result.success: - result.retry_attempts = retry_attempts - self.logger.info("Test passed: %s", test_case.description) - return result - - last_error = result.error - retry_attempts += 1 - - if attempt < test_case.retry_count: - self.logger.warning( - "Test failed (attempt %d/%d): %s", - attempt + 1, - test_case.retry_count + 1, - test_case.description, - ) - time.sleep(0.5 * (attempt + 1)) # Exponential backoff - - except Exception as e: - last_error = e - retry_attempts += 1 - - if attempt < test_case.retry_count: - self.logger.warning( - "Test error (attempt %d/%d): %s - %s", - attempt + 1, - test_case.retry_count + 1, - test_case.description, - str(e), - ) - time.sleep(0.5 * (attempt + 1)) - - # All retries failed - return the last result if available - if last_result: - last_result.retry_attempts = retry_attempts - self.logger.error("Test failed after %d attempts: %s", retry_attempts, test_case.description) - return last_result - - # If no result available (all attempts threw exceptions), create a failure result - self.logger.error("Test failed after %d attempts: %s", retry_attempts, test_case.description) - return WorkflowTestResult( - test_case=test_case, - success=False, - error=last_error, - execution_time=time.perf_counter() - start_time, - retry_attempts=retry_attempts, - ) + try: + result = self._execute_test_case(test_case) + if result.success: + self.logger.info("Test passed: %s", test_case.description) + else: + self.logger.error("Test failed: %s", test_case.description) + return result + except Exception as exc: + self.logger.exception("Error executing test case: %s", test_case.description) + return WorkflowTestResult( + test_case=test_case, + success=False, + error=exc, + execution_time=time.perf_counter() - start_time, + ) def _execute_test_case(self, test_case: WorkflowTestCase) -> WorkflowTestResult: """Internal method to execute a single test case.""" start_time = time.perf_counter() try: - # Load fixture data - fixture_data = self.workflow_runner.load_fixture(test_case.fixture_path) - - # Create graph from fixture - graph, graph_runtime_state = 
self.workflow_runner.create_graph_from_fixture( - fixture_data=fixture_data, - inputs=test_case.inputs, - query=test_case.query, - use_mock_factory=test_case.use_auto_mock, - mock_config=test_case.mock_config, - ) + graph, graph_runtime_state = self._create_graph_runtime_state(test_case) # Create and run the engine with configured worker settings engine = GraphEngine( @@ -384,7 +316,7 @@ class TableTestRunner: ) # Execute and collect events - events = [] + events: list[GraphEngineEvent] = [] for event in engine.run(): events.append(event) @@ -416,6 +348,8 @@ class TableTestRunner: events=events, event_sequence_match=event_sequence_match, event_mismatch_details=event_mismatch_details, + graph=graph, + graph_runtime_state=graph_runtime_state, ) # Get actual outputs @@ -423,9 +357,7 @@ class TableTestRunner: actual_outputs = success_event.outputs or {} # Validate outputs - output_success, validation_details = self._validate_outputs( - test_case.expected_outputs, actual_outputs, test_case.custom_validator - ) + output_success, validation_details = self._validate_outputs(test_case.expected_outputs, actual_outputs) # Overall success requires both output and event sequence validation success = output_success and (event_sequence_match if event_sequence_match is not None else True) @@ -440,6 +372,8 @@ class TableTestRunner: events=events, validation_details=validation_details, error=None if success else Exception(validation_details or event_mismatch_details or "Test failed"), + graph=graph, + graph_runtime_state=graph_runtime_state, ) except Exception as e: @@ -449,13 +383,33 @@ class TableTestRunner: success=False, error=e, execution_time=time.perf_counter() - start_time, + graph=graph if "graph" in locals() else None, + graph_runtime_state=graph_runtime_state if "graph_runtime_state" in locals() else None, ) + def _create_graph_runtime_state(self, test_case: WorkflowTestCase) -> tuple[Graph, GraphRuntimeState]: + """Create or retrieve graph/runtime state according to test configuration.""" + + if test_case.graph_factory is not None: + return test_case.graph_factory() + + if not test_case.fixture_path: + raise ValueError("fixture_path must be provided when graph_factory is not specified") + + fixture_data = self.workflow_runner.load_fixture(test_case.fixture_path) + + return self.workflow_runner.create_graph_from_fixture( + fixture_data=fixture_data, + inputs=test_case.inputs, + query=test_case.query, + use_mock_factory=test_case.use_auto_mock, + mock_config=test_case.mock_config, + ) + def _validate_outputs( self, expected_outputs: dict[str, Any], actual_outputs: dict[str, Any], - custom_validator: Callable[[dict[str, Any]], bool] | None = None, ) -> tuple[bool, str | None]: """ Validate actual outputs against expected outputs. 
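# --- Illustrative sketch (not part of the diff) -----------------------------
# Usage of the new ``graph_factory`` hook introduced above: a test can hand
# the runner a pre-built (Graph, GraphRuntimeState) pair instead of a
# fixture path, which the runner otherwise falls back to.
# ``build_my_graph`` is a hypothetical placeholder for whatever constructs
# the pair in a real test.
from .test_table_runner import TableTestRunner, WorkflowTestCase

def build_my_graph():
    raise NotImplementedError  # hypothetical: return (Graph, GraphRuntimeState)

case = WorkflowTestCase(
    graph_factory=build_my_graph,       # bypasses fixture loading entirely
    expected_outputs={"output": "ok"},  # validated the same way as fixture cases
    description="Case driven by an in-memory graph",
)
result = TableTestRunner().run_test_case(case)
# -----------------------------------------------------------------------------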
@@ -490,14 +444,6 @@ class TableTestRunner: f"Value mismatch for key '{key}':\n Expected: {expected_value}\n Actual: {actual_value}" ) - # Apply custom validator if provided - if custom_validator: - try: - if not custom_validator(actual_outputs): - validation_errors.append("Custom validator failed") - except Exception as e: - validation_errors.append(f"Custom validator error: {str(e)}") - if validation_errors: return False, "\n".join(validation_errors) @@ -537,7 +483,6 @@ class TableTestRunner: self, test_cases: list[WorkflowTestCase], parallel: bool = False, - tags_filter: list[str] | None = None, fail_fast: bool = False, ) -> TestSuiteResult: """ @@ -546,22 +491,16 @@ class TableTestRunner: Args: test_cases: List of test cases to execute parallel: Run tests in parallel - tags_filter: Only run tests with specified tags - fail_fast: Stop execution on first failure + fail_fast: Stop execution on first failure Returns: TestSuiteResult with aggregated results """ - # Filter by tags if specified - if tags_filter: - test_cases = [tc for tc in test_cases if any(tag in tc.tags for tag in tags_filter)] - if not test_cases: return TestSuiteResult( total_tests=0, passed_tests=0, failed_tests=0, - skipped_tests=0, total_execution_time=0.0, results=[], ) @@ -576,16 +515,14 @@ class TableTestRunner: # Calculate statistics total_tests = len(results) - passed_tests = sum(1 for r in results if r.success and not r.test_case.skip) - failed_tests = sum(1 for r in results if not r.success and not r.test_case.skip) - skipped_tests = sum(1 for r in results if r.test_case.skip) + passed_tests = sum(1 for r in results if r.success) + failed_tests = total_tests - passed_tests total_execution_time = time.perf_counter() - start_time return TestSuiteResult( total_tests=total_tests, passed_tests=passed_tests, failed_tests=failed_tests, - skipped_tests=skipped_tests, total_execution_time=total_execution_time, results=results, ) @@ -598,7 +535,7 @@ class TableTestRunner: result = self.run_test_case(test_case) results.append(result) - if fail_fast and not result.success and not result.test_case.skip: + if fail_fast and not result.success: self.logger.info("Fail-fast enabled: stopping execution") break @@ -618,11 +555,11 @@ class TableTestRunner: result = future.result() results.append(result) - if fail_fast and not result.success and not result.test_case.skip: + if fail_fast and not result.success: self.logger.info("Fail-fast enabled: cancelling remaining tests") - # Cancel remaining futures - for f in future_to_test: - f.cancel() + for remaining_future in future_to_test: + if not remaining_future.done(): + remaining_future.cancel() break except Exception as e: @@ -636,8 +573,9 @@ class TableTestRunner: ) if fail_fast: - for f in future_to_test: - f.cancel() + for remaining_future in future_to_test: + if not remaining_future.done(): + remaining_future.cancel() break return results @@ -663,7 +601,6 @@ class TableTestRunner: report.append(f" Total Tests: {suite_result.total_tests}") report.append(f" Passed: {suite_result.passed_tests}") report.append(f" Failed: {suite_result.failed_tests}") - report.append(f" Skipped: {suite_result.skipped_tests}") report.append(f" Success Rate: {suite_result.success_rate:.1f}%") report.append(f" Total Time: {suite_result.total_execution_time:.2f}s") report.append("") diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py b/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py new file mode 100644 
index 0000000000..a7309f64de --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py @@ -0,0 +1,41 @@ +"""Validate conversation variable updates inside an iteration workflow. + +This test uses the ``update-conversation-variable-in-iteration`` fixture, which +routes ``sys.query`` into the conversation variable ``answer`` from within an +iteration container. The workflow should surface that updated conversation +variable in the final answer output. + +Code nodes in the fixture are mocked because their concrete outputs are not +relevant to verifying variable propagation semantics. +""" + +from .test_mock_config import MockConfigBuilder +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def test_update_conversation_variable_in_iteration(): + fixture_name = "update-conversation-variable-in-iteration" + user_query = "ensure conversation variable syncs" + + mock_config = ( + MockConfigBuilder() + .with_node_output("1759032363865", {"result": [1]}) + .with_node_output("1759032476318", {"result": ""}) + .build() + ) + + case = WorkflowTestCase( + fixture_path=fixture_name, + use_auto_mock=True, + mock_config=mock_config, + query=user_query, + expected_outputs={"answer": user_query}, + description="Conversation variable updated within iteration should flow to answer output.", + ) + + runner = TableTestRunner() + result = runner.run_test_case(case) + + assert result.success, f"Workflow execution failed: {result.error}" + assert result.actual_outputs is not None + assert result.actual_outputs.get("answer") == user_query diff --git a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py index 79f3f45ce2..d151bbe015 100644 --- a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py +++ b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py @@ -3,11 +3,12 @@ import uuid from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.answer.answer_node import AnswerNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from extensions.ext_database import db from models.enums import UserFrom diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py index b34f73be5f..f040a92b6f 100644 --- a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py +++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py @@ -1,4 +1,3 @@ -from core.workflow.entities import VariablePool from core.workflow.nodes.http_request import ( BodyData, HttpRequestNodeAuthorization, @@ -7,6 +6,7 @@ from core.workflow.nodes.http_request import ( ) from core.workflow.nodes.http_request.entities import HttpRequestNodeTimeout from core.workflow.nodes.http_request.executor import Executor +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py 
b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py index 61ce640edd..3ffb5c0fdf 100644 --- a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py @@ -20,8 +20,7 @@ from core.model_runtime.entities.message_entities import ( from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool -from core.workflow.graph import Graph +from core.workflow.entities import GraphInitParams from core.workflow.nodes.llm import llm_utils from core.workflow.nodes.llm.entities import ( ContextConfig, @@ -33,6 +32,7 @@ from core.workflow.nodes.llm.entities import ( ) from core.workflow.nodes.llm.file_saver import LLMFileSaver from core.workflow.nodes.llm.node import LLMNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from models.provider import ProviderType @@ -83,14 +83,6 @@ def graph_init_params() -> GraphInitParams: ) -@pytest.fixture -def graph() -> Graph: - # TODO: This fixture uses old Graph constructor parameters that are incompatible - # with the new queue-based engine. Need to rewrite for new engine architecture. - pytest.skip("Graph fixture incompatible with new queue-based engine - needs rewrite for ResponseStreamCoordinator") - return Graph() - - @pytest.fixture def graph_runtime_state() -> GraphRuntimeState: variable_pool = VariablePool( @@ -105,7 +97,7 @@ def graph_runtime_state() -> GraphRuntimeState: @pytest.fixture def llm_node( - llm_node_data: LLMNodeData, graph_init_params: GraphInitParams, graph: Graph, graph_runtime_state: GraphRuntimeState + llm_node_data: LLMNodeData, graph_init_params: GraphInitParams, graph_runtime_state: GraphRuntimeState ) -> LLMNode: mock_file_saver = mock.MagicMock(spec=LLMFileSaver) node_config = { @@ -493,9 +485,7 @@ def test_handle_list_messages_basic(llm_node): @pytest.fixture -def llm_node_for_multimodal( - llm_node_data, graph_init_params, graph, graph_runtime_state -) -> tuple[LLMNode, LLMFileSaver]: +def llm_node_for_multimodal(llm_node_data, graph_init_params, graph_runtime_state) -> tuple[LLMNode, LLMFileSaver]: mock_file_saver: LLMFileSaver = mock.MagicMock(spec=LLMFileSaver) node_config = { "id": "1", @@ -655,7 +645,7 @@ class TestSaveMultimodalOutputAndConvertResultToMarkdown: gen = llm_node._save_multimodal_output_and_convert_result_to_markdown( contents=frozenset(["hello world"]), file_saver=mock_file_saver, file_outputs=[] ) - assert list(gen) == ["frozenset({'hello world'})"] + assert list(gen) == ["hello world"] mock_file_saver.save_binary_string.assert_not_called() mock_file_saver.save_remote_url.assert_not_called() diff --git a/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_parameter_extractor_node.py b/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_parameter_extractor_node.py index b9947d4693..b359284d00 100644 --- a/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_parameter_extractor_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_parameter_extractor_node.py @@ -212,7 +212,7 @@ class TestValidateResult: parameters=[ ParameterConfig( name="status", - type="select", # type: ignore + type="select", 
description="Status", required=True, options=["active", "inactive"], @@ -400,7 +400,7 @@ class TestTransformResult: parameters=[ ParameterConfig( name="status", - type="select", # type: ignore + type="select", description="Status", required=True, options=["active", "inactive"], @@ -414,7 +414,7 @@ class TestTransformResult: parameters=[ ParameterConfig( name="status", - type="select", # type: ignore + type="select", description="Status", required=True, options=["active", "inactive"], diff --git a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py index 69e0052543..962e43a897 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py @@ -7,12 +7,13 @@ import pytest from core.app.entities.app_invoke_entities import InvokeFrom from core.file import File, FileTransferMethod, FileType from core.variables import ArrayFileSegment -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.if_else.entities import IfElseNodeData from core.workflow.nodes.if_else.if_else_node import IfElseNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.utils.condition.entities import Condition, SubCondition, SubVariableCondition from extensions.ext_database import db diff --git a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py index b942614232..55fe62ca43 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py @@ -35,7 +35,7 @@ def list_operator_node(): "extract_by": ExtractConfig(enabled=False, serial="1"), "title": "Test Title", } - node_data = ListOperatorNodeData(**config) + node_data = ListOperatorNodeData.model_validate(config) node_config = { "id": "test_node_id", "data": node_data.model_dump(), diff --git a/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py b/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py index f990280c5f..47ef289ef3 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py @@ -17,7 +17,7 @@ def test_init_question_classifier_node_data(): "vision": {"enabled": True, "configs": {"variable_selector": ["image"], "detail": "low"}}, } - node_data = QuestionClassifierNodeData(**data) + node_data = QuestionClassifierNodeData.model_validate(data) assert node_data.query_variable_selector == ["id", "name"] assert node_data.model.provider == "openai" @@ -49,7 +49,7 @@ def test_init_question_classifier_node_data_without_vision_config(): }, } - node_data = QuestionClassifierNodeData(**data) + node_data = QuestionClassifierNodeData.model_validate(data) assert node_data.query_variable_selector == ["id", "name"] assert node_data.model.provider == "openai" diff --git a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py index 3e50d5522a..6af4777e0e 100644 --- 
a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py @@ -6,11 +6,12 @@ from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import ArrayStringVariable, StringVariable from core.workflow.conversation_variable_updater import ConversationVariableUpdater -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.variable_assigner.v1 import VariableAssignerNode from core.workflow.nodes.variable_assigner.v1.node_data import WriteMode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom @@ -87,7 +88,7 @@ def test_overwrite_string_variable(): "data": { "title": "test", "assigned_variable_selector": ["conversation", conversation_variable.name], - "write_mode": WriteMode.OVER_WRITE.value, + "write_mode": WriteMode.OVER_WRITE, "input_variable_selector": [DEFAULT_NODE_ID, input_variable.name], }, } @@ -189,7 +190,7 @@ def test_append_variable_to_array(): "data": { "title": "test", "assigned_variable_selector": ["conversation", conversation_variable.name], - "write_mode": WriteMode.APPEND.value, + "write_mode": WriteMode.APPEND, "input_variable_selector": [DEFAULT_NODE_ID, input_variable.name], }, } @@ -282,7 +283,7 @@ def test_clear_array(): "data": { "title": "test", "assigned_variable_selector": ["conversation", conversation_variable.name], - "write_mode": WriteMode.CLEAR.value, + "write_mode": WriteMode.CLEAR, "input_variable_selector": [], }, } diff --git a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py index b842dfdb58..80071c8616 100644 --- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py @@ -4,11 +4,12 @@ from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import ArrayStringVariable -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.variable_assigner.v2 import VariableAssignerNode from core.workflow.nodes.variable_assigner.v2.enums import InputType, Operation +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom diff --git a/api/tests/unit_tests/core/workflow/test_system_variable.py b/api/tests/unit_tests/core/workflow/test_system_variable.py index 11d788ed79..f76e81ae55 100644 --- a/api/tests/unit_tests/core/workflow/test_system_variable.py +++ b/api/tests/unit_tests/core/workflow/test_system_variable.py @@ -46,7 +46,7 @@ class TestSystemVariableSerialization: def test_basic_deserialization(self): """Test successful deserialization from JSON structure with all fields correctly mapped.""" # Test with complete data - system_var = SystemVariable(**COMPLETE_VALID_DATA) + 
system_var = SystemVariable.model_validate(COMPLETE_VALID_DATA) # Verify all fields are correctly mapped assert system_var.user_id == COMPLETE_VALID_DATA["user_id"] @@ -59,7 +59,7 @@ class TestSystemVariableSerialization: assert system_var.files == [] # Test with minimal data (only required fields) - minimal_var = SystemVariable(**VALID_BASE_DATA) + minimal_var = SystemVariable.model_validate(VALID_BASE_DATA) assert minimal_var.user_id == VALID_BASE_DATA["user_id"] assert minimal_var.app_id == VALID_BASE_DATA["app_id"] assert minimal_var.workflow_id == VALID_BASE_DATA["workflow_id"] @@ -75,12 +75,12 @@ class TestSystemVariableSerialization: # Test workflow_run_id only (preferred alias) data_run_id = {**VALID_BASE_DATA, "workflow_run_id": workflow_id} - system_var1 = SystemVariable(**data_run_id) + system_var1 = SystemVariable.model_validate(data_run_id) assert system_var1.workflow_execution_id == workflow_id # Test workflow_execution_id only (direct field name) data_execution_id = {**VALID_BASE_DATA, "workflow_execution_id": workflow_id} - system_var2 = SystemVariable(**data_execution_id) + system_var2 = SystemVariable.model_validate(data_execution_id) assert system_var2.workflow_execution_id == workflow_id # Test both present - workflow_run_id should take precedence @@ -89,17 +89,17 @@ class TestSystemVariableSerialization: "workflow_execution_id": "should-be-ignored", "workflow_run_id": workflow_id, } - system_var3 = SystemVariable(**data_both) + system_var3 = SystemVariable.model_validate(data_both) assert system_var3.workflow_execution_id == workflow_id # Test neither present - should be None - system_var4 = SystemVariable(**VALID_BASE_DATA) + system_var4 = SystemVariable.model_validate(VALID_BASE_DATA) assert system_var4.workflow_execution_id is None def test_serialization_round_trip(self): """Test that serialize → deserialize produces the same result with alias handling.""" # Create original SystemVariable - original = SystemVariable(**COMPLETE_VALID_DATA) + original = SystemVariable.model_validate(COMPLETE_VALID_DATA) # Serialize to dict serialized = original.model_dump(mode="json") @@ -110,7 +110,7 @@ class TestSystemVariableSerialization: assert serialized["workflow_run_id"] == COMPLETE_VALID_DATA["workflow_run_id"] # Deserialize back - deserialized = SystemVariable(**serialized) + deserialized = SystemVariable.model_validate(serialized) # Verify all fields match after round-trip assert deserialized.user_id == original.user_id @@ -125,7 +125,7 @@ class TestSystemVariableSerialization: def test_json_round_trip(self): """Test JSON serialization/deserialization consistency with proper structure.""" # Create original SystemVariable - original = SystemVariable(**COMPLETE_VALID_DATA) + original = SystemVariable.model_validate(COMPLETE_VALID_DATA) # Serialize to JSON string json_str = original.model_dump_json() @@ -137,7 +137,7 @@ class TestSystemVariableSerialization: assert json_data["workflow_run_id"] == COMPLETE_VALID_DATA["workflow_run_id"] # Deserialize from JSON data - deserialized = SystemVariable(**json_data) + deserialized = SystemVariable.model_validate(json_data) # Verify key fields match after JSON round-trip assert deserialized.workflow_execution_id == original.workflow_execution_id @@ -149,13 +149,13 @@ class TestSystemVariableSerialization: """Test deserialization with File objects in the files field - SystemVariable specific logic.""" # Test with empty files list data_empty = {**VALID_BASE_DATA, "files": []} - system_var_empty = SystemVariable(**data_empty) + 
system_var_empty = SystemVariable.model_validate(data_empty) assert system_var_empty.files == [] # Test with single File object test_file = create_test_file() data_single = {**VALID_BASE_DATA, "files": [test_file]} - system_var_single = SystemVariable(**data_single) + system_var_single = SystemVariable.model_validate(data_single) assert len(system_var_single.files) == 1 assert system_var_single.files[0].filename == "test.txt" assert system_var_single.files[0].tenant_id == "test-tenant-id" @@ -179,14 +179,14 @@ class TestSystemVariableSerialization: ) data_multiple = {**VALID_BASE_DATA, "files": [file1, file2]} - system_var_multiple = SystemVariable(**data_multiple) + system_var_multiple = SystemVariable.model_validate(data_multiple) assert len(system_var_multiple.files) == 2 assert system_var_multiple.files[0].filename == "doc1.txt" assert system_var_multiple.files[1].filename == "image.jpg" # Verify files field serialization/deserialization serialized = system_var_multiple.model_dump(mode="json") - deserialized = SystemVariable(**serialized) + deserialized = SystemVariable.model_validate(serialized) assert len(deserialized.files) == 2 assert deserialized.files[0].filename == "doc1.txt" assert deserialized.files[1].filename == "image.jpg" @@ -197,7 +197,7 @@ class TestSystemVariableSerialization: # Create with workflow_run_id (alias) data_with_alias = {**VALID_BASE_DATA, "workflow_run_id": workflow_id} - system_var = SystemVariable(**data_with_alias) + system_var = SystemVariable.model_validate(data_with_alias) # Serialize and verify alias is used serialized = system_var.model_dump() @@ -205,7 +205,7 @@ class TestSystemVariableSerialization: assert "workflow_execution_id" not in serialized # Deserialize and verify field mapping - deserialized = SystemVariable(**serialized) + deserialized = SystemVariable.model_validate(serialized) assert deserialized.workflow_execution_id == workflow_id # Test JSON serialization path @@ -213,7 +213,7 @@ class TestSystemVariableSerialization: assert json_serialized["workflow_run_id"] == workflow_id assert "workflow_execution_id" not in json_serialized - json_deserialized = SystemVariable(**json_serialized) + json_deserialized = SystemVariable.model_validate(json_serialized) assert json_deserialized.workflow_execution_id == workflow_id def test_model_validator_serialization_logic(self): @@ -222,7 +222,7 @@ class TestSystemVariableSerialization: # Test direct instantiation with workflow_execution_id (should work) data1 = {**VALID_BASE_DATA, "workflow_execution_id": workflow_id} - system_var1 = SystemVariable(**data1) + system_var1 = SystemVariable.model_validate(data1) assert system_var1.workflow_execution_id == workflow_id # Test serialization of the above (should use alias) @@ -236,7 +236,7 @@ class TestSystemVariableSerialization: "workflow_execution_id": "should-be-removed", "workflow_run_id": workflow_id, } - system_var2 = SystemVariable(**data2) + system_var2 = SystemVariable.model_validate(data2) assert system_var2.workflow_execution_id == workflow_id # Verify serialization consistency @@ -248,4 +248,4 @@ def test_constructor_with_extra_key(): # Test that SystemVariable should forbid extra keys with pytest.raises(ValidationError): # This should fail because there is an unexpected key. 
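# --- Illustrative sketch (not part of the diff) -----------------------------
# The pattern behind the ``SystemVariable(**data)`` -> ``model_validate``
# edits: Pydantic v2's ``model_validate`` takes the dict directly, applies
# field aliases, and keeps strict type checkers happy without the
# ``# type: ignore`` that ``**`` unpacking required. ``Demo`` is a stand-in
# model, not one from the codebase.
from pydantic import BaseModel, Field

class Demo(BaseModel):
    execution_id: str | None = Field(default=None, alias="run_id")

demo = Demo.model_validate({"run_id": "abc123"})  # alias-aware construction
assert demo.execution_id == "abc123"
# -----------------------------------------------------------------------------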
- SystemVariable(invalid_key=1) # type: ignore + SystemVariable(invalid_key=1) diff --git a/api/tests/unit_tests/core/workflow/test_variable_pool.py b/api/tests/unit_tests/core/workflow/test_variable_pool.py index 66d9d3fc14..9733bf60eb 100644 --- a/api/tests/unit_tests/core/workflow/test_variable_pool.py +++ b/api/tests/unit_tests/core/workflow/test_variable_pool.py @@ -27,7 +27,7 @@ from core.variables.variables import ( VariableUnion, ) from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from factories.variable_factory import build_segment, segment_to_variable diff --git a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py deleted file mode 100644 index 9f8f52015b..0000000000 --- a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py +++ /dev/null @@ -1,476 +0,0 @@ -import json -from unittest.mock import MagicMock - -import pytest -from sqlalchemy.orm import Session - -from core.app.app_config.entities import AppAdditionalFeatures, WorkflowUIBasedAppConfig -from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom -from core.app.entities.queue_entities import ( - QueueNodeFailedEvent, - QueueNodeStartedEvent, - QueueNodeSucceededEvent, -) -from core.workflow.entities import ( - WorkflowExecution, - WorkflowNodeExecution, -) -from core.workflow.enums import ( - WorkflowExecutionStatus, - WorkflowNodeExecutionMetadataKey, - WorkflowNodeExecutionStatus, - WorkflowType, -) -from core.workflow.nodes import NodeType -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository -from core.workflow.system_variable import SystemVariable -from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager -from libs.datetime_utils import naive_utc_now -from models.enums import CreatorUserRole -from models.model import AppMode -from models.workflow import Workflow, WorkflowRun - - -@pytest.fixture -def real_app_generate_entity(): - additional_features = AppAdditionalFeatures( - file_upload=None, - opening_statement=None, - suggested_questions=[], - suggested_questions_after_answer=False, - show_retrieve_source=False, - more_like_this=False, - speech_to_text=False, - text_to_speech=None, - trace_config=None, - ) - - app_config = WorkflowUIBasedAppConfig( - tenant_id="test-tenant-id", - app_id="test-app-id", - app_mode=AppMode.WORKFLOW, - additional_features=additional_features, - workflow_id="test-workflow-id", - ) - - entity = AdvancedChatAppGenerateEntity( - task_id="test-task-id", - app_config=app_config, - inputs={"query": "test query"}, - files=[], - user_id="test-user-id", - stream=False, - invoke_from=InvokeFrom.WEB_APP, - query="test query", - conversation_id="test-conversation-id", - ) - - return entity - - -@pytest.fixture -def real_workflow_system_variables(): - return SystemVariable( - query="test query", - conversation_id="test-conversation-id", - user_id="test-user-id", - app_id="test-app-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-workflow-run-id", - ) - - -@pytest.fixture -def mock_node_execution_repository(): - repo = 
MagicMock(spec=WorkflowNodeExecutionRepository) - return repo - - -@pytest.fixture -def mock_workflow_execution_repository(): - repo = MagicMock(spec=WorkflowExecutionRepository) - return repo - - -@pytest.fixture -def real_workflow_entity(): - return CycleManagerWorkflowInfo( - workflow_id="test-workflow-id", # Matches ID used in other fixtures - workflow_type=WorkflowType.WORKFLOW, - version="1.0.0", - graph_data={ - "nodes": [ - { - "id": "node1", - "type": "chat", # NodeType is a string enum - "name": "Chat Node", - "data": {"model": "gpt-3.5-turbo", "prompt": "test prompt"}, - } - ], - "edges": [], - }, - ) - - -@pytest.fixture -def workflow_cycle_manager( - real_app_generate_entity, - real_workflow_system_variables, - mock_workflow_execution_repository, - mock_node_execution_repository, - real_workflow_entity, -): - return WorkflowCycleManager( - application_generate_entity=real_app_generate_entity, - workflow_system_variables=real_workflow_system_variables, - workflow_info=real_workflow_entity, - workflow_execution_repository=mock_workflow_execution_repository, - workflow_node_execution_repository=mock_node_execution_repository, - ) - - -@pytest.fixture -def mock_session(): - session = MagicMock(spec=Session) - return session - - -@pytest.fixture -def real_workflow(): - workflow = Workflow() - workflow.id = "test-workflow-id" - workflow.tenant_id = "test-tenant-id" - workflow.app_id = "test-app-id" - workflow.type = "chat" - workflow.version = "1.0" - - graph_data = {"nodes": [], "edges": []} - workflow.graph = json.dumps(graph_data) - workflow.features = json.dumps({"file_upload": {"enabled": False}}) - workflow.created_by = "test-user-id" - workflow.created_at = naive_utc_now() - workflow.updated_at = naive_utc_now() - workflow._environment_variables = "{}" - workflow._conversation_variables = "{}" - - return workflow - - -@pytest.fixture -def real_workflow_run(): - workflow_run = WorkflowRun() - workflow_run.id = "test-workflow-run-id" - workflow_run.tenant_id = "test-tenant-id" - workflow_run.app_id = "test-app-id" - workflow_run.workflow_id = "test-workflow-id" - workflow_run.type = "chat" - workflow_run.triggered_from = "app-run" - workflow_run.version = "1.0" - workflow_run.graph = json.dumps({"nodes": [], "edges": []}) - workflow_run.inputs = json.dumps({"query": "test query"}) - workflow_run.status = WorkflowExecutionStatus.RUNNING - workflow_run.outputs = json.dumps({"answer": "test answer"}) - workflow_run.created_by_role = CreatorUserRole.ACCOUNT - workflow_run.created_by = "test-user-id" - workflow_run.created_at = naive_utc_now() - - return workflow_run - - -def test_init( - workflow_cycle_manager, - real_app_generate_entity, - real_workflow_system_variables, - mock_workflow_execution_repository, - mock_node_execution_repository, -): - """Test initialization of WorkflowCycleManager""" - assert workflow_cycle_manager._application_generate_entity == real_app_generate_entity - assert workflow_cycle_manager._workflow_system_variables == real_workflow_system_variables - assert workflow_cycle_manager._workflow_execution_repository == mock_workflow_execution_repository - assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository - - -def test_handle_workflow_run_start(workflow_cycle_manager): - """Test handle_workflow_run_start method""" - # Call the method - workflow_execution = workflow_cycle_manager.handle_workflow_run_start() - - # Verify the result - assert workflow_execution.workflow_id == "test-workflow-id" - - # Verify the 
workflow_execution_repository.save was called - workflow_cycle_manager._workflow_execution_repository.save.assert_called_once_with(workflow_execution) - - -def test_handle_workflow_run_success(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_workflow_run_success method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_run_success( - workflow_run_id="test-workflow-run-id", - total_tokens=100, - total_steps=5, - outputs={"answer": "test answer"}, - ) - - # Verify the result - assert result == workflow_execution - assert result.status == WorkflowExecutionStatus.SUCCEEDED - assert result.outputs == {"answer": "test answer"} - assert result.total_tokens == 100 - assert result.total_steps == 5 - assert result.finished_at is not None - - -def test_handle_workflow_run_failed(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_workflow_run_failed method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # No running node executions in cache (empty cache) - - # Call the method - result = workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id="test-workflow-run-id", - total_tokens=50, - total_steps=3, - status=WorkflowExecutionStatus.FAILED, - error_message="Test error message", - ) - - # Verify the result - assert result == workflow_execution - assert result.status == WorkflowExecutionStatus.FAILED - assert result.error_message == "Test error message" - assert result.total_tokens == 50 - assert result.total_steps == 3 - assert result.finished_at is not None - - -def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_node_execution_start method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-execution-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # Create a mock event - event = MagicMock(spec=QueueNodeStartedEvent) - event.node_execution_id = "test-node-execution-id" - event.node_id = "test-node-id" - event.node_type = NodeType.LLM - event.node_title = "Test Node" - event.predecessor_node_id = "test-predecessor-node-id" - event.node_run_index = 1 - event.parallel_mode_run_id = "test-parallel-mode-run-id" - event.in_iteration_id = "test-iteration-id" - event.in_loop_id = "test-loop-id" - - # Call the method - 
result = workflow_cycle_manager.handle_node_execution_start( - workflow_execution_id=workflow_execution.id_, - event=event, - ) - - # Verify the result - assert result.workflow_id == workflow_execution.workflow_id - assert result.workflow_execution_id == workflow_execution.id_ - assert result.node_execution_id == event.node_execution_id - assert result.node_id == event.node_id - assert result.node_type == event.node_type - assert result.title == event.node_title - assert result.status == WorkflowNodeExecutionStatus.RUNNING - - # Verify save was called - workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result) - - -def test_get_workflow_execution_or_raise_error(workflow_cycle_manager, mock_workflow_execution_repository): - """Test _get_workflow_execution_or_raise_error method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache["test-workflow-run-id"] = workflow_execution - - # Call the method - result = workflow_cycle_manager._get_workflow_execution_or_raise_error("test-workflow-run-id") - - # Verify the result - assert result == workflow_execution - - # Test error case - clear cache - workflow_cycle_manager._workflow_execution_cache.clear() - - # Expect an error when execution is not found - from core.app.task_pipeline.exc import WorkflowRunNotFoundError - - with pytest.raises(WorkflowRunNotFoundError): - workflow_cycle_manager._get_workflow_execution_or_raise_error("non-existent-id") - - -def test_handle_workflow_node_execution_success(workflow_cycle_manager): - """Test handle_workflow_node_execution_success method""" - # Create a mock event - event = MagicMock(spec=QueueNodeSucceededEvent) - event.node_execution_id = "test-node-execution-id" - event.inputs = {"input": "test input"} - event.process_data = {"process": "test process"} - event.outputs = {"output": "test output"} - event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100} - event.start_at = naive_utc_now() - - # Create a real node execution - - node_execution = WorkflowNodeExecution( - id="test-node-execution-record-id", - node_execution_id="test-node-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-workflow-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - created_at=naive_utc_now(), - ) - - # Pre-populate the cache with the node execution - workflow_cycle_manager._node_execution_cache["test-node-execution-id"] = node_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_node_execution_success( - event=event, - ) - - # Verify the result - assert result == node_execution - assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED - - # Verify save was called - workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution) - - -def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_workflow_run_partial_success method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - 
workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_run_partial_success( - workflow_run_id="test-workflow-run-id", - total_tokens=75, - total_steps=4, - outputs={"partial_answer": "test partial answer"}, - exceptions_count=2, - ) - - # Verify the result - assert result == workflow_execution - assert result.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED - assert result.outputs == {"partial_answer": "test partial answer"} - assert result.total_tokens == 75 - assert result.total_steps == 4 - assert result.exceptions_count == 2 - assert result.finished_at is not None - - -def test_handle_workflow_node_execution_failed(workflow_cycle_manager): - """Test handle_workflow_node_execution_failed method""" - # Create a mock event - event = MagicMock(spec=QueueNodeFailedEvent) - event.node_execution_id = "test-node-execution-id" - event.inputs = {"input": "test input"} - event.process_data = {"process": "test process"} - event.outputs = {"output": "test output"} - event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100} - event.start_at = naive_utc_now() - event.error = "Test error message" - - # Create a real node execution - - node_execution = WorkflowNodeExecution( - id="test-node-execution-record-id", - node_execution_id="test-node-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-workflow-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - created_at=naive_utc_now(), - ) - - # Pre-populate the cache with the node execution - workflow_cycle_manager._node_execution_cache["test-node-execution-id"] = node_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_node_execution_failed( - event=event, - ) - - # Verify the result - assert result == node_execution - assert result.status == WorkflowNodeExecutionStatus.FAILED - assert result.error == "Test error message" - - # Verify save was called - workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution) diff --git a/api/tests/unit_tests/core/workflow/test_workflow_entry.py b/api/tests/unit_tests/core/workflow/test_workflow_entry.py index 324f58abf6..75de5c455f 100644 --- a/api/tests/unit_tests/core/workflow/test_workflow_entry.py +++ b/api/tests/unit_tests/core/workflow/test_workflow_entry.py @@ -7,7 +7,7 @@ from core.workflow.constants import ( CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, ) -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry diff --git a/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py b/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py index c3d59aaf3f..bc55d3fccf 100644 --- a/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py +++ b/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py @@ -3,8 +3,8 @@ from unittest.mock import MagicMock, patch from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphRuntimeState, 
VariablePool from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.workflow_entry import WorkflowEntry from models.enums import UserFrom diff --git a/api/tests/unit_tests/extensions/storage/test_supabase_storage.py b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py index 958072223e..476f87269c 100644 --- a/api/tests/unit_tests/extensions/storage/test_supabase_storage.py +++ b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py @@ -172,73 +172,31 @@ class TestSupabaseStorage: assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] mock_client.storage.from_().download.assert_called_with("test.txt") - def test_exists_with_list_containing_items(self, storage_with_mock_client): - """Test exists returns True when list() returns items (using len() > 0).""" + def test_exists_returns_true_when_file_found(self, storage_with_mock_client): + """Test exists returns True when list() returns items.""" storage, mock_client = storage_with_mock_client - # Mock list return with special object that has count() method - mock_list_result = Mock() - mock_list_result.count.return_value = 1 - mock_client.storage.from_().list.return_value = mock_list_result + mock_client.storage.from_().list.return_value = [{"name": "test.txt"}] result = storage.exists("test.txt") assert result is True - # from_ gets called during init too, so just check it was called with the right bucket assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") + mock_client.storage.from_().list.assert_called_with(path="test.txt") - def test_exists_with_count_method_greater_than_zero(self, storage_with_mock_client): - """Test exists returns True when list result has count() > 0.""" + def test_exists_returns_false_when_file_not_found(self, storage_with_mock_client): + """Test exists returns False when list() returns an empty list.""" storage, mock_client = storage_with_mock_client - # Mock list return with count() method - mock_list_result = Mock() - mock_list_result.count.return_value = 1 - mock_client.storage.from_().list.return_value = mock_list_result - - result = storage.exists("test.txt") - - assert result is True - # Verify the correct calls were made - assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - mock_list_result.count.assert_called() - - def test_exists_with_count_method_zero(self, storage_with_mock_client): - """Test exists returns False when list result has count() == 0.""" - storage, mock_client = storage_with_mock_client - - # Mock list return with count() method returning 0 - mock_list_result = Mock() - mock_list_result.count.return_value = 0 - mock_client.storage.from_().list.return_value = mock_list_result + mock_client.storage.from_().list.return_value = [] result = storage.exists("test.txt") assert result is False - # Verify the correct calls were made assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - mock_list_result.count.assert_called() + mock_client.storage.from_().list.assert_called_with(path="test.txt") - def test_exists_with_empty_list(self, storage_with_mock_client): - """Test exists returns 
False when list() returns empty list.""" - storage, mock_client = storage_with_mock_client - - # Mock list return with special object that has count() method returning 0 - mock_list_result = Mock() - mock_list_result.count.return_value = 0 - mock_client.storage.from_().list.return_value = mock_list_result - - result = storage.exists("test.txt") - - assert result is False - # Verify the correct calls were made - assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - - def test_delete_calls_remove_with_filename(self, storage_with_mock_client): + def test_delete_calls_remove_with_filename_in_list(self, storage_with_mock_client): """Test delete calls remove([...]) (some client versions require a list).""" storage, mock_client = storage_with_mock_client @@ -247,7 +205,7 @@ class TestSupabaseStorage: storage.delete(filename) mock_client.storage.from_.assert_called_once_with("test-bucket") - mock_client.storage.from_().remove.assert_called_once_with(filename) + mock_client.storage.from_().remove.assert_called_once_with([filename]) def test_bucket_exists_returns_true_when_bucket_found(self): """Test bucket_exists returns True when bucket is found in list.""" diff --git a/api/tests/unit_tests/factories/test_build_from_mapping.py b/api/tests/unit_tests/factories/test_build_from_mapping.py index 39280c9267..77c4956c04 100644 --- a/api/tests/unit_tests/factories/test_build_from_mapping.py +++ b/api/tests/unit_tests/factories/test_build_from_mapping.py @@ -150,6 +150,42 @@ def test_build_from_remote_url(mock_http_head): assert file.size == 2048 +@pytest.mark.parametrize( + ("file_type", "should_pass", "expected_error"), + [ + ("image", True, None), + ("document", False, "Detected file type does not match the specified type"), + ("video", False, "Detected file type does not match the specified type"), + ], +) +def test_build_from_remote_url_strict_validation(mock_http_head, file_type, should_pass, expected_error): + """Test strict type validation for remote_url.""" + mapping = { + "transfer_method": "remote_url", + "url": TEST_REMOTE_URL, + "type": file_type, + } + if should_pass: + file = build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, strict_type_validation=True) + assert file.type == FileType(file_type) + else: + with pytest.raises(ValueError, match=expected_error): + build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, strict_type_validation=True) + + +def test_build_from_remote_url_without_strict_validation(mock_http_head): + """Test that remote_url allows type mismatch when strict_type_validation is False.""" + mapping = { + "transfer_method": "remote_url", + "url": TEST_REMOTE_URL, + "type": "document", + } + file = build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, strict_type_validation=False) + assert file.transfer_method == FileTransferMethod.REMOTE_URL + assert file.type == FileType.DOCUMENT + assert file.filename == "remote_test.jpg" + + def test_tool_file_not_found(): """Test ToolFile not found in database.""" with patch("factories.file_factory.db.session.scalar", return_value=None): diff --git a/api/tests/unit_tests/libs/test_custom_inputs.py b/api/tests/unit_tests/libs/test_custom_inputs.py new file mode 100644 index 0000000000..7e4c3b4ff0 --- /dev/null +++ b/api/tests/unit_tests/libs/test_custom_inputs.py @@ -0,0 +1,68 @@ +"""Unit tests for custom input types.""" + +import pytest + +from libs.custom_inputs import time_duration + + +class 
TestTimeDuration: + """Test time_duration input validator.""" + + def test_valid_days(self): + """Test valid days format.""" + result = time_duration("7d") + assert result == "7d" + + def test_valid_hours(self): + """Test valid hours format.""" + result = time_duration("4h") + assert result == "4h" + + def test_valid_minutes(self): + """Test valid minutes format.""" + result = time_duration("30m") + assert result == "30m" + + def test_valid_seconds(self): + """Test valid seconds format.""" + result = time_duration("30s") + assert result == "30s" + + def test_uppercase_conversion(self): + """Test uppercase units are converted to lowercase.""" + result = time_duration("7D") + assert result == "7d" + + result = time_duration("4H") + assert result == "4h" + + def test_invalid_format_no_unit(self): + """Test invalid format without unit.""" + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("7") + + def test_invalid_format_wrong_unit(self): + """Test invalid format with wrong unit.""" + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("7days") + + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("7x") + + def test_invalid_format_no_number(self): + """Test invalid format without number.""" + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("d") + + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("abc") + + def test_empty_string(self): + """Test empty string.""" + with pytest.raises(ValueError, match="Time duration cannot be empty"): + time_duration("") + + def test_none(self): + """Test None value.""" + with pytest.raises(ValueError, match="Time duration cannot be empty"): + time_duration(None) diff --git a/api/tests/unit_tests/libs/test_external_api.py b/api/tests/unit_tests/libs/test_external_api.py index a9edb913ea..9aa157a651 100644 --- a/api/tests/unit_tests/libs/test_external_api.py +++ b/api/tests/unit_tests/libs/test_external_api.py @@ -2,7 +2,9 @@ from flask import Blueprint, Flask from flask_restx import Resource from werkzeug.exceptions import BadRequest, Unauthorized +from constants import COOKIE_NAME_ACCESS_TOKEN, COOKIE_NAME_CSRF_TOKEN, COOKIE_NAME_REFRESH_TOKEN from core.errors.error import AppInvokeQuotaExceededError +from libs.exception import BaseHTTPException from libs.external_api import ExternalApi @@ -12,36 +14,36 @@ def _create_api_app(): api = ExternalApi(bp) @api.route("/bad-request") - class Bad(Resource): # type: ignore - def get(self): # type: ignore + class Bad(Resource): + def get(self): raise BadRequest("invalid input") @api.route("/unauth") - class Unauth(Resource): # type: ignore - def get(self): # type: ignore + class Unauth(Resource): + def get(self): raise Unauthorized("auth required") @api.route("/value-error") - class ValErr(Resource): # type: ignore - def get(self): # type: ignore + class ValErr(Resource): + def get(self): raise ValueError("boom") @api.route("/quota") - class Quota(Resource): # type: ignore - def get(self): # type: ignore + class Quota(Resource): + def get(self): raise AppInvokeQuotaExceededError("quota exceeded") @api.route("/general") - class Gen(Resource): # type: ignore - def get(self): # type: ignore + class Gen(Resource): + def get(self): raise RuntimeError("oops") # Note: We avoid altering default_mediatype to keep normal error paths # Special 400 message rewrite @api.route("/json-empty") - class JsonEmpty(Resource): # type: ignore - def get(self): # 
type: ignore
+    class JsonEmpty(Resource):
+        def get(self):
             e = BadRequest()
             # Force the specific message the handler rewrites
             e.description = "Failed to decode JSON object: Expecting value: line 1 column 1 (char 0)"
@@ -49,11 +51,11 @@ def _create_api_app():
 
     # 400 mapping payload path
     @api.route("/param-errors")
-    class ParamErrors(Resource):  # type: ignore
-        def get(self):  # type: ignore
+    class ParamErrors(Resource):
+        def get(self):
             e = BadRequest()
             # Coerce a mapping description to trigger param error shaping
-            e.description = {"field": "is required"}  # type: ignore[assignment]
+            e.description = {"field": "is required"}
             raise e
 
     app.register_blueprint(bp, url_prefix="/api")
@@ -103,7 +105,7 @@ def test_external_api_param_mapping_and_quota_and_exc_info_none():
     orig_exc_info = ext.sys.exc_info
 
     try:
-        ext.sys.exc_info = lambda: (None, None, None)  # type: ignore[assignment]
+        ext.sys.exc_info = lambda: (None, None, None)
 
         app = _create_api_app()
         client = app.test_client()
@@ -120,3 +122,59 @@
         assert res.status_code in (400, 429)
     finally:
         ext.sys.exc_info = orig_exc_info  # type: ignore[assignment]
+
+
+def test_unauthorized_and_force_logout_clears_cookies():
+    """Test that UnauthorizedAndForceLogout error clears auth cookies"""
+
+    class UnauthorizedAndForceLogout(BaseHTTPException):
+        error_code = "unauthorized_and_force_logout"
+        description = "Unauthorized and force logout."
+        code = 401
+
+    app = Flask(__name__)
+    bp = Blueprint("test", __name__)
+    api = ExternalApi(bp)
+
+    @api.route("/force-logout")
+    class ForceLogout(Resource):  # type: ignore
+        def get(self):  # type: ignore
+            raise UnauthorizedAndForceLogout()
+
+    app.register_blueprint(bp, url_prefix="/api")
+    client = app.test_client()
+
+    # Set cookies first
+    client.set_cookie(COOKIE_NAME_ACCESS_TOKEN, "test_access_token")
+    client.set_cookie(COOKIE_NAME_CSRF_TOKEN, "test_csrf_token")
+    client.set_cookie(COOKIE_NAME_REFRESH_TOKEN, "test_refresh_token")
+
+    # Make request that should trigger cookie clearing
+    res = client.get("/api/force-logout")
+
+    # Verify response
+    assert res.status_code == 401
+    data = res.get_json()
+    assert data["code"] == "unauthorized_and_force_logout"
+    assert data["status"] == 401
+    assert "WWW-Authenticate" in res.headers
+
+    # Verify Set-Cookie headers are present to clear cookies
+    set_cookie_headers = res.headers.getlist("Set-Cookie")
+    assert len(set_cookie_headers) == 3, f"Expected 3 Set-Cookie headers, got {len(set_cookie_headers)}"
+
+    # Verify each cookie is being cleared (value reset and expired)
+    cookie_names_found = set()
+    for cookie_header in set_cookie_headers:
+        for cookie_name in (COOKIE_NAME_ACCESS_TOKEN, COOKIE_NAME_CSRF_TOKEN, COOKIE_NAME_REFRESH_TOKEN):
+            if cookie_name in cookie_header:
+                cookie_names_found.add(cookie_name)
+                assert f"{cookie_name}=" in cookie_header  # Value is reset
+                assert "Expires=Thu, 01 Jan 1970" in cookie_header  # Expired in the past
+                break
+
+    # Verify all three cookies are present
+    assert len(cookie_names_found) == 3
+    assert COOKIE_NAME_ACCESS_TOKEN in cookie_names_found
+    assert COOKIE_NAME_CSRF_TOKEN in cookie_names_found
+    assert 
COOKIE_NAME_REFRESH_TOKEN in cookie_names_found diff --git a/api/tests/unit_tests/libs/test_flask_utils.py b/api/tests/unit_tests/libs/test_flask_utils.py index e30433bfce..9cab0db24c 100644 --- a/api/tests/unit_tests/libs/test_flask_utils.py +++ b/api/tests/unit_tests/libs/test_flask_utils.py @@ -67,7 +67,7 @@ def test_current_user_not_accessible_across_threads(login_app: Flask, test_user: # without preserve_flask_contexts result["user_accessible"] = current_user.is_authenticated except Exception as e: - result["error"] = str(e) # type: ignore + result["error"] = str(e) # Run the function in a separate thread thread = threading.Thread(target=check_user_in_thread) @@ -110,7 +110,7 @@ def test_current_user_accessible_with_preserve_flask_contexts(login_app: Flask, else: result["user_accessible"] = False except Exception as e: - result["error"] = str(e) # type: ignore + result["error"] = str(e) # Run the function in a separate thread thread = threading.Thread(target=check_user_in_thread_with_manager) diff --git a/api/tests/unit_tests/libs/test_helper.py b/api/tests/unit_tests/libs/test_helper.py index b7701055f5..85789bfa7e 100644 --- a/api/tests/unit_tests/libs/test_helper.py +++ b/api/tests/unit_tests/libs/test_helper.py @@ -11,7 +11,7 @@ class TestExtractTenantId: def test_extract_tenant_id_from_account_with_tenant(self): """Test extracting tenant_id from Account with current_tenant_id.""" # Create a mock Account object - account = Account() + account = Account(name="test", email="test@example.com") # Mock the current_tenant_id property account._current_tenant = type("MockTenant", (), {"id": "account-tenant-123"})() @@ -21,7 +21,7 @@ class TestExtractTenantId: def test_extract_tenant_id_from_account_without_tenant(self): """Test extracting tenant_id from Account without current_tenant_id.""" # Create a mock Account object - account = Account() + account = Account(name="test", email="test@example.com") account._current_tenant = None tenant_id = extract_tenant_id(account) diff --git a/api/tests/unit_tests/libs/test_json_in_md_parser.py b/api/tests/unit_tests/libs/test_json_in_md_parser.py index 53fd0bea16..953f203e89 100644 --- a/api/tests/unit_tests/libs/test_json_in_md_parser.py +++ b/api/tests/unit_tests/libs/test_json_in_md_parser.py @@ -86,3 +86,24 @@ def test_parse_and_check_json_markdown_multiple_blocks_fails(): # opening fence to the last closing fence, causing JSON decode failure. 
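    # A hypothetical illustration (not the fixture above): an input such as
    #   ```json\n{"a": 1}\n``` text ```json\n{"b": 2}\n```
    # gets sliced to '{"a": 1}\n``` text ```json\n{"b": 2}', which json.loads
    # cannot parse.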
with pytest.raises(OutputParserError): parse_and_check_json_markdown(src, []) + + +def test_parse_and_check_json_markdown_handles_think_fenced_and_raw_variants(): + expected = {"keywords": ["2"], "category_id": "2", "category_name": "2"} + cases = [ + """ + ```json + [{"keywords": ["2"], "category_id": "2", "category_name": "2"}] + ```, error: Expecting value: line 1 column 1 (char 0) + """, + """ + ```json + {"keywords": ["2"], "category_id": "2", "category_name": "2"} + ```, error: Extra data: line 2 column 5 (char 66) + """, + '{"keywords": ["2"], "category_id": "2", "category_name": "2"}', + '[{"keywords": ["2"], "category_id": "2", "category_name": "2"}]', + ] + for src in cases: + obj = parse_and_check_json_markdown(src, ["keywords", "category_id", "category_name"]) + assert obj == expected diff --git a/api/tests/unit_tests/libs/test_login.py b/api/tests/unit_tests/libs/test_login.py index 39671077d4..35155b4931 100644 --- a/api/tests/unit_tests/libs/test_login.py +++ b/api/tests/unit_tests/libs/test_login.py @@ -19,10 +19,15 @@ class MockUser(UserMixin): return self._is_authenticated +def mock_csrf_check(*args, **kwargs): + return + + class TestLoginRequired: """Test cases for login_required decorator.""" @pytest.fixture + @patch("libs.login.check_csrf_token", mock_csrf_check) def setup_app(self, app: Flask): """Set up Flask app with login manager.""" # Initialize login manager @@ -39,6 +44,7 @@ class TestLoginRequired: return app + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_authenticated_user_can_access_protected_view(self, setup_app: Flask): """Test that authenticated users can access protected views.""" @@ -53,6 +59,7 @@ class TestLoginRequired: result = protected_view() assert result == "Protected content" + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_unauthenticated_user_cannot_access_protected_view(self, setup_app: Flask): """Test that unauthenticated users are redirected.""" @@ -68,6 +75,7 @@ class TestLoginRequired: assert result == "Unauthorized" setup_app.login_manager.unauthorized.assert_called_once() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_login_disabled_allows_unauthenticated_access(self, setup_app: Flask): """Test that LOGIN_DISABLED config bypasses authentication.""" @@ -87,6 +95,7 @@ class TestLoginRequired: # Ensure unauthorized was not called setup_app.login_manager.unauthorized.assert_not_called() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_options_request_bypasses_authentication(self, setup_app: Flask): """Test that OPTIONS requests are exempt from authentication.""" @@ -103,6 +112,7 @@ class TestLoginRequired: # Ensure unauthorized was not called setup_app.login_manager.unauthorized.assert_not_called() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_flask_2_compatibility(self, setup_app: Flask): """Test Flask 2.x compatibility with ensure_sync.""" @@ -120,6 +130,7 @@ class TestLoginRequired: assert result == "Synced content" setup_app.ensure_sync.assert_called_once() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_flask_1_compatibility(self, setup_app: Flask): """Test Flask 1.x compatibility without ensure_sync.""" diff --git a/api/tests/unit_tests/libs/test_oauth_base.py b/api/tests/unit_tests/libs/test_oauth_base.py index 3e0c235fff..7b7f086dac 100644 --- a/api/tests/unit_tests/libs/test_oauth_base.py +++ b/api/tests/unit_tests/libs/test_oauth_base.py @@ -16,4 +16,4 @@ def test_oauth_base_methods_raise_not_implemented(): 
oauth.get_raw_user_info("token")
 
     with pytest.raises(NotImplementedError):
-        oauth._transform_user_info({})  # type: ignore[name-defined]
+        oauth._transform_user_info({})
diff --git a/api/tests/unit_tests/libs/test_time_parser.py b/api/tests/unit_tests/libs/test_time_parser.py
new file mode 100644
index 0000000000..83ff251272
--- /dev/null
+++ b/api/tests/unit_tests/libs/test_time_parser.py
@@ -0,0 +1,89 @@
+"""Unit tests for time parser utility."""
+
+from datetime import UTC, datetime, timedelta
+
+from libs.time_parser import get_time_threshold, parse_time_duration
+
+
+class TestParseTimeDuration:
+    """Test parse_time_duration function."""
+
+    def test_parse_days(self):
+        """Test parsing days."""
+        result = parse_time_duration("7d")
+        assert result == timedelta(days=7)
+
+    def test_parse_hours(self):
+        """Test parsing hours."""
+        result = parse_time_duration("4h")
+        assert result == timedelta(hours=4)
+
+    def test_parse_minutes(self):
+        """Test parsing minutes."""
+        result = parse_time_duration("30m")
+        assert result == timedelta(minutes=30)
+
+    def test_parse_seconds(self):
+        """Test parsing seconds."""
+        result = parse_time_duration("30s")
+        assert result == timedelta(seconds=30)
+
+    def test_parse_uppercase(self):
+        """Test parsing uppercase units."""
+        result = parse_time_duration("7D")
+        assert result == timedelta(days=7)
+
+    def test_parse_invalid_format(self):
+        """Test parsing invalid format."""
+        result = parse_time_duration("7days")
+        assert result is None
+
+        result = parse_time_duration("abc")
+        assert result is None
+
+        result = parse_time_duration("7")
+        assert result is None
+
+    def test_parse_empty_string(self):
+        """Test parsing empty string."""
+        result = parse_time_duration("")
+        assert result is None
+
+    def test_parse_none(self):
+        """Test parsing None."""
+        result = parse_time_duration(None)
+        assert result is None
+
+
+class TestGetTimeThreshold:
+    """Test get_time_threshold function."""
+
+    def test_get_threshold_days(self):
+        """Test getting threshold for days."""
+        before = datetime.now(UTC)
+        result = get_time_threshold("7d")
+        after = datetime.now(UTC)
+
+        assert result is not None
+        # The threshold is 7 days before some instant between `before` and
+        # `after`, so it must land inside this closed window
+        assert before - timedelta(days=7) <= result <= after - timedelta(days=7)
+
+    def test_get_threshold_hours(self):
+        """Test getting threshold for hours."""
+        before = datetime.now(UTC)
+        result = get_time_threshold("4h")
+        after = datetime.now(UTC)
+
+        assert result is not None
+        assert before - timedelta(hours=4) <= result <= after - timedelta(hours=4)
+
+    def test_get_threshold_invalid(self):
+        """Test getting threshold with invalid duration."""
+        result = get_time_threshold("invalid")
+        assert result is None
+
+    def test_get_threshold_none(self):
+        """Test getting threshold with None."""
+        result = get_time_threshold(None)
+        assert result is None
diff --git a/api/tests/unit_tests/libs/test_token.py b/api/tests/unit_tests/libs/test_token.py
new file mode 100644
index 0000000000..6a65b5faa0
--- /dev/null
+++ b/api/tests/unit_tests/libs/test_token.py
@@ -0,0 +1,62 @@
+from unittest.mock import MagicMock
+
+from werkzeug.wrappers import Response
+
+from constants import COOKIE_NAME_ACCESS_TOKEN, COOKIE_NAME_WEBAPP_ACCESS_TOKEN
+from libs import token
+from libs.token import extract_access_token, extract_webapp_access_token, set_csrf_token_to_cookie
+
+
+class MockRequest:
+    def __init__(self, headers: dict[str, str], cookies: dict[str, str], args: 
dict[str, str]): + self.headers: dict[str, str] = headers + self.cookies: dict[str, str] = cookies + self.args: dict[str, str] = args + + +def test_extract_access_token(): + def _mock_request(headers: dict[str, str], cookies: dict[str, str], args: dict[str, str]): + return MockRequest(headers, cookies, args) + + test_cases = [ + (_mock_request({"Authorization": "Bearer 123"}, {}, {}), "123", "123"), + (_mock_request({}, {COOKIE_NAME_ACCESS_TOKEN: "123"}, {}), "123", None), + (_mock_request({}, {}, {}), None, None), + (_mock_request({"Authorization": "Bearer_aaa 123"}, {}, {}), None, None), + (_mock_request({}, {COOKIE_NAME_WEBAPP_ACCESS_TOKEN: "123"}, {}), None, "123"), + ] + for request, expected_console, expected_webapp in test_cases: + assert extract_access_token(request) == expected_console # pyright: ignore[reportArgumentType] + assert extract_webapp_access_token(request) == expected_webapp # pyright: ignore[reportArgumentType] + + +def test_real_cookie_name_uses_host_prefix_without_domain(monkeypatch): + monkeypatch.setattr(token.dify_config, "CONSOLE_WEB_URL", "https://console.example.com", raising=False) + monkeypatch.setattr(token.dify_config, "CONSOLE_API_URL", "https://api.example.com", raising=False) + monkeypatch.setattr(token.dify_config, "COOKIE_DOMAIN", "", raising=False) + + assert token._real_cookie_name("csrf_token") == "__Host-csrf_token" + + +def test_real_cookie_name_without_host_prefix_when_domain_present(monkeypatch): + monkeypatch.setattr(token.dify_config, "CONSOLE_WEB_URL", "https://console.example.com", raising=False) + monkeypatch.setattr(token.dify_config, "CONSOLE_API_URL", "https://api.example.com", raising=False) + monkeypatch.setattr(token.dify_config, "COOKIE_DOMAIN", ".example.com", raising=False) + + assert token._real_cookie_name("csrf_token") == "csrf_token" + + +def test_set_csrf_cookie_includes_domain_when_configured(monkeypatch): + monkeypatch.setattr(token.dify_config, "CONSOLE_WEB_URL", "https://console.example.com", raising=False) + monkeypatch.setattr(token.dify_config, "CONSOLE_API_URL", "https://api.example.com", raising=False) + monkeypatch.setattr(token.dify_config, "COOKIE_DOMAIN", ".example.com", raising=False) + + response = Response() + request = MagicMock() + + set_csrf_token_to_cookie(request, response, "abc123") + + cookies = response.headers.getlist("Set-Cookie") + assert any("csrf_token=abc123" in c for c in cookies) + assert any("Domain=example.com" in c for c in cookies) + assert all("__Host-" not in c for c in cookies) diff --git a/api/tests/unit_tests/oss/__mock/tencent_cos.py b/api/tests/unit_tests/oss/__mock/tencent_cos.py index c77c5b08f3..5189b68e87 100644 --- a/api/tests/unit_tests/oss/__mock/tencent_cos.py +++ b/api/tests/unit_tests/oss/__mock/tencent_cos.py @@ -3,8 +3,8 @@ from unittest.mock import MagicMock import pytest from _pytest.monkeypatch import MonkeyPatch -from qcloud_cos import CosS3Client # type: ignore -from qcloud_cos.streambody import StreamBody # type: ignore +from qcloud_cos import CosS3Client +from qcloud_cos.streambody import StreamBody from tests.unit_tests.oss.__mock.base import ( get_example_bucket, diff --git a/api/tests/unit_tests/oss/__mock/volcengine_tos.py b/api/tests/unit_tests/oss/__mock/volcengine_tos.py index 88df59f91c..649d93a202 100644 --- a/api/tests/unit_tests/oss/__mock/volcengine_tos.py +++ b/api/tests/unit_tests/oss/__mock/volcengine_tos.py @@ -4,8 +4,8 @@ from unittest.mock import MagicMock import pytest from _pytest.monkeypatch import MonkeyPatch -from tos import TosClientV2 # 
type: ignore -from tos.clientv2 import DeleteObjectOutput, GetObjectOutput, HeadObjectOutput, PutObjectOutput # type: ignore +from tos import TosClientV2 +from tos.clientv2 import DeleteObjectOutput, GetObjectOutput, HeadObjectOutput, PutObjectOutput from tests.unit_tests.oss.__mock.base import ( get_example_bucket, diff --git a/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py index d289751800..303f0493bd 100644 --- a/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py +++ b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py @@ -1,7 +1,7 @@ from unittest.mock import patch import pytest -from qcloud_cos import CosConfig # type: ignore +from qcloud_cos import CosConfig from extensions.storage.tencent_cos_storage import TencentCosStorage from tests.unit_tests.oss.__mock.base import ( diff --git a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py index 04988e85d8..a06623a69e 100644 --- a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py +++ b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py @@ -1,5 +1,7 @@ +from unittest.mock import patch + import pytest -from tos import TosClientV2 # type: ignore +from tos import TosClientV2 from extensions.storage.volcengine_tos_storage import VolcengineTosStorage from tests.unit_tests.oss.__mock.base import ( @@ -13,7 +15,13 @@ class TestVolcengineTos(BaseStorageTest): @pytest.fixture(autouse=True) def setup_method(self, setup_volcengine_tos_mock): """Executed before each test method.""" - self.storage = VolcengineTosStorage() + with patch("extensions.storage.volcengine_tos_storage.dify_config") as mock_config: + mock_config.VOLCENGINE_TOS_ACCESS_KEY = "test_access_key" + mock_config.VOLCENGINE_TOS_SECRET_KEY = "test_secret_key" + mock_config.VOLCENGINE_TOS_ENDPOINT = "test_endpoint" + mock_config.VOLCENGINE_TOS_REGION = "test_region" + self.storage = VolcengineTosStorage() + self.storage.bucket_name = get_example_bucket() self.storage.client = TosClientV2( ak="dify", diff --git a/api/tests/unit_tests/repositories/test_workflow_run_repository.py b/api/tests/unit_tests/repositories/test_workflow_run_repository.py new file mode 100644 index 0000000000..8f47f0df48 --- /dev/null +++ b/api/tests/unit_tests/repositories/test_workflow_run_repository.py @@ -0,0 +1,251 @@ +"""Unit tests for workflow run repository with status filter.""" + +import uuid +from unittest.mock import MagicMock + +import pytest +from sqlalchemy.orm import sessionmaker + +from models import WorkflowRun, WorkflowRunTriggeredFrom +from repositories.sqlalchemy_api_workflow_run_repository import DifyAPISQLAlchemyWorkflowRunRepository + + +class TestDifyAPISQLAlchemyWorkflowRunRepository: + """Test workflow run repository with status filtering.""" + + @pytest.fixture + def mock_session_maker(self): + """Create a mock session maker.""" + return MagicMock(spec=sessionmaker) + + @pytest.fixture + def repository(self, mock_session_maker): + """Create repository instance with mock session.""" + return DifyAPISQLAlchemyWorkflowRunRepository(mock_session_maker) + + def test_get_paginated_workflow_runs_without_status(self, repository, mock_session_maker): + """Test getting paginated workflow runs without status filter.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + mock_runs = 
[MagicMock(spec=WorkflowRun) for _ in range(3)] + mock_session.scalars.return_value.all.return_value = mock_runs + + # Act + result = repository.get_paginated_workflow_runs( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + limit=20, + last_id=None, + status=None, + ) + + # Assert + assert len(result.data) == 3 + assert result.limit == 20 + assert result.has_more is False + + def test_get_paginated_workflow_runs_with_status_filter(self, repository, mock_session_maker): + """Test getting paginated workflow runs with status filter.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + mock_runs = [MagicMock(spec=WorkflowRun, status="succeeded") for _ in range(2)] + mock_session.scalars.return_value.all.return_value = mock_runs + + # Act + result = repository.get_paginated_workflow_runs( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + limit=20, + last_id=None, + status="succeeded", + ) + + # Assert + assert len(result.data) == 2 + assert all(run.status == "succeeded" for run in result.data) + + def test_get_workflow_runs_count_without_status(self, repository, mock_session_maker): + """Test getting workflow runs count without status filter.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + # Mock the GROUP BY query results + mock_results = [ + ("succeeded", 5), + ("failed", 2), + ("running", 1), + ] + mock_session.execute.return_value.all.return_value = mock_results + + # Act + result = repository.get_workflow_runs_count( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + status=None, + ) + + # Assert + assert result["total"] == 8 + assert result["succeeded"] == 5 + assert result["failed"] == 2 + assert result["running"] == 1 + assert result["stopped"] == 0 + assert result["partial-succeeded"] == 0 + + def test_get_workflow_runs_count_with_status_filter(self, repository, mock_session_maker): + """Test getting workflow runs count with status filter.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + # Mock the count query for succeeded status + mock_session.scalar.return_value = 5 + + # Act + result = repository.get_workflow_runs_count( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + status="succeeded", + ) + + # Assert + assert result["total"] == 5 + assert result["succeeded"] == 5 + assert result["running"] == 0 + assert result["failed"] == 0 + assert result["stopped"] == 0 + assert result["partial-succeeded"] == 0 + + def test_get_workflow_runs_count_with_invalid_status(self, repository, mock_session_maker): + """Test that invalid status is still counted in total but not in any specific status.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + # Mock count query returning 0 for invalid status + mock_session.scalar.return_value = 0 + + # Act + result = repository.get_workflow_runs_count( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + 
status="invalid_status", + ) + + # Assert + assert result["total"] == 0 + assert all(result[status] == 0 for status in ["running", "succeeded", "failed", "stopped", "partial-succeeded"]) + + def test_get_workflow_runs_count_with_time_range(self, repository, mock_session_maker): + """Test getting workflow runs count with time range filter verifies SQL query construction.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + # Mock the GROUP BY query results + mock_results = [ + ("succeeded", 3), + ("running", 2), + ] + mock_session.execute.return_value.all.return_value = mock_results + + # Act + result = repository.get_workflow_runs_count( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + status=None, + time_range="1d", + ) + + # Assert results + assert result["total"] == 5 + assert result["succeeded"] == 3 + assert result["running"] == 2 + assert result["failed"] == 0 + + # Verify that execute was called (which means GROUP BY query was used) + assert mock_session.execute.called, "execute should have been called for GROUP BY query" + + # Verify SQL query includes time filter by checking the statement + call_args = mock_session.execute.call_args + assert call_args is not None, "execute should have been called with a statement" + + # The first argument should be the SQL statement + stmt = call_args[0][0] + # Convert to string to inspect the query + query_str = str(stmt.compile(compile_kwargs={"literal_binds": True})) + + # Verify the query includes created_at filter + # The query should have a WHERE clause with created_at comparison + assert "created_at" in query_str.lower() or "workflow_runs.created_at" in query_str.lower(), ( + "Query should include created_at filter for time range" + ) + + def test_get_workflow_runs_count_with_status_and_time_range(self, repository, mock_session_maker): + """Test getting workflow runs count with both status and time range filters verifies SQL query.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + # Mock the count query for running status within time range + mock_session.scalar.return_value = 2 + + # Act + result = repository.get_workflow_runs_count( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + status="running", + time_range="1d", + ) + + # Assert results + assert result["total"] == 2 + assert result["running"] == 2 + assert result["succeeded"] == 0 + assert result["failed"] == 0 + + # Verify that scalar was called (which means COUNT query was used) + assert mock_session.scalar.called, "scalar should have been called for count query" + + # Verify SQL query includes both status and time filter + call_args = mock_session.scalar.call_args + assert call_args is not None, "scalar should have been called with a statement" + + # The first argument should be the SQL statement + stmt = call_args[0][0] + # Convert to string to inspect the query + query_str = str(stmt.compile(compile_kwargs={"literal_binds": True})) + + # Verify the query includes both filters + assert "created_at" in query_str.lower() or "workflow_runs.created_at" in query_str.lower(), ( + "Query should include created_at filter for time range" + ) + assert "status" in query_str.lower() or "workflow_runs.status" in query_str.lower(), ( + "Query should 
include status filter" + ) diff --git a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py index fadd1ee88f..5cba43714a 100644 --- a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py +++ b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py @@ -59,12 +59,11 @@ def session(): @pytest.fixture def mock_user(): """Create a user instance for testing.""" - user = Account() + user = Account(name="test", email="test@example.com") user.id = "test-user-id" - tenant = Tenant() + tenant = Tenant(name="Test Workspace") tenant.id = "test-tenant" - tenant.name = "Test Workspace" user._current_tenant = MagicMock() user._current_tenant.id = "test-tenant" @@ -299,7 +298,7 @@ def test_to_domain_model(repository): db_model.predecessor_node_id = "test-predecessor-id" db_model.node_execution_id = "test-node-execution-id" db_model.node_id = "test-node-id" - db_model.node_type = NodeType.START.value + db_model.node_type = NodeType.START db_model.title = "Test Node" db_model.inputs = json.dumps(inputs_dict) db_model.process_data = json.dumps(process_data_dict) diff --git a/api/tests/unit_tests/services/auth/test_api_key_auth_service.py b/api/tests/unit_tests/services/auth/test_api_key_auth_service.py index d23298f096..c6c3f677fb 100644 --- a/api/tests/unit_tests/services/auth/test_api_key_auth_service.py +++ b/api/tests/unit_tests/services/auth/test_api_key_auth_service.py @@ -125,13 +125,13 @@ class TestApiKeyAuthService: mock_session.commit = Mock() args_copy = self.mock_args.copy() - original_key = args_copy["credentials"]["config"]["api_key"] # type: ignore + original_key = args_copy["credentials"]["config"]["api_key"] ApiKeyAuthService.create_provider_auth(self.tenant_id, args_copy) # Verify original key is replaced with encrypted key - assert args_copy["credentials"]["config"]["api_key"] == encrypted_key # type: ignore - assert args_copy["credentials"]["config"]["api_key"] != original_key # type: ignore + assert args_copy["credentials"]["config"]["api_key"] == encrypted_key + assert args_copy["credentials"]["config"]["api_key"] != original_key # Verify encryption function is called correctly mock_encrypter.encrypt_token.assert_called_once_with(self.tenant_id, original_key) @@ -268,7 +268,7 @@ class TestApiKeyAuthService: def test_validate_api_key_auth_args_empty_credentials(self): """Test API key auth args validation - empty credentials""" args = self.mock_args.copy() - args["credentials"] = None # type: ignore + args["credentials"] = None with pytest.raises(ValueError, match="credentials is required"): ApiKeyAuthService.validate_api_key_auth_args(args) @@ -284,7 +284,7 @@ class TestApiKeyAuthService: def test_validate_api_key_auth_args_missing_auth_type(self): """Test API key auth args validation - missing auth_type""" args = self.mock_args.copy() - del args["credentials"]["auth_type"] # type: ignore + del args["credentials"]["auth_type"] with pytest.raises(ValueError, match="auth_type is required"): ApiKeyAuthService.validate_api_key_auth_args(args) @@ -292,7 +292,7 @@ class TestApiKeyAuthService: def test_validate_api_key_auth_args_empty_auth_type(self): """Test API key auth args validation - empty auth_type""" args = self.mock_args.copy() - args["credentials"]["auth_type"] = "" # type: ignore + args["credentials"]["auth_type"] = "" with pytest.raises(ValueError, match="auth_type is required"): 
ApiKeyAuthService.validate_api_key_auth_args(args) @@ -380,7 +380,7 @@ class TestApiKeyAuthService: def test_validate_api_key_auth_args_dict_credentials_with_list_auth_type(self): """Test API key auth args validation - dict credentials with list auth_type""" args = self.mock_args.copy() - args["credentials"]["auth_type"] = ["api_key"] # type: ignore # list instead of string + args["credentials"]["auth_type"] = ["api_key"] # Current implementation checks if auth_type exists and is truthy, list ["api_key"] is truthy # So this should not raise exception, this test should pass diff --git a/api/tests/unit_tests/services/auth/test_auth_integration.py b/api/tests/unit_tests/services/auth/test_auth_integration.py index acfc5cc526..3832a0b8b2 100644 --- a/api/tests/unit_tests/services/auth/test_auth_integration.py +++ b/api/tests/unit_tests/services/auth/test_auth_integration.py @@ -181,14 +181,11 @@ class TestAuthIntegration: ) def test_all_providers_factory_creation(self, provider, credentials): """Test factory creation for all supported providers""" - try: - auth_class = ApiKeyAuthFactory.get_apikey_auth_factory(provider) - assert auth_class is not None + auth_class = ApiKeyAuthFactory.get_apikey_auth_factory(provider) + assert auth_class is not None - factory = ApiKeyAuthFactory(provider, credentials) - assert factory.auth is not None - except ImportError: - pytest.skip(f"Provider {provider} not implemented yet") + factory = ApiKeyAuthFactory(provider, credentials) + assert factory.auth is not None def _create_success_response(self, status_code=200): """Create successful HTTP response mock""" diff --git a/api/tests/unit_tests/services/test_account_service.py b/api/tests/unit_tests/services/test_account_service.py index 737202f8de..627a04bcd0 100644 --- a/api/tests/unit_tests/services/test_account_service.py +++ b/api/tests/unit_tests/services/test_account_service.py @@ -893,7 +893,7 @@ class TestRegisterService: mock_dify_setup.return_value = mock_dify_setup_instance # Execute test - RegisterService.setup("admin@example.com", "Admin User", "password123", "192.168.1.1") + RegisterService.setup("admin@example.com", "Admin User", "password123", "192.168.1.1", "en-US") # Verify results mock_create_account.assert_called_once_with( @@ -925,6 +925,7 @@ class TestRegisterService: "Admin User", "password123", "192.168.1.1", + "en-US", ) # Verify rollback operations were called diff --git a/api/tests/unit_tests/services/test_dataset_service_delete_dataset.py b/api/tests/unit_tests/services/test_dataset_service_delete_dataset.py new file mode 100644 index 0000000000..cc718c9997 --- /dev/null +++ b/api/tests/unit_tests/services/test_dataset_service_delete_dataset.py @@ -0,0 +1,216 @@ +from unittest.mock import Mock, patch + +import pytest + +from models.account import Account, TenantAccountRole +from models.dataset import Dataset +from services.dataset_service import DatasetService + + +class DatasetDeleteTestDataFactory: + """Factory class for creating test data and mock objects for dataset delete tests.""" + + @staticmethod + def create_dataset_mock( + dataset_id: str = "dataset-123", + tenant_id: str = "test-tenant-123", + created_by: str = "creator-456", + doc_form: str | None = None, + indexing_technique: str | None = "high_quality", + **kwargs, + ) -> Mock: + """Create a mock dataset with specified attributes.""" + dataset = Mock(spec=Dataset) + dataset.id = dataset_id + dataset.tenant_id = tenant_id + dataset.created_by = created_by + dataset.doc_form = doc_form + dataset.indexing_technique = 
indexing_technique + for key, value in kwargs.items(): + setattr(dataset, key, value) + return dataset + + @staticmethod + def create_user_mock( + user_id: str = "user-789", + tenant_id: str = "test-tenant-123", + role: TenantAccountRole = TenantAccountRole.ADMIN, + **kwargs, + ) -> Mock: + """Create a mock user with specified attributes.""" + user = Mock(spec=Account) + user.id = user_id + user.current_tenant_id = tenant_id + user.current_role = role + for key, value in kwargs.items(): + setattr(user, key, value) + return user + + +class TestDatasetServiceDeleteDataset: + """ + Comprehensive unit tests for DatasetService.delete_dataset method. + + This test suite covers all deletion scenarios including: + - Normal dataset deletion with documents + - Empty dataset deletion (no documents, doc_form is None) + - Dataset deletion with missing indexing_technique + - Permission checks + - Event handling + + This test suite provides regression protection for issue #27073. + """ + + @pytest.fixture + def mock_dataset_service_dependencies(self): + """Common mock setup for dataset service dependencies.""" + with ( + patch("services.dataset_service.DatasetService.get_dataset") as mock_get_dataset, + patch("services.dataset_service.DatasetService.check_dataset_permission") as mock_check_perm, + patch("extensions.ext_database.db.session") as mock_db, + patch("services.dataset_service.dataset_was_deleted") as mock_dataset_was_deleted, + ): + yield { + "get_dataset": mock_get_dataset, + "check_permission": mock_check_perm, + "db_session": mock_db, + "dataset_was_deleted": mock_dataset_was_deleted, + } + + def test_delete_dataset_with_documents_success(self, mock_dataset_service_dependencies): + """ + Test successful deletion of a dataset with documents. + + This test verifies: + - Dataset is retrieved correctly + - Permission check is performed + - dataset_was_deleted event is sent + - Dataset is deleted from database + - Method returns True + """ + # Arrange + dataset = DatasetDeleteTestDataFactory.create_dataset_mock( + doc_form="text_model", indexing_technique="high_quality" + ) + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + # Act + result = DatasetService.delete_dataset(dataset.id, user) + + # Assert + assert result is True + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id) + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].commit.assert_called_once() + + def test_delete_empty_dataset_success(self, mock_dataset_service_dependencies): + """ + Test successful deletion of an empty dataset (no documents, doc_form is None). + + This test verifies that: + - Empty datasets can be deleted without errors + - dataset_was_deleted event is sent (event handler will skip cleanup if doc_form is None) + - Dataset is deleted from database + - Method returns True + + This is the primary test for issue #27073 where deleting an empty dataset + caused internal server error due to assertion failure in event handlers. 
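+
+        A minimal sketch of the guard the event handlers are assumed to apply
+        (hypothetical shape, for illustration only):
+
+            if dataset.doc_form is None or dataset.indexing_technique is None:
+                return  # empty dataset: no vector index or documents to clean up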
+        """
+        # Arrange
+        dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form=None, indexing_technique=None)
+        user = DatasetDeleteTestDataFactory.create_user_mock()
+
+        mock_dataset_service_dependencies["get_dataset"].return_value = dataset
+
+        # Act
+        result = DatasetService.delete_dataset(dataset.id, user)
+
+        # Assert - Verify complete deletion flow
+        assert result is True
+        mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id)
+        mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user)
+        mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset)
+        mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset)
+        mock_dataset_service_dependencies["db_session"].commit.assert_called_once()
+
+    def test_delete_dataset_with_partial_none_values(self, mock_dataset_service_dependencies):
+        """
+        Test deletion of dataset with partial None values.
+
+        This test verifies that datasets with partial None values (e.g., doc_form exists
+        but indexing_technique is None) can be deleted successfully. The event handler
+        will skip cleanup if any required field is None.
+
+        Comprehensive assertions verify that all core deletion operations are
+        performed, not just that the deletion event was sent.
+        """
+        # Arrange
+        dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form="text_model", indexing_technique=None)
+        user = DatasetDeleteTestDataFactory.create_user_mock()
+
+        mock_dataset_service_dependencies["get_dataset"].return_value = dataset
+
+        # Act
+        result = DatasetService.delete_dataset(dataset.id, user)
+
+        # Assert - Verify complete deletion flow
+        assert result is True
+        mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id)
+        mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user)
+        mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset)
+        mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset)
+        mock_dataset_service_dependencies["db_session"].commit.assert_called_once()
+
+    def test_delete_dataset_with_doc_form_none_indexing_technique_exists(self, mock_dataset_service_dependencies):
+        """
+        Test deletion of dataset where doc_form is None but indexing_technique exists.
+
+        This edge case can occur in certain dataset configurations and should be handled
+        gracefully by the event handler's conditional check.
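+
+        Together with the two tests above, the suite covers (None, None),
+        ("text_model", None) and (None, "high_quality"), so the guard sketched
+        earlier is assumed to check each field independently rather than
+        expecting both to be set or unset together.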
+ """ + # Arrange + dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form=None, indexing_technique="high_quality") + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + # Act + result = DatasetService.delete_dataset(dataset.id, user) + + # Assert - Verify complete deletion flow + assert result is True + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id) + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].commit.assert_called_once() + + def test_delete_dataset_not_found(self, mock_dataset_service_dependencies): + """ + Test deletion attempt when dataset doesn't exist. + + This test verifies that: + - Method returns False when dataset is not found + - No deletion operations are performed + - No events are sent + """ + # Arrange + dataset_id = "non-existent-dataset" + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = None + + # Act + result = DatasetService.delete_dataset(dataset_id, user) + + # Assert + assert result is False + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset_id) + mock_dataset_service_dependencies["check_permission"].assert_not_called() + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_not_called() + mock_dataset_service_dependencies["db_session"].delete.assert_not_called() + mock_dataset_service_dependencies["db_session"].commit.assert_not_called() diff --git a/api/tests/unit_tests/services/test_metadata_bug_complete.py b/api/tests/unit_tests/services/test_metadata_bug_complete.py index 0ff1edc950..bbfa9da15e 100644 --- a/api/tests/unit_tests/services/test_metadata_bug_complete.py +++ b/api/tests/unit_tests/services/test_metadata_bug_complete.py @@ -41,7 +41,10 @@ class TestMetadataBugCompleteValidation: mock_user.current_tenant_id = "tenant-123" mock_user.id = "user-456" - with patch("services.metadata_service.current_user", mock_user): + with patch( + "services.metadata_service.current_account_with_tenant", + return_value=(mock_user, mock_user.current_tenant_id), + ): # Should crash with TypeError with pytest.raises(TypeError, match="object of type 'NoneType' has no len"): MetadataService.create_metadata("dataset-123", mock_metadata_args) @@ -51,7 +54,10 @@ class TestMetadataBugCompleteValidation: mock_user.current_tenant_id = "tenant-123" mock_user.id = "user-456" - with patch("services.metadata_service.current_user", mock_user): + with patch( + "services.metadata_service.current_account_with_tenant", + return_value=(mock_user, mock_user.current_tenant_id), + ): with pytest.raises(TypeError, match="object of type 'NoneType' has no len"): MetadataService.update_metadata_name("dataset-123", "metadata-456", None) @@ -74,9 +80,11 @@ class TestMetadataBugCompleteValidation: def test_4_fixed_api_layer_rejects_null(self, app): """Test Layer 4: Fixed API configuration properly rejects null values.""" # Test Console API create endpoint (fixed) - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") 
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("type", type=str, required=True, nullable=False, location="json")
+            .add_argument("name", type=str, required=True, nullable=False, location="json")
+        )

         with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"):
             with pytest.raises(BadRequest):
@@ -94,9 +102,11 @@
     def test_5_fixed_api_accepts_valid_values(self, app):
         """Test that fixed API still accepts valid non-null values."""
-        parser = reqparse.RequestParser()
-        parser.add_argument("type", type=str, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("type", type=str, required=True, nullable=False, location="json")
+            .add_argument("name", type=str, required=True, nullable=False, location="json")
+        )

         with app.test_request_context(json={"type": "string", "name": "valid_name"}, content_type="application/json"):
             args = parser.parse_args()
@@ -106,9 +116,11 @@
     def test_6_simulated_buggy_behavior(self, app):
         """Test simulating the original buggy behavior with nullable=True."""
         # Simulate the old buggy configuration
-        buggy_parser = reqparse.RequestParser()
-        buggy_parser.add_argument("type", type=str, required=True, nullable=True, location="json")
-        buggy_parser.add_argument("name", type=str, required=True, nullable=True, location="json")
+        buggy_parser = (
+            reqparse.RequestParser()
+            .add_argument("type", type=str, required=True, nullable=True, location="json")
+            .add_argument("name", type=str, required=True, nullable=True, location="json")
+        )

         with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"):
             # This would pass in the buggy version
@@ -118,7 +130,7 @@

         # But would crash when trying to create MetadataArgs
         with pytest.raises((ValueError, TypeError)):
-            MetadataArgs(**args)
+            MetadataArgs.model_validate(args)

     def test_7_end_to_end_validation_layers(self):
         """Test all validation layers work together correctly."""
@@ -131,7 +143,7 @@
         valid_data = {"type": "string", "name": "test_metadata"}

         # Should create valid Pydantic object
-        metadata_args = MetadataArgs(**valid_data)
+        metadata_args = MetadataArgs.model_validate(valid_data)
         assert metadata_args.type == "string"
         assert metadata_args.name == "test_metadata"
diff --git a/api/tests/unit_tests/services/test_metadata_nullable_bug.py b/api/tests/unit_tests/services/test_metadata_nullable_bug.py
index d151100cf3..c8a1a70422 100644
--- a/api/tests/unit_tests/services/test_metadata_nullable_bug.py
+++ b/api/tests/unit_tests/services/test_metadata_nullable_bug.py
@@ -29,7 +32,10 @@ class TestMetadataNullableBug:
         mock_user.current_tenant_id = "tenant-123"
         mock_user.id = "user-456"

-        with patch("services.metadata_service.current_user", mock_user):
+        with patch(
+            "services.metadata_service.current_account_with_tenant",
+            return_value=(mock_user, mock_user.current_tenant_id),
+        ):
             # This should crash with TypeError when calling len(None)
             with pytest.raises(TypeError, match="object of type 'NoneType' has no len"):
                 MetadataService.create_metadata("dataset-123", mock_metadata_args)
@@ -40,7 +43,10 @@
         mock_user.current_tenant_id = "tenant-123"
         mock_user.id = "user-456"

-        with patch("services.metadata_service.current_user", mock_user):
+        with patch(
+            "services.metadata_service.current_account_with_tenant",
+            return_value=(mock_user, mock_user.current_tenant_id),
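+            # The patched current_account_with_tenant returns an
+            # (account, tenant_id) pair; presumably the shape the service now unpacks.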
+        ):
             # This should crash with TypeError when calling len(None)
             with pytest.raises(TypeError, match="object of type 'NoneType' has no len"):
                 MetadataService.update_metadata_name("dataset-123", "metadata-456", None)
@@ -48,9 +54,11 @@
     def test_api_parser_accepts_null_values(self, app):
         """Test that API parser configuration incorrectly accepts null values."""
         # Simulate the current API parser configuration
-        parser = reqparse.RequestParser()
-        parser.add_argument("type", type=str, required=True, nullable=True, location="json")
-        parser.add_argument("name", type=str, required=True, nullable=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("type", type=str, required=True, nullable=True, location="json")
+            .add_argument("name", type=str, required=True, nullable=True, location="json")
+        )

         # Simulate request data with null values
         with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"):
@@ -66,9 +74,11 @@
     def test_integration_bug_scenario(self, app):
         """Test the complete bug scenario from API to service layer."""
         # Step 1: API parser accepts null values (current buggy behavior)
-        parser = reqparse.RequestParser()
-        parser.add_argument("type", type=str, required=True, nullable=True, location="json")
-        parser.add_argument("name", type=str, required=True, nullable=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("type", type=str, required=True, nullable=True, location="json")
+            .add_argument("name", type=str, required=True, nullable=True, location="json")
+        )

         with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"):
             args = parser.parse_args()
@@ -76,7 +86,7 @@
         # Step 2: Try to create MetadataArgs with None values
         # This should fail at Pydantic validation level
         with pytest.raises((ValueError, TypeError)):
-            metadata_args = MetadataArgs(**args)
+            metadata_args = MetadataArgs.model_validate(args)

         # Step 3: If we bypass Pydantic (simulating the bug scenario)
         # Move this outside the request context to avoid Flask-Login issues
@@ -88,7 +98,10 @@
         mock_user.current_tenant_id = "tenant-123"
         mock_user.id = "user-456"

-        with patch("services.metadata_service.current_user", mock_user):
+        with patch(
+            "services.metadata_service.current_account_with_tenant",
+            return_value=(mock_user, mock_user.current_tenant_id),
+        ):
             # Step 4: Service layer crashes on len(None)
             with pytest.raises(TypeError, match="object of type 'NoneType' has no len"):
                 MetadataService.create_metadata("dataset-123", mock_metadata_args)
@@ -96,9 +109,11 @@
     def test_correct_nullable_false_configuration_works(self, app):
         """Test that the correct nullable=False configuration works as expected."""
         # This tests the FIXED configuration
-        parser = reqparse.RequestParser()
-        parser.add_argument("type", type=str, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("type", type=str, required=True, nullable=False, location="json")
+            .add_argument("name", type=str, required=True, nullable=False, location="json")
+        )

         with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"):
             # This should fail with BadRequest due to nullable=False
diff --git a/api/tests/unit_tests/services/test_variable_truncator.py b/api/tests/unit_tests/services/test_variable_truncator.py
index 6761f939e3..cf6fb25c1c 100644
--- a/api/tests/unit_tests/services/test_variable_truncator.py
+++ b/api/tests/unit_tests/services/test_variable_truncator.py
@@ -21,6 +21,7 @@ from core.file.enums import FileTransferMethod, FileType
 from core.file.models import File
 from core.variables.segments import (
     ArrayFileSegment,
+    ArrayNumberSegment,
     ArraySegment,
     FileSegment,
     FloatSegment,
@@ -30,6 +31,7 @@ from core.variables.segments import (
     StringSegment,
 )
 from services.variable_truncator import (
+    DummyVariableTruncator,
     MaxDepthExceededError,
     TruncationResult,
     UnknownTypeError,
@@ -596,3 +598,32 @@ class TestIntegrationScenarios:
         truncated_mapping, truncated = truncator.truncate_variable_mapping(mapping)
         assert truncated is False
         assert truncated_mapping == mapping
+
+
+def test_dummy_variable_truncator_methods():
+    """Test DummyVariableTruncator methods work correctly."""
+    truncator = DummyVariableTruncator()
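+    # DummyVariableTruncator is expected to behave as a no-op stand-in:
+    # every input passes through unchanged and truncated is always False.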
+
+    # Test truncate_variable_mapping
+    test_data: dict[str, Any] = {
+        "key1": "value1",
+        "key2": ["item1", "item2"],
+        "large_array": list(range(2000)),
+    }
+    result, is_truncated = truncator.truncate_variable_mapping(test_data)
+
+    assert result == test_data
+    assert not is_truncated
+
+    # Test truncate method
+    segment = StringSegment(value="test string")
+    result = truncator.truncate(segment)
+    assert isinstance(result, TruncationResult)
+    assert result.result == segment
+    assert result.truncated is False
+
+    segment = ArrayNumberSegment(value=list(range(2000)))
+    result = truncator.truncate(segment)
+    assert isinstance(result, TruncationResult)
+    assert result.result == segment
+    assert result.truncated is False
diff --git a/api/tests/unit_tests/services/tools/test_mcp_tools_transform.py b/api/tests/unit_tests/services/tools/test_mcp_tools_transform.py
index fb0139932b..7511fd6f0c 100644
--- a/api/tests/unit_tests/services/tools/test_mcp_tools_transform.py
+++ b/api/tests/unit_tests/services/tools/test_mcp_tools_transform.py
@@ -180,6 +180,25 @@ class TestMCPToolTransform:
         # Set tools data with null description
         mock_provider_full.tools = '[{"name": "tool1", "description": null, "inputSchema": {}}]'

+        # Mock the to_entity and to_api_response methods
+        mock_entity = Mock()
+        mock_entity.to_api_response.return_value = {
+            "name": "Test MCP Provider",
+            "type": ToolProviderType.MCP,
+            "is_team_authorization": True,
+            "server_url": "https://*****.com/mcp",
+            "provider_icon": "icon.png",
+            "masked_headers": {"Authorization": "Bearer *****"},
+            "updated_at": 1234567890,
+            "labels": [],
+            "author": "Test User",
+            "description": I18nObject(en_US="Test MCP Provider Description", zh_Hans="Test MCP Provider Description"),
+            "icon": "icon.png",
+            "label": I18nObject(en_US="Test MCP Provider", zh_Hans="Test MCP Provider"),
+            "masked_credentials": {},
+        }
+        mock_provider_full.to_entity.return_value = mock_entity
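+        # Note: the mocked payload above already masks secrets (the server URL
+        # and the Authorization header), mirroring what to_api_response is
+        # expected to return to clients.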
Provider", + "type": ToolProviderType.MCP, + "is_team_authorization": True, + "server_url": "https://*****.com/mcp", + "provider_icon": "icon.png", + "masked_headers": {"Authorization": "Bearer *****"}, + "updated_at": 1234567890, + "labels": [], + "configuration": {"timeout": "30", "sse_read_timeout": "300"}, + "original_headers": {"Authorization": "Bearer secret-token"}, + "author": "Test User", + "description": I18nObject(en_US="Test MCP Provider Description", zh_Hans="Test MCP Provider Description"), + "icon": "icon.png", + "label": I18nObject(en_US="Test MCP Provider", zh_Hans="Test MCP Provider"), + "masked_credentials": {}, + } + mock_provider_full.to_entity.return_value = mock_entity + # Call the method with for_list=False result = ToolTransformService.mcp_provider_to_user_provider(mock_provider_full, for_list=False) @@ -205,8 +245,9 @@ class TestMCPToolTransform: assert isinstance(result, ToolProviderApiEntity) assert result.id == "server-identifier-456" # Should use server_identifier when for_list=False assert result.server_identifier == "server-identifier-456" - assert result.timeout == 30 - assert result.sse_read_timeout == 300 + assert result.configuration is not None + assert result.configuration.timeout == 30 + assert result.configuration.sse_read_timeout == 300 assert result.original_headers == {"Authorization": "Bearer secret-token"} assert len(result.tools) == 1 assert result.tools[0].description.en_US == "Tool description" diff --git a/api/tests/unit_tests/services/workflow/test_workflow_converter.py b/api/tests/unit_tests/services/workflow/test_workflow_converter.py index 2ca781bae5..63ce4c0c3c 100644 --- a/api/tests/unit_tests/services/workflow/test_workflow_converter.py +++ b/api/tests/unit_tests/services/workflow/test_workflow_converter.py @@ -107,7 +107,7 @@ def test__convert_to_http_request_node_for_chatbot(default_variables): assert body_data body_data_json = json.loads(body_data) - assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY.value + assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY body_params = body_data_json["params"] assert body_params["app_id"] == app_model.id @@ -168,7 +168,7 @@ def test__convert_to_http_request_node_for_workflow_app(default_variables): assert body_data body_data_json = json.loads(body_data) - assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY.value + assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY body_params = body_data_json["params"] assert body_params["app_id"] == app_model.id diff --git a/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py index 7e324ca4db..66361f26e0 100644 --- a/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py +++ b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py @@ -47,7 +47,8 @@ class TestDraftVariableSaver: def test__should_variable_be_visible(self): mock_session = MagicMock(spec=Session) - mock_user = Account(id=str(uuid.uuid4())) + mock_user = Account(name="test", email="test@example.com") + mock_user.id = str(uuid.uuid4()) test_app_id = self._get_test_app_id() saver = DraftVariableSaver( session=mock_session, diff --git a/api/tests/unit_tests/tools/test_api_tool.py b/api/tests/unit_tests/tools/test_api_tool.py new file mode 100644 index 0000000000..4d5683dcbd --- /dev/null +++ 
+    return next((i for i in msgs if isinstance(i.message, msg_type)), None)
+
+
+class TestApiToolInvoke:
+    """Test suite for the ApiTool._invoke method, ensuring JSON responses are properly serialized."""
+
+    def setup_method(self):
+        """Set up test fixtures."""
+        # Create a mock tool entity
+        self.mock_tool_identity = ToolIdentity(
+            author="test",
+            name="test_api_tool",
+            label=I18nObject(en_US="Test API Tool", zh_Hans="测试API工具"),
+            provider="test_provider",
+        )
+        self.mock_tool_entity = ToolEntity(identity=self.mock_tool_identity)
+
+        # Create a mock API bundle
+        self.mock_api_bundle = ApiToolBundle(
+            server_url="https://api.example.com/test",
+            method="GET",
+            openapi={},
+            operation_id="test_operation",
+            parameters=[],
+            author="test_author",
+        )
+
+        # Create a mock runtime
+        self.mock_runtime = Mock(spec=ToolRuntime)
+        self.mock_runtime.credentials = {"auth_type": "none"}
+
+        # Create the ApiTool instance
+        self.api_tool = ApiTool(
+            entity=self.mock_tool_entity,
+            api_bundle=self.mock_api_bundle,
+            runtime=self.mock_runtime,
+            provider_id="test_provider",
+        )
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    def test_invoke_with_json_response_creates_text_message_with_serialized_json(self, mock_get: Mock) -> None:
+        """Test that when upstream returns JSON, the output Text message contains the JSON-serialized string."""
+        # Setup mock response with JSON content
+        json_response_data = {
+            "key": "value",
+            "number": 123,
+            "nested": {"inner": "data"},
+        }
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = json.dumps(json_response_data).encode("utf-8")
+        mock_response.json.return_value = json_response_data
+        mock_response.text = json.dumps(json_response_data)
+        mock_response.headers = {"content-type": "application/json"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
+
+        # Get the result from the generator
+        result = list(result_generator)
+        assert len(result) == 2
+
+        # Verify _invoke yields a text message
+        text_message = _get_message_by_type(result, ToolInvokeMessage.TextMessage)
+        assert text_message is not None, "_invoke should yield a text message"
+        assert isinstance(text_message, ToolInvokeMessage)
+        assert text_message.type == ToolInvokeMessage.MessageType.TEXT
+        assert text_message.message is not None
+        # Verify the text contains the JSON-serialized string
+        # Check if message is a TextMessage
+        assert isinstance(text_message.message, ToolInvokeMessage.TextMessage)
+        # Verify it's a valid JSON string that equals the mock response
+        parsed_back = json.loads(text_message.message.text)
+        assert parsed_back == json_response_data
+
+        # Verify _invoke yields a JSON message
+        json_message = _get_message_by_type(result, ToolInvokeMessage.JsonMessage)
+        assert json_message is not None, "_invoke should yield a JSON message"
+        assert isinstance(json_message, ToolInvokeMessage)
+        assert json_message.type == ToolInvokeMessage.MessageType.JSON
+        assert json_message.message is not None
+
+        assert isinstance(json_message.message, ToolInvokeMessage.JsonMessage)
+        assert json_message.message.json_object == json_response_data
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    @pytest.mark.parametrize(
+        "test_case",
+        [
+            (
+                "array",
+                [
+                    {"id": 1, "name": "Item 1", "active": True},
+                    {"id": 2, "name": "Item 2", "active": False},
+                    {"id": 3, "name": "项目 3", "active": True},
+                ],
+            ),
+            (
+                "string",
+                "string",
+            ),
+            (
+                "number",
+                123.456,
+            ),
+            (
+                "boolean",
+                True,
+            ),
+            (
+                "null",
+                None,
+            ),
+        ],
+        ids=operator.itemgetter(0),
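+        # itemgetter(0) labels each parametrized case with its first tuple
+        # element, i.e. the human-readable case name.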
+    )
+    def test_invoke_with_non_dict_json_response_creates_text_message_with_serialized_json(
+        self, mock_get: Mock, test_case
+    ) -> None:
+        """Test that when upstream returns non-dict JSON, the output Text message contains the JSON-serialized string."""
+        # Setup mock response with non-dict JSON content
+        _, json_value = test_case
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = json.dumps(json_value).encode("utf-8")
+        mock_response.json.return_value = json_value
+        mock_response.text = json.dumps(json_value)
+        mock_response.headers = {"content-type": "application/json"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
+
+        # Get the result from the generator
+        result = list(result_generator)
+        assert len(result) == 1
+
+        # Verify _invoke yields a text message
+        text_message = _get_message_by_type(result, ToolInvokeMessage.TextMessage)
+        assert text_message is not None, "_invoke should yield a text message containing the serialized JSON."
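+        # The same round-trip contract applies to every non-dict JSON type above:
+        # arrays, strings, numbers, booleans, and null must all survive json.loads.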
+        assert isinstance(text_message, ToolInvokeMessage)
+        assert text_message.type == ToolInvokeMessage.MessageType.TEXT
+        assert text_message.message is not None
+        # Verify the text contains the JSON-serialized string
+        # Check if message is a TextMessage
+        assert isinstance(text_message.message, ToolInvokeMessage.TextMessage)
+        # Verify it's a valid JSON string
+        parsed_back = json.loads(text_message.message.text)
+        assert parsed_back == json_value
+
+        # Verify _invoke does not yield a JSON message
+        json_message = _get_message_by_type(result, ToolInvokeMessage.JsonMessage)
+        assert json_message is None, "_invoke should not yield a JSON message for a non-dict JSON response"
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    def test_invoke_with_text_response_creates_text_message_with_original_text(self, mock_get: Mock) -> None:
+        """Test that when upstream returns plain text, the output Text message contains the original text."""
+        # Setup mock response with plain text content
+        text_response_data = "This is a plain text response"
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = text_response_data.encode("utf-8")
+        mock_response.json.side_effect = json.JSONDecodeError("Expecting value", "doc", 0)
+        mock_response.text = text_response_data
+        mock_response.headers = {"content-type": "text/plain"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
+
+        # Get the result from the generator
+        result = list(result_generator)
+        assert len(result) == 1
+
+        # Verify it's a text message with the original text
+        message = result[0]
+        assert isinstance(message, ToolInvokeMessage)
+        assert message.type == ToolInvokeMessage.MessageType.TEXT
+        assert message.message is not None
+        # Check if message is a TextMessage
+        assert isinstance(message.message, ToolInvokeMessage.TextMessage)
+        assert message.message.text == text_response_data
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    def test_invoke_with_empty_response(self, mock_get: Mock) -> None:
+        """Test that empty responses are handled correctly."""
+        # Setup mock response with empty content
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = b""
+        mock_response.headers = {"content-type": "application/json"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
+
+        # Get the result from the generator
+        result = list(result_generator)
+        assert len(result) == 1
+
+        # Verify it's a text message with the empty response message
+        message = result[0]
+        assert isinstance(message, ToolInvokeMessage)
+        assert message.type == ToolInvokeMessage.MessageType.TEXT
+        assert message.message is not None
+        # Check if message is a TextMessage
+        assert isinstance(message.message, ToolInvokeMessage.TextMessage)
+        assert "Empty response from the tool" in message.message.text
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    def test_invoke_with_error_response(self, mock_get: Mock) -> None:
+        """Test that error responses are handled correctly."""
+        # Setup mock response with error status code
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 404
+        mock_response.text = "Not Found"
+        mock_get.return_value = mock_response
+
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
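+        # _invoke returns a lazy generator; the HTTP call and the status-code
+        # check only run once it is consumed below.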
+
+        # Invoke the tool and expect an error
+        with pytest.raises(Exception) as exc_info:
+            list(result_generator)  # Consume the generator to trigger the error
+
+        # Verify the error message
+        assert "Request failed with status code 404" in str(exc_info.value)
diff --git a/api/tests/unit_tests/utils/oauth_encryption/test_system_oauth_encryption.py b/api/tests/unit_tests/utils/oauth_encryption/test_system_oauth_encryption.py
index 30990f8d50..e2607f0fb1 100644
--- a/api/tests/unit_tests/utils/oauth_encryption/test_system_oauth_encryption.py
+++ b/api/tests/unit_tests/utils/oauth_encryption/test_system_oauth_encryption.py
@@ -116,10 +116,10 @@ class TestSystemOAuthEncrypter:
         encrypter = SystemOAuthEncrypter("test_secret")

         with pytest.raises(Exception):  # noqa: B017
-            encrypter.encrypt_oauth_params(None)  # type: ignore
+            encrypter.encrypt_oauth_params(None)

         with pytest.raises(Exception):  # noqa: B017
-            encrypter.encrypt_oauth_params("not_a_dict")  # type: ignore
+            encrypter.encrypt_oauth_params("not_a_dict")

     def test_decrypt_oauth_params_basic(self):
         """Test basic OAuth parameters decryption"""
@@ -207,12 +207,12 @@
         encrypter = SystemOAuthEncrypter("test_secret")

         with pytest.raises(ValueError) as exc_info:
-            encrypter.decrypt_oauth_params(123)  # type: ignore
+            encrypter.decrypt_oauth_params(123)

         assert "encrypted_data must be a string" in str(exc_info.value)

         with pytest.raises(ValueError) as exc_info:
-            encrypter.decrypt_oauth_params(None)  # type: ignore
+            encrypter.decrypt_oauth_params(None)

         assert "encrypted_data must be a string" in str(exc_info.value)
@@ -461,14 +461,14 @@ class TestConvenienceFunctions:
         """Test convenience functions with error conditions"""
         # Test encryption with invalid input
         with pytest.raises(Exception):  # noqa: B017
-            encrypt_system_oauth_params(None)  # type: ignore
+            encrypt_system_oauth_params(None)

         # Test decryption with invalid input
         with pytest.raises(ValueError):
             decrypt_system_oauth_params("")

         with pytest.raises(ValueError):
-            decrypt_system_oauth_params(None)  # type: ignore
+            decrypt_system_oauth_params(None)


 class TestErrorHandling:
@@ -501,7 +501,7 @@

         # Test non-string error
         with pytest.raises(ValueError) as exc_info:
-            encrypter.decrypt_oauth_params(123)  # type: ignore
+            encrypter.decrypt_oauth_params(123)

         assert "encrypted_data must be a string" in str(exc_info.value)

         # Test invalid format error
diff --git a/api/uv.lock b/api/uv.lock
index 7ce71cd215..7cf1e047de 100644
--- a/api/uv.lock
+++ b/api/uv.lock
@@ -2,12 +2,18 @@
 version = 1
 revision = 3
 requires-python = ">=3.11, <3.13"
 resolution-markers = [
-    "python_full_version >= '3.12.4' and sys_platform == 'linux'",
-    "python_full_version >= '3.12.4' and sys_platform != 'linux'",
-    "python_full_version >= '3.12' and python_full_version < '3.12.4' and sys_platform == 'linux'",
-    "python_full_version >= '3.12' and python_full_version < '3.12.4' and sys_platform != 'linux'",
-    "python_full_version < '3.12' and sys_platform == 'linux'",
-    "python_full_version < '3.12' and sys_platform != 'linux'",
+    "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'",
+    "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'",
+    "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'",
+    "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'",
+
"python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", + "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'", + "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", ] [[package]] @@ -42,7 +48,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.12.15" +version = "3.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -53,42 +59,42 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" } +sdist = { url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hash = "sha256:378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67", size = 7832348, upload-time = "2025-10-06T19:58:48.089Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, - { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, - { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, - { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, - { url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, - { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, - { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, - { url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, - { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, - { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, - { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, - { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, - { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, - { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, - { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" 
}, - { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, - { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, - { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, - { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, - { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, - { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, - { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, - { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, - { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, - { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, - { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, - { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, - { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, - { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, - { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, - { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20", size = 741374, upload-time = "2025-10-06T19:55:13.095Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca", size = 494956, upload-time = "2025-10-06T19:55:14.687Z" }, + { url = "https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1", size = 491154, upload-time = "2025-10-06T19:55:16.661Z" }, + { url = "https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409", size = 
1745707, upload-time = "2025-10-06T19:55:18.376Z" }, + { url = "https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1", size = 1702404, upload-time = "2025-10-06T19:55:20.098Z" }, + { url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390", size = 1805519, upload-time = "2025-10-06T19:55:21.811Z" }, + { url = "https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504", size = 1893904, upload-time = "2025-10-06T19:55:23.462Z" }, + { url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3", size = 1745043, upload-time = "2025-10-06T19:55:25.208Z" }, + { url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8", size = 1604765, upload-time = "2025-10-06T19:55:27.157Z" }, + { url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad", size = 1721737, upload-time = "2025-10-06T19:55:28.854Z" }, + { url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe", size = 1716052, upload-time = "2025-10-06T19:55:30.563Z" }, + { url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114", size = 1783532, upload-time = "2025-10-06T19:55:32.798Z" }, + { url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a", size = 1593072, upload-time = "2025-10-06T19:55:34.686Z" }, + { url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80", size = 1798613, upload-time = "2025-10-06T19:55:36.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6", size = 1737480, upload-time = "2025-10-06T19:55:38.043Z" }, + { url = "https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl", hash = "sha256:2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820", size = 429824, upload-time = "2025-10-06T19:55:39.595Z" }, + { url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469", size = 454137, upload-time = "2025-10-06T19:55:41.617Z" }, + { url = "https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a", size = 735585, upload-time = "2025-10-06T19:55:43.401Z" }, + { url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985", size = 490613, upload-time = "2025-10-06T19:55:45.237Z" }, + { url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae", size = 489750, upload-time = "2025-10-06T19:55:46.937Z" }, + { url = "https://files.pythonhosted.org/packages/33/59/752640c2b86ca987fe5703a01733b00d375e6cd2392bc7574489934e64e5/aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c32ada0abb4bc94c30be2b681c42f058ab104d048da6f0148280a51ce98add8c", size = 1736812, upload-time = "2025-10-06T19:55:48.917Z" }, + { url = "https://files.pythonhosted.org/packages/3d/c6/dd6b86ddb852a7fdbcdc7a45b6bdc80178aef713c08279afcaee7a5a9f07/aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4af1f8877ca46ecdd0bc0d4a6b66d4b2bddc84a79e2e8366bc0d5308e76bceb8", size = 1698535, upload-time = "2025-10-06T19:55:50.75Z" }, + { url = "https://files.pythonhosted.org/packages/33/e2/27c92d205b9e8cee7661670e8e9f187931b71e26d42796b153d2a0ba6949/aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e04ab827ec4f775817736b20cdc8350f40327f9b598dec4e18c9ffdcbea88a93", size = 1766573, upload-time = "2025-10-06T19:55:53.106Z" }, + { url = "https://files.pythonhosted.org/packages/df/6a/1fc1ad71d130a30f7a207d8d958a41224c29b834463b5185efb2dbff6ad4/aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a6d9487b9471ec36b0faedf52228cd732e89be0a2bbd649af890b5e2ce422353", size = 1865229, upload-time = "2025-10-06T19:55:55.01Z" }, + { url = "https://files.pythonhosted.org/packages/14/51/d0c1701a79fcb0109cff5304da16226581569b89a282d8e7f1549a7e3ec0/aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2e66c57416352f36bf98f6641ddadd47c93740a22af7150d3e9a1ef6e983f9a8", size = 1750379, upload-time = "2025-10-06T19:55:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3d/2ec4b934f85856de1c0c18e90adc8902adadbfac2b3c0b831bfeb7214fc8/aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:469167d5372f5bb3aedff4fc53035d593884fff2617a75317740e885acd48b04", size = 1560798, upload-time = "2025-10-06T19:55:58.888Z" }, + { url = "https://files.pythonhosted.org/packages/38/56/e23d9c3e13006e599fdce3851517c70279e177871e3e567d22cf3baf5d6c/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a9f3546b503975a69b547c9fd1582cad10ede1ce6f3e313a2f547c73a3d7814f", size = 1697552, upload-time = "2025-10-06T19:56:01.172Z" }, + { url = "https://files.pythonhosted.org/packages/56/cb/caa32c2ccaeca0a3dc39129079fd2ad02f9406c3a5f7924340435b87d4cd/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6b4174fcec98601f0cfdf308ee29a6ae53c55f14359e848dab4e94009112ee7d", size = 1718609, upload-time = "2025-10-06T19:56:03.102Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c0/5911856fef9e40fd1ccbb8c54a90116875d5753a92c1cac66ce2059b390d/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a533873a7a4ec2270fb362ee5a0d3b98752e4e1dc9042b257cd54545a96bd8ed", size = 1735887, upload-time = "2025-10-06T19:56:04.841Z" }, + { url = "https://files.pythonhosted.org/packages/0e/48/8d6f4757a24c02f0a454c043556593a00645d10583859f7156db44d8b7d3/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ce887c5e54411d607ee0959cac15bb31d506d86a9bcaddf0b7e9d63325a7a802", size = 1553079, upload-time = "2025-10-06T19:56:07.197Z" }, + { url = "https://files.pythonhosted.org/packages/39/fa/e82c9445e40b50e46770702b5b6ca2f767966d53e1a5eef03583ceac6df6/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d871f6a30d43e32fc9252dc7b9febe1a042b3ff3908aa83868d7cf7c9579a59b", size = 1762750, upload-time = "2025-10-06T19:56:09.376Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e6/9d30554e7f1e700bfeae4ab6b153d5dc7441606a9ec5e929288fa93a1477/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:222c828243b4789d79a706a876910f656fad4381661691220ba57b2ab4547865", size = 1717461, upload-time = "2025-10-06T19:56:11.551Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e5/29cca547990a59ea54f0674fc01de98519fc628cfceeab6175711750eca7/aiohttp-3.13.0-cp312-cp312-win32.whl", hash = "sha256:682d2e434ff2f1108314ff7f056ce44e457f12dbed0249b24e106e385cf154b9", size = 424633, upload-time = "2025-10-06T19:56:13.316Z" }, + { url = "https://files.pythonhosted.org/packages/8b/68/46dd042d7bc62eab30bafdb8569f55ef125c3a88bb174270324224f8df56/aiohttp-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a2be20eb23888df130214b91c262a90e2de1553d6fb7de9e9010cec994c0ff2", size = 451401, upload-time = "2025-10-06T19:56:15.188Z" }, ] [[package]] @@ -118,16 +124,16 @@ wheels = [ [[package]] name = "alembic" -version = "1.16.5" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/ca/4dc52902cf3491892d464f5265a81e9dff094692c8a049a3ed6a05fe7ee8/alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e", size = 1969868, upload-time = "2025-08-27T18:02:05.668Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6b/45/6f4555f2039f364c3ce31399529dcf48dd60726ff3715ad67f547d87dfd2/alembic-1.17.0.tar.gz", hash = "sha256:4652a0b3e19616b57d652b82bfa5e38bf5dbea0813eed971612671cb9e90c0fe", size = 1975526, upload-time = "2025-10-11T18:40:13.585Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/4a/4c61d4c84cfd9befb6fa08a702535b27b21fff08c946bc2f6139decbf7f7/alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3", size = 247355, upload-time = "2025-08-27T18:02:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/44/1f/38e29b06bfed7818ebba1f84904afdc8153ef7b6c7e0d8f3bc6643f5989c/alembic-1.17.0-py3-none-any.whl", hash = "sha256:80523bc437d41b35c5db7e525ad9d908f79de65c27d6a5a5eab6df348a352d99", size = 247449, upload-time = "2025-10-11T18:40:16.288Z" }, ] [[package]] @@ -333,16 +339,16 @@ wheels = [ [[package]] name = "anyio" -version = "4.10.0" +version = "4.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] @@ -377,11 +383,11 @@ wheels = [ [[package]] name = "asgiref" -version = "3.9.1" +version = "3.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870, upload-time = "2025-07-08T09:07:43.344Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/08/4dfec9b90758a59acc6be32ac82e98d1fbfc321cb5cfa410436dbacf821c/asgiref-3.10.0.tar.gz", hash = "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e", size = 37483, upload-time = "2025-10-05T09:15:06.557Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" }, + { url = "https://files.pythonhosted.org/packages/17/9c/fc2331f538fbf7eedba64b2052e99ccf9ba9d6888e2f41441ee28847004b/asgiref-3.10.0-py3-none-any.whl", hash = 
"sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", size = 24050, upload-time = "2025-10-05T09:15:05.11Z" }, ] [[package]] @@ -395,23 +401,23 @@ wheels = [ [[package]] name = "attrs" -version = "25.3.0" +version = "25.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] [[package]] name = "authlib" -version = "1.6.4" +version = "1.6.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/bb/73a1f1c64ee527877f64122422dafe5b87a846ccf4ac933fe21bcbb8fee8/authlib-1.6.4.tar.gz", hash = "sha256:104b0442a43061dc8bc23b133d1d06a2b0a9c2e3e33f34c4338929e816287649", size = 164046, upload-time = "2025-09-17T09:59:23.897Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/3f/1d3bbd0bf23bdd99276d4def22f29c27a914067b4cf66f753ff9b8bbd0f3/authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b", size = 164553, upload-time = "2025-10-02T13:36:09.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/aa/91355b5f539caf1b94f0e66ff1e4ee39373b757fce08204981f7829ede51/authlib-1.6.4-py2.py3-none-any.whl", hash = "sha256:39313d2a2caac3ecf6d8f95fbebdfd30ae6ea6ae6a6db794d976405fdd9aa796", size = 243076, upload-time = "2025-09-17T09:59:22.259Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/5082412d1ee302e9e7d80b6949bc4d2a8fa1149aaab610c5fc24709605d6/authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a", size = 243608, upload-time = "2025-10-02T13:36:07.637Z" }, ] [[package]] @@ -445,16 +451,17 @@ wheels = [ [[package]] name = "azure-storage-blob" -version = "12.13.0" +version = "12.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core" }, { name = "cryptography" }, - { name = "msrest" }, + { name = "isodate" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/93/b13bf390e940a79a399981f75ac8d2e05a70112a95ebb7b41e9b752d2921/azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884", size = 684838, upload-time = "2022-07-07T22:35:44.543Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332, upload-time = "2025-07-16T21:34:07.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/2a/b8246df35af68d64fb7292c93dbbde63cd25036f2f669a9d9ae59e518c76/azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", size = 377309, upload-time = "2022-07-07T22:35:41.905Z" }, + { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907, upload-time = "2025-07-16T21:34:09.367Z" }, ] [[package]] @@ -468,68 +475,70 @@ wheels = [ [[package]] name = "basedpyright" -version = "1.31.4" +version = "1.31.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodejs-wheel-binaries" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/53/570b03ec0445a9b2cc69788482c1d12902a9b88a9b159e449c4c537c4e3a/basedpyright-1.31.4.tar.gz", hash = "sha256:2450deb16530f7c88c1a7da04530a079f9b0b18ae1c71cb6f812825b3b82d0b1", size = 22494467, upload-time = "2025-09-03T13:05:55.817Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/ba/ed69e8df732a09c8ca469f592c8e08707fe29149735b834c276d94d4a3da/basedpyright-1.31.7.tar.gz", hash = "sha256:394f334c742a19bcc5905b2455c9f5858182866b7679a6f057a70b44b049bceb", size = 22710948, upload-time = "2025-10-11T05:12:48.3Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/40/d1047a5addcade9291685d06ef42a63c1347517018bafd82747af9da0294/basedpyright-1.31.4-py3-none-any.whl", hash = "sha256:055e4a38024bd653be12d6216c1cfdbee49a1096d342b4d5f5b4560f7714b6fc", size = 11731440, upload-time = "2025-09-03T13:05:52.308Z" }, + { url = "https://files.pythonhosted.org/packages/f8/90/ce01ad2d0afdc1b82b8b5aaba27e60d2e138e39d887e71c35c55d8f1bfcd/basedpyright-1.31.7-py3-none-any.whl", hash = "sha256:7c54beb7828c9ed0028630aaa6904f395c27e5a9f5a313aa9e91fc1d11170831", size = 11817571, upload-time = "2025-10-11T05:12:45.432Z" }, ] [[package]] name = "bce-python-sdk" -version = "0.9.45" +version = "0.9.46" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "future" }, { name = "pycryptodome" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/19/0f23aedecb980288e663ba9ce81fa1545d6331d62bd75262fca49678052d/bce_python_sdk-0.9.45.tar.gz", hash = "sha256:ba60d66e80fcd012a6362bf011fee18bca616b0005814d261aba3aa202f7025f", size = 252769, upload-time = "2025-08-28T10:24:54.303Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/57/f98bc15c12cc022ef195f689ee57ed61d8a8677bda3089c4d58fb1872d45/bce_python_sdk-0.9.46.tar.gz", hash = "sha256:4bf01b22e6d172ccd94aa201f8bc6f2a98d0da4784160e77cfacfcc71c2686be", size = 253806, upload-time = "2025-09-15T06:51:52.753Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/1f/d3fd91808a1f4881b4072424390d38e85707edd75ed5d9cea2a0299a7a7a/bce_python_sdk-0.9.45-py3-none-any.whl", hash = "sha256:cce3ca7ad4de8be2cc0722c1d6a7db7be6f2833f8d9ca7f892c572e6ff78a959", size = 352012, upload-time = "2025-08-28T10:24:52.387Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/f5/20e9ab324b22a77970c57bc8267e586e85e2aa1277d80f2c58ca8a39a13e/bce_python_sdk-0.9.46-py3-none-any.whl", hash = "sha256:655074da6592ce8b036f605d9a272bfdcd1f515eb2f8e3f0333bb7cc62f700cb", size = 352622, upload-time = "2025-09-15T06:51:50.811Z" }, ] [[package]] name = "bcrypt" -version = "4.3.0" +version = "5.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697, upload-time = "2025-02-28T01:24:09.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019, upload-time = "2025-02-28T01:23:05.838Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174, upload-time = "2025-02-28T01:23:07.274Z" }, - { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870, upload-time = "2025-02-28T01:23:09.151Z" }, - { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601, upload-time = "2025-02-28T01:23:11.461Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660, upload-time = "2025-02-28T01:23:12.989Z" }, - { url = "https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083, upload-time = "2025-02-28T01:23:14.5Z" }, - { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237, upload-time = "2025-02-28T01:23:16.686Z" }, - { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737, upload-time = "2025-02-28T01:23:18.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741, upload-time = "2025-02-28T01:23:21.041Z" }, - { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472, upload-time = "2025-02-28T01:23:23.183Z" }, - { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606, upload-time = "2025-02-28T01:23:25.361Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867, upload-time = "2025-02-28T01:23:26.875Z" }, - { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589, upload-time = "2025-02-28T01:23:28.381Z" }, - { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794, upload-time = "2025-02-28T01:23:30.187Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969, upload-time = "2025-02-28T01:23:31.945Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158, upload-time = "2025-02-28T01:23:34.161Z" }, - { url = "https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285, upload-time = "2025-02-28T01:23:35.765Z" }, - { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583, upload-time = "2025-02-28T01:23:38.021Z" }, - { url = "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896, upload-time = "2025-02-28T01:23:39.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492, upload-time = "2025-02-28T01:23:40.901Z" }, - { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213, upload-time = "2025-02-28T01:23:42.653Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162, upload-time = "2025-02-28T01:23:43.964Z" }, - { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856, upload-time = "2025-02-28T01:23:46.011Z" }, - { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726, upload-time = "2025-02-28T01:23:47.575Z" }, - { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664, upload-time = "2025-02-28T01:23:49.059Z" }, - { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" }, - { url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, - { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103, upload-time = "2025-02-28T01:24:00.764Z" }, - { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513, upload-time = "2025-02-28T01:24:02.243Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685, upload-time = "2025-02-28T01:24:04.512Z" }, - { url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110, upload-time = "2025-02-28T01:24:05.896Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" }, + { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" }, ] [[package]] @@ -546,11 +555,11 @@ wheels = [ [[package]] name = "billiard" -version = "4.2.1" +version = "4.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/6a/1405343016bce8354b29d90aad6b0bf6485b5e60404516e4b9a3a9646cf0/billiard-4.2.2.tar.gz", hash = "sha256:e815017a062b714958463e07ba15981d802dc53d41c5b69d28c5a7c238f8ecf3", size = 155592, upload-time = "2025-09-20T14:44:40.456Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/a6/80/ef8dff49aae0e4430f81842f7403e14e0ca59db7bbaf7af41245b67c6b25/billiard-4.2.2-py3-none-any.whl", hash = "sha256:4bc05dcf0d1cc6addef470723aac2a6232f3c7ed7475b0b580473a9145829457", size = 86896, upload-time = "2025-09-20T14:44:39.157Z" }, ] [[package]] @@ -578,16 +587,16 @@ wheels = [ [[package]] name = "boto3-stubs" -version = "1.40.35" +version = "1.40.51" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/24/18/6a64ff9603845d635f6167b6d9a3f9a6e658d8a28eef36f8423eb5a99ae1/boto3_stubs-1.40.35.tar.gz", hash = "sha256:2d6f2dbe6e9b42deb7b8fbeed051461e7906903f26e99634d00be45cc40db41a", size = 100819, upload-time = "2025-09-19T19:42:36.372Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/4d/b07f9ee0fe432fa8ec6dc368ee7a0409e2b6d9df2c5a2a88265c9b6fd878/boto3_stubs-1.40.51.tar.gz", hash = "sha256:0281e820813a310954e15fb7c1d470c24c34c1cccc7b1ddad977fa293a1080a9", size = 100890, upload-time = "2025-10-13T19:25:36.126Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/d4/d744260908ad55903baefa086a3c9cabc50bfafd63c3f2d0e05688378013/boto3_stubs-1.40.35-py3-none-any.whl", hash = "sha256:2bb44e6c17831650a28e3e00bf5be0a6ba771fce08724ba978ffcd06a7bca7e3", size = 69689, upload-time = "2025-09-19T19:42:30.08Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2e/4476431f11fc3bf7a7e0f4f5c275f17607aa127da7c0d8685a4dc6bf6291/boto3_stubs-1.40.51-py3-none-any.whl", hash = 
"sha256:896d0ffaa298ce1749eea1a54946320a0f4e07c6912f8e1f8c0744a708ee25a4", size = 69709, upload-time = "2025-10-13T19:25:23.116Z" }, ] [package.optional-dependencies] @@ -611,14 +620,14 @@ wheels = [ [[package]] name = "botocore-stubs" -version = "1.40.29" +version = "1.40.51" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/5c/49b2860e2a26b7383d5915374e61d962a3853e3fd569e4370444f0b902c0/botocore_stubs-1.40.29.tar.gz", hash = "sha256:324669d5ed7b5f7271bf3c3ea7208191b1d183f17d7e73398f11fef4a31fdf6b", size = 42742, upload-time = "2025-09-11T20:22:35.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/ca/429fadb6e037cb7b300d508a0b24b59a71961db12539e21749cbec7e7422/botocore_stubs-1.40.51.tar.gz", hash = "sha256:8ddbeb1f68e39382533bb53f3b968d29e640406016af00ad8bbd6e1a2bd59536", size = 42249, upload-time = "2025-10-13T20:26:57.777Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/3c/f901ca6c4d66e0bebbfc56e614fc214416db72c613f768ee2fc84ffdbff4/botocore_stubs-1.40.29-py3-none-any.whl", hash = "sha256:84cbcc6328dddaa1f825830f7dec8fa0dcd3bac8002211322e8529cbfb5eaddd", size = 66843, upload-time = "2025-09-11T20:22:32.576Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/5f1296bc46f293f284a1a6259f3c1f21f4161088dc6f70428698841b56a7/botocore_stubs-1.40.51-py3-none-any.whl", hash = "sha256:9a028104979205c9be0b68bb59ba679e4fe452e017eec3d40f6c2b41c590a73c", size = 66541, upload-time = "2025-10-13T20:26:55.559Z" }, ] [[package]] @@ -693,7 +702,7 @@ name = "brotlicffi" version = "1.1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi" }, + { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192, upload-time = "2023-09-14T14:22:40.707Z" } wheels = [ @@ -773,11 +782,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.8.3" +version = "2025.10.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] [[package]] @@ -926,14 +935,14 @@ wheels = [ [[package]] name = "click" -version = "8.2.1" 
+version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] @@ -1076,7 +1085,7 @@ wheels = [ [[package]] name = "cos-python-sdk-v5" -version = "1.9.30" +version = "1.9.38" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "crcmod" }, @@ -1085,7 +1094,10 @@ dependencies = [ { name = "six" }, { name = "xmltodict" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/f2/be99b41433b33a76896680920fca621f191875ca410a66778015e47a501b/cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81", size = 108384, upload-time = "2024-06-14T08:02:37.063Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/3c/d208266fec7cc3221b449e236b87c3fc1999d5ac4379d4578480321cfecc/cos_python_sdk_v5-1.9.38.tar.gz", hash = "sha256:491a8689ae2f1a6f04dacba66a877b2c8d361456f9cfd788ed42170a1cbf7a9f", size = 98092, upload-time = "2025-07-22T07:56:20.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/c8/c9c156aa3bc7caba9b4f8a2b6abec3da6263215988f3fec0ea843f137a10/cos_python_sdk_v5-1.9.38-py3-none-any.whl", hash = "sha256:1d3dd3be2bd992b2e9c2dcd018e2596aa38eab022dbc86b4a5d14c8fc88370e6", size = 92601, upload-time = "2025-08-17T05:12:30.867Z" }, +] [[package]] name = "couchbase" @@ -1141,32 +1153,33 @@ toml = [ [[package]] name = "crc32c" -version = "2.7.1" +version = "2.7.1.post0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/4c/4e40cc26347ac8254d3f25b9f94710b8e8df24ee4dddc1ba41907a88a94d/crc32c-2.7.1.tar.gz", hash = "sha256:f91b144a21eef834d64178e01982bb9179c354b3e9e5f4c803b0e5096384968c", size = 45712, upload-time = "2024-09-24T06:20:17.553Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/02/5e49cc17a5f6f8cb78b55dd57d50b36416e69051c29bba1eab3e86a01927/crc32c-2.7.1.post0.tar.gz", hash = "sha256:dcaa776413af5790cc55561469cd76306e97b325fe4aa195db535fb3f328e709", size = 46574, upload-time = "2025-10-13T02:06:16.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/8e/2f37f46368bbfd50edfc11b96f0aa135699034b1b020966c70ebaff3463b/crc32c-2.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:19e03a50545a3ef400bd41667d5525f71030488629c57d819e2dd45064f16192", size 
= 49672, upload-time = "2024-09-24T06:18:18.032Z" }, - { url = "https://files.pythonhosted.org/packages/ed/b8/e52f7c4b045b871c2984d70f37c31d4861b533a8082912dfd107a96cf7c1/crc32c-2.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c03286b1e5ce9bed7090084f206aacd87c5146b4b10de56fe9e86cbbbf851cf", size = 37155, upload-time = "2024-09-24T06:18:19.373Z" }, - { url = "https://files.pythonhosted.org/packages/25/ee/0cfa82a68736697f3c7e435ba658c2ef8c997f42b89f6ab4545efe1b2649/crc32c-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ebbf144a1a56a532b353e81fa0f3edca4f4baa1bf92b1dde2c663a32bb6a15", size = 35372, upload-time = "2024-09-24T06:18:20.983Z" }, - { url = "https://files.pythonhosted.org/packages/aa/92/c878aaba81c431fcd93a059e9f6c90db397c585742793f0bf6e0c531cc67/crc32c-2.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96b794fd11945298fdd5eb1290a812efb497c14bc42592c5c992ca077458eeba", size = 54879, upload-time = "2024-09-24T06:18:23.085Z" }, - { url = "https://files.pythonhosted.org/packages/5b/f5/ab828ab3907095e06b18918408748950a9f726ee2b37be1b0839fb925ee1/crc32c-2.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df7194dd3c0efb5a21f5d70595b7a8b4fd9921fbbd597d6d8e7a11eca3e2d27", size = 52588, upload-time = "2024-09-24T06:18:24.463Z" }, - { url = "https://files.pythonhosted.org/packages/6a/2b/9e29e9ac4c4213d60491db09487125db358cd9263490fbadbd55e48fbe03/crc32c-2.7.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d698eec444b18e296a104d0b9bb6c596c38bdcb79d24eba49604636e9d747305", size = 53674, upload-time = "2024-09-24T06:18:25.624Z" }, - { url = "https://files.pythonhosted.org/packages/79/ed/df3c4c14bf1b29f5c9b52d51fb6793e39efcffd80b2941d994e8f7f5f688/crc32c-2.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e07cf10ef852d219d179333fd706d1c415626f1f05e60bd75acf0143a4d8b225", size = 54691, upload-time = "2024-09-24T06:18:26.578Z" }, - { url = "https://files.pythonhosted.org/packages/0c/47/4917af3c9c1df2fff28bbfa6492673c9adeae5599dcc207bbe209847489c/crc32c-2.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d2a051f296e6e92e13efee3b41db388931cdb4a2800656cd1ed1d9fe4f13a086", size = 52896, upload-time = "2024-09-24T06:18:28.174Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6f/26fc3dda5835cda8f6cd9d856afe62bdeae428de4c34fea200b0888e8835/crc32c-2.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1738259802978cdf428f74156175da6a5fdfb7256f647fdc0c9de1bc6cd7173", size = 53554, upload-time = "2024-09-24T06:18:29.104Z" }, - { url = "https://files.pythonhosted.org/packages/56/3e/6f39127f7027c75d130c0ba348d86a6150dff23761fbc6a5f71659f4521e/crc32c-2.7.1-cp311-cp311-win32.whl", hash = "sha256:f7786d219a1a1bf27d0aa1869821d11a6f8e90415cfffc1e37791690d4a848a1", size = 38370, upload-time = "2024-09-24T06:18:30.013Z" }, - { url = "https://files.pythonhosted.org/packages/c9/fb/1587c2705a3a47a3d0067eecf9a6fec510761c96dec45c7b038fb5c8ff46/crc32c-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:887f6844bb3ad35f0778cd10793ad217f7123a5422e40041231b8c4c7329649d", size = 39795, upload-time = "2024-09-24T06:18:31.324Z" }, - { url = "https://files.pythonhosted.org/packages/1d/02/998dc21333413ce63fe4c1ca70eafe61ca26afc7eb353f20cecdb77d614e/crc32c-2.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f7d1c4e761fe42bf856130daf8b2658df33fe0ced3c43dadafdfeaa42b57b950", size = 49568, upload-time = 
"2024-09-24T06:18:32.425Z" }, - { url = "https://files.pythonhosted.org/packages/9c/3e/e3656bfa76e50ef87b7136fef2dbf3c46e225629432fc9184fdd7fd187ff/crc32c-2.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:73361c79a6e4605204457f19fda18b042a94508a52e53d10a4239da5fb0f6a34", size = 37019, upload-time = "2024-09-24T06:18:34.097Z" }, - { url = "https://files.pythonhosted.org/packages/0b/7d/5ff9904046ad15a08772515db19df43107bf5e3901a89c36a577b5f40ba0/crc32c-2.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd778fc8ac0ed2ffbfb122a9aa6a0e409a8019b894a1799cda12c01534493e0", size = 35373, upload-time = "2024-09-24T06:18:35.02Z" }, - { url = "https://files.pythonhosted.org/packages/4d/41/4aedc961893f26858ab89fc772d0eaba91f9870f19eaa933999dcacb94ec/crc32c-2.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ef661b34e9f25991fface7f9ad85e81bbc1b3fe3b916fd58c893eabe2fa0b8", size = 54675, upload-time = "2024-09-24T06:18:35.954Z" }, - { url = "https://files.pythonhosted.org/packages/d6/63/8cabf09b7e39b9fec8f7010646c8b33057fc8d67e6093b3cc15563d23533/crc32c-2.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571aa4429444b5d7f588e4377663592145d2d25eb1635abb530f1281794fc7c9", size = 52386, upload-time = "2024-09-24T06:18:36.896Z" }, - { url = "https://files.pythonhosted.org/packages/79/13/13576941bf7cf95026abae43d8427c812c0054408212bf8ed490eda846b0/crc32c-2.7.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c02a3bd67dea95cdb25844aaf44ca2e1b0c1fd70b287ad08c874a95ef4bb38db", size = 53495, upload-time = "2024-09-24T06:18:38.099Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b6/55ffb26d0517d2d6c6f430ce2ad36ae7647c995c5bfd7abce7f32bb2bad1/crc32c-2.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d17637c4867672cb8adeea007294e3c3df9d43964369516cfe2c1f47ce500a", size = 54456, upload-time = "2024-09-24T06:18:39.051Z" }, - { url = "https://files.pythonhosted.org/packages/c2/1a/5562e54cb629ecc5543d3604dba86ddfc7c7b7bf31d64005b38a00d31d31/crc32c-2.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4a400ac3c69a32e180d8753fd7ec7bccb80ade7ab0812855dce8a208e72495f", size = 52647, upload-time = "2024-09-24T06:18:40.021Z" }, - { url = "https://files.pythonhosted.org/packages/48/ec/ce4138eaf356cd9aae60bbe931755e5e0151b3eca5f491fce6c01b97fd59/crc32c-2.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:588587772e55624dd9c7a906ec9e8773ae0b6ac5e270fc0bc84ee2758eba90d5", size = 53332, upload-time = "2024-09-24T06:18:40.925Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b5/144b42cd838a901175a916078781cb2c3c9f977151c9ba085aebd6d15b22/crc32c-2.7.1-cp312-cp312-win32.whl", hash = "sha256:9f14b60e5a14206e8173dd617fa0c4df35e098a305594082f930dae5488da428", size = 38371, upload-time = "2024-09-24T06:18:42.711Z" }, - { url = "https://files.pythonhosted.org/packages/ae/c4/7929dcd5d9b57db0cce4fe6f6c191049380fc6d8c9b9f5581967f4ec018e/crc32c-2.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:7c810a246660a24dc818047dc5f89c7ce7b2814e1e08a8e99993f4103f7219e8", size = 39805, upload-time = "2024-09-24T06:18:43.6Z" }, + { url = "https://files.pythonhosted.org/packages/77/e6/5723311e6320b35d7755fef8ab10c5d4b55173ce11888e8ddaf62b63091f/crc32c-2.7.1.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65124edce1903484b19109da542d8671a814bdd2cc4006847701449a1f137869", size = 64759, upload-time = "2025-10-13T02:05:01.212Z" }, + 
{ url = "https://files.pythonhosted.org/packages/94/ac/f9550d21a4434b5dad9124ccd6b7cee97ce66bc0cb91a605bf01d9c2475d/crc32c-2.7.1.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a177ee47782f9b83002b08c4d4ba57a6e31dcd96be89d1c6b71f599d9c06bba6", size = 61419, upload-time = "2025-10-13T02:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/ef/39/82874155870dc42c917ff842073c2714955c3c3d0629579a7ca3db1bbcb1/crc32c-2.7.1.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65ce2c3f1938b1310c1d592ac97f5e32b2cb67b67ae54ec89e710605f01e91cb", size = 59951, upload-time = "2025-10-13T02:05:03.268Z" }, + { url = "https://files.pythonhosted.org/packages/b9/24/42aa97aac254adeafaa44297654a520db1922dcab4a07bbb965b41d52b66/crc32c-2.7.1.post0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c57ac2129a4adc56b8898c524a33525f008a346edc5df2b1ab7b7bfc4e80bbe", size = 78633, upload-time = "2025-10-13T02:05:04.429Z" }, + { url = "https://files.pythonhosted.org/packages/ac/76/a0c8683b9c38e260d23eb8d419d3ca39277e5e640521e9cc7ca407633ee4/crc32c-2.7.1.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3dcdcc73ea9f5e5e32cf1d0868315c62274f8f8fb2a1356e6bf2e958fc7f5bc9", size = 80187, upload-time = "2025-10-13T02:05:05.254Z" }, + { url = "https://files.pythonhosted.org/packages/16/05/978a558f580294e521dc432656e8d1b955ddc73f22870d7e767ff9c1a8d4/crc32c-2.7.1.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7cc745faf51a57c056021c005766cd8bacd818213ef424064118747c99a16d70", size = 79263, upload-time = "2025-10-13T02:05:06.121Z" }, + { url = "https://files.pythonhosted.org/packages/cd/48/9efdc54175f56b5aea24fbd9076759e052ca828713590a6bf60f822d40f7/crc32c-2.7.1.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1220cf7e97f453e07f998574e663e822e5602ed591b9a2fd436bf65dcae26168", size = 78527, upload-time = "2025-10-13T02:05:06.978Z" }, + { url = "https://files.pythonhosted.org/packages/0f/88/5accf5fb60559df04d0863496a86b912af37e6b7d160d458da25e473a881/crc32c-2.7.1.post0-cp311-cp311-win32.whl", hash = "sha256:d5087f2bc6954b38dc1ceac9b2ea9c9c4956b4e8ce82d965f4c474aefac2d061", size = 63277, upload-time = "2025-10-13T02:05:07.852Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b2/c037161956d00324198a94962788b5e6a6e76b892d96205b15a37bea0c81/crc32c-2.7.1.post0-cp311-cp311-win_amd64.whl", hash = "sha256:feda0b536b1310b0535085835564918df6ba415e0b230734e1386deb7c614c02", size = 65029, upload-time = "2025-10-13T02:05:08.658Z" }, + { url = "https://files.pythonhosted.org/packages/7d/00/243cc1b15bcadf72bd71cf9a33d425715726b95b5f37a85b306d495362f4/crc32c-2.7.1.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4eda225a4c49901b9baf1af2aec19dd614c527bac81e02c52d1b9f1d6f6d244c", size = 64820, upload-time = "2025-10-13T02:05:09.476Z" }, + { url = "https://files.pythonhosted.org/packages/6e/76/e63deacf3e5dcd38764a1a617fd25749ea83fe20ff42a7912a855a975a0f/crc32c-2.7.1.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e304b07182b915fa9ab5340b51a6845d45331974d73b80a1710405ec8f0b4d44", size = 61474, upload-time = "2025-10-13T02:05:10.44Z" }, + { url = "https://files.pythonhosted.org/packages/c2/96/a341802b0a84fc00f9eca4e7dfdc0f41a69fc226b62ea1661468d4812800/crc32c-2.7.1.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1bbd4d2272aa7bdc5527fc3130caf31819e5efad19b6abd7158859f1cc808923", size = 59963, upload-time = "2025-10-13T02:05:11.271Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/8a/5e1f6789239935a95a6fb579e5f20dc4032265c5de215cec841d369ad188/crc32c-2.7.1.post0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:eea5fe4f477249f19201b2c1ac9f0df70987593b0dd0e0d15521480500d18455", size = 78461, upload-time = "2025-10-13T02:05:12.077Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7a/bf07239d7f55cf94ad6979de1f97105cdcfa1b73cf8818a5461f37043962/crc32c-2.7.1.post0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc97ce3c913eded8f4d19d5da7492ebb7ab7de1eb05749c8e5c48f4999e263e0", size = 79963, upload-time = "2025-10-13T02:05:13.343Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/09a11007d66767a1d339424560386c99323e904e5e7f0e75ff4a13156d3c/crc32c-2.7.1.post0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c13bdb21cc11fc2e9b7387fe726e65993f79407b3e4b8c107ee345e9c6cfe399", size = 79040, upload-time = "2025-10-13T02:05:14.216Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ca/4f8d8832524a70f39a20302e171782368fd66474e792b2aaf6bc9bb1ba9d/crc32c-2.7.1.post0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f9edc07f0617c212d700e31fc6437811b3036f84931e9b837a14169dd0e8d65", size = 78319, upload-time = "2025-10-13T02:05:15.303Z" }, + { url = "https://files.pythonhosted.org/packages/6d/41/63331e510e31928ae5af30fa3d40bca86b8b7c38164b5b59a57cdb8b5a2e/crc32c-2.7.1.post0-cp312-cp312-win32.whl", hash = "sha256:6d205730d184b5ba9a37ee855883b536e40dbf13817d15e4bab4997149c59d82", size = 63286, upload-time = "2025-10-13T02:05:16.181Z" }, + { url = "https://files.pythonhosted.org/packages/ed/3f/05cb1cd66b98f7165b8d181a164ef2c16b7ef0019a191e6ff8defa4df327/crc32c-2.7.1.post0-cp312-cp312-win_amd64.whl", hash = "sha256:f8c1584fe841883300cd3cb0e8341da5a4c954fc2dcf9e0eb15d3b697d90930e", size = 65034, upload-time = "2025-10-13T02:05:17.078Z" }, + { url = "https://files.pythonhosted.org/packages/54/7f/18a4262600e9f772d2b2d10adff4b002d64e5eaa6f0da5e6ded16086e8ad/crc32c-2.7.1.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:700d0637f620be903b596fd145d25664c0e821b9b24d462eaa3beeacb906478f", size = 60777, upload-time = "2025-10-13T02:06:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/1b/de/d9a6fdee4b1058b1922b1395814e010e85cb2c1a6ddb1388cbf7523a9f8f/crc32c-2.7.1.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:213aa16faf30c267579f9b76cfc572162fccd537095a5533e329318c2e5da589", size = 59663, upload-time = "2025-10-13T02:06:11.844Z" }, + { url = "https://files.pythonhosted.org/packages/4e/86/9e71dd8847ee075504a7ab69a101ab7dff7fd46cc22dbbef242ceeb187bf/crc32c-2.7.1.post0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e1b2b232edd75f3281ab059d2811e4ac674931a1889e0070a2fc73d93c0f204", size = 62539, upload-time = "2025-10-13T02:06:13.075Z" }, + { url = "https://files.pythonhosted.org/packages/67/c0/905905212c0aec771d81df4d88f87008dadeecd6ad628d1e17f9a5acd7dd/crc32c-2.7.1.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2e76c1e536f2408c5c5ce796e1a89ef252a438aa011c1f31048aa0783b75626", size = 63248, upload-time = "2025-10-13T02:06:14.764Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/6bdc8b946c6db999a0318e620a0f50e2099d9cba3d9c9de05932d12795a5/crc32c-2.7.1.post0-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:a1ea03ed177cb022d859ce86bac6044d5cd68dcf7e22f022e288a96f2bd6fa2f", size = 65049, upload-time = "2025-10-13T02:06:15.672Z" }, ] [[package]] @@ -1177,43 +1190,49 @@ sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c [[package]] name = "cryptography" -version = "45.0.7" +version = "46.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/35/c495bffc2056f2dadb32434f1feedd79abde2a7f8363e1974afa9c33c7e2/cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971", size = 744980, upload-time = "2025-09-01T11:15:03.146Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293, upload-time = "2025-10-01T00:29:11.856Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/91/925c0ac74362172ae4516000fe877912e33b5983df735ff290c653de4913/cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee", size = 7041105, upload-time = "2025-09-01T11:13:59.684Z" }, - { url = "https://files.pythonhosted.org/packages/fc/63/43641c5acce3a6105cf8bd5baeceeb1846bb63067d26dae3e5db59f1513a/cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6", size = 4205799, upload-time = "2025-09-01T11:14:02.517Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/c238dd9107f10bfde09a4d1c52fd38828b1aa353ced11f358b5dd2507d24/cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339", size = 4430504, upload-time = "2025-09-01T11:14:04.522Z" }, - { url = "https://files.pythonhosted.org/packages/62/62/24203e7cbcc9bd7c94739428cd30680b18ae6b18377ae66075c8e4771b1b/cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8", size = 4209542, upload-time = "2025-09-01T11:14:06.309Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e3/e7de4771a08620eef2389b86cd87a2c50326827dea5528feb70595439ce4/cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf", size = 3889244, upload-time = "2025-09-01T11:14:08.152Z" }, - { url = "https://files.pythonhosted.org/packages/96/b8/bca71059e79a0bb2f8e4ec61d9c205fbe97876318566cde3b5092529faa9/cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513", size = 4461975, upload-time = "2025-09-01T11:14:09.755Z" }, - { url = "https://files.pythonhosted.org/packages/58/67/3f5b26937fe1218c40e95ef4ff8d23c8dc05aa950d54200cc7ea5fb58d28/cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3", size = 4209082, upload-time = "2025-09-01T11:14:11.229Z" }, - { url = "https://files.pythonhosted.org/packages/0e/e4/b3e68a4ac363406a56cf7b741eeb80d05284d8c60ee1a55cdc7587e2a553/cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3", size = 4460397, upload-time = "2025-09-01T11:14:12.924Z" }, - { url = "https://files.pythonhosted.org/packages/22/49/2c93f3cd4e3efc8cb22b02678c1fad691cff9dd71bb889e030d100acbfe0/cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6", size = 4337244, upload-time = "2025-09-01T11:14:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/04/19/030f400de0bccccc09aa262706d90f2ec23d56bc4eb4f4e8268d0ddf3fb8/cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd", size = 4568862, upload-time = "2025-09-01T11:14:16.185Z" }, - { url = "https://files.pythonhosted.org/packages/29/56/3034a3a353efa65116fa20eb3c990a8c9f0d3db4085429040a7eef9ada5f/cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8", size = 2936578, upload-time = "2025-09-01T11:14:17.638Z" }, - { url = "https://files.pythonhosted.org/packages/b3/61/0ab90f421c6194705a99d0fa9f6ee2045d916e4455fdbb095a9c2c9a520f/cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443", size = 3405400, upload-time = "2025-09-01T11:14:18.958Z" }, - { url = "https://files.pythonhosted.org/packages/63/e8/c436233ddf19c5f15b25ace33979a9dd2e7aa1a59209a0ee8554179f1cc0/cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2", size = 7021824, upload-time = "2025-09-01T11:14:20.954Z" }, - { url = "https://files.pythonhosted.org/packages/bc/4c/8f57f2500d0ccd2675c5d0cc462095adf3faa8c52294ba085c036befb901/cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691", size = 4202233, upload-time = "2025-09-01T11:14:22.454Z" }, - { url = "https://files.pythonhosted.org/packages/eb/ac/59b7790b4ccaed739fc44775ce4645c9b8ce54cbec53edf16c74fd80cb2b/cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59", size = 4423075, upload-time = "2025-09-01T11:14:24.287Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/d4f07ea21434bf891faa088a6ac15d6d98093a66e75e30ad08e88aa2b9ba/cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4", size = 4204517, upload-time = "2025-09-01T11:14:25.679Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ac/924a723299848b4c741c1059752c7cfe09473b6fd77d2920398fc26bfb53/cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3", size = 3882893, upload-time = "2025-09-01T11:14:27.1Z" }, - { url = "https://files.pythonhosted.org/packages/83/dc/4dab2ff0a871cc2d81d3ae6d780991c0192b259c35e4d83fe1de18b20c70/cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1", size = 4450132, upload-time = "2025-09-01T11:14:28.58Z" }, - { url = "https://files.pythonhosted.org/packages/12/dd/b2882b65db8fc944585d7fb00d67cf84a9cef4e77d9ba8f69082e911d0de/cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27", size = 4204086, upload-time = "2025-09-01T11:14:30.572Z" }, - { url = "https://files.pythonhosted.org/packages/5d/fa/1d5745d878048699b8eb87c984d4ccc5da4f5008dfd3ad7a94040caca23a/cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17", size = 4449383, upload-time = "2025-09-01T11:14:32.046Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/fc61f87931bc030598e1876c45b936867bb72777eac693e905ab89832670/cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b", size = 4332186, upload-time = "2025-09-01T11:14:33.95Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/09700ddad7443ccb11d674efdbe9a832b4455dc1f16566d9bd3834922ce5/cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c", size = 4561639, upload-time = "2025-09-01T11:14:35.343Z" }, - { url = "https://files.pythonhosted.org/packages/71/ed/8f4c1337e9d3b94d8e50ae0b08ad0304a5709d483bfcadfcc77a23dbcb52/cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5", size = 2926552, upload-time = "2025-09-01T11:14:36.929Z" }, - { url = "https://files.pythonhosted.org/packages/bc/ff/026513ecad58dacd45d1d24ebe52b852165a26e287177de1d545325c0c25/cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90", size = 3392742, upload-time = "2025-09-01T11:14:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/99/4e/49199a4c82946938a3e05d2e8ad9482484ba48bbc1e809e3d506c686d051/cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde", size = 3584634, upload-time = "2025-09-01T11:14:50.593Z" }, - { url = "https://files.pythonhosted.org/packages/16/ce/5f6ff59ea9c7779dba51b84871c19962529bdcc12e1a6ea172664916c550/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34", size = 4149533, upload-time = "2025-09-01T11:14:52.091Z" }, - { url = "https://files.pythonhosted.org/packages/ce/13/b3cfbd257ac96da4b88b46372e662009b7a16833bfc5da33bb97dd5631ae/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9", size = 4385557, upload-time = "2025-09-01T11:14:53.551Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c5/8c59d6b7c7b439ba4fc8d0cab868027fd095f215031bc123c3a070962912/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae", size = 4149023, upload-time = "2025-09-01T11:14:55.022Z" }, - { url = "https://files.pythonhosted.org/packages/55/32/05385c86d6ca9ab0b4d5bb442d2e3d85e727939a11f3e163fc776ce5eb40/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b", size = 4385722, upload-time = "2025-09-01T11:14:57.319Z" }, - { url = "https://files.pythonhosted.org/packages/23/87/7ce86f3fa14bc11a5a48c30d8103c26e09b6465f8d8e9d74cf7a0714f043/cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63", size = 3332908, upload-time = "2025-09-01T11:14:58.78Z" }, + { url = "https://files.pythonhosted.org/packages/e0/98/7a8df8c19a335c8028414738490fc3955c0cecbfdd37fcc1b9c3d04bd561/cryptography-46.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3e32ab7dd1b1ef67b9232c4cf5e2ee4cd517d4316ea910acaaa9c5712a1c663", size = 7261255, upload-time = "2025-10-01T00:27:22.947Z" }, + { url = "https://files.pythonhosted.org/packages/c6/38/b2adb2aa1baa6706adc3eb746691edd6f90a656a9a65c3509e274d15a2b8/cryptography-46.0.2-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fd1a69086926b623ef8126b4c33d5399ce9e2f3fac07c9c734c2a4ec38b6d02", size = 4297596, upload-time = "2025-10-01T00:27:25.258Z" }, + { url = "https://files.pythonhosted.org/packages/e4/27/0f190ada240003119488ae66c897b5e97149292988f556aef4a6a2a57595/cryptography-46.0.2-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb7fb9cd44c2582aa5990cf61a4183e6f54eea3172e54963787ba47287edd135", size = 4450899, upload-time = "2025-10-01T00:27:27.458Z" }, + { url = "https://files.pythonhosted.org/packages/85/d5/e4744105ab02fdf6bb58ba9a816e23b7a633255987310b4187d6745533db/cryptography-46.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9066cfd7f146f291869a9898b01df1c9b0e314bfa182cef432043f13fc462c92", size = 4300382, upload-time = "2025-10-01T00:27:29.091Z" }, + { url = "https://files.pythonhosted.org/packages/33/fb/bf9571065c18c04818cb07de90c43fc042c7977c68e5de6876049559c72f/cryptography-46.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:97e83bf4f2f2c084d8dd792d13841d0a9b241643151686010866bbd076b19659", size = 4017347, upload-time = "2025-10-01T00:27:30.767Z" }, + { url = "https://files.pythonhosted.org/packages/35/72/fc51856b9b16155ca071080e1a3ad0c3a8e86616daf7eb018d9565b99baa/cryptography-46.0.2-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:4a766d2a5d8127364fd936572c6e6757682fc5dfcbdba1632d4554943199f2fa", size = 4983500, upload-time = "2025-10-01T00:27:32.741Z" }, + { url = "https://files.pythonhosted.org/packages/c1/53/0f51e926799025e31746d454ab2e36f8c3f0d41592bc65cb9840368d3275/cryptography-46.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fab8f805e9675e61ed8538f192aad70500fa6afb33a8803932999b1049363a08", size = 4482591, upload-time = "2025-10-01T00:27:34.869Z" }, + { url = "https://files.pythonhosted.org/packages/86/96/4302af40b23ab8aa360862251fb8fc450b2a06ff24bc5e261c2007f27014/cryptography-46.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1e3b6428a3d56043bff0bb85b41c535734204e599c1c0977e1d0f261b02f3ad5", size = 4300019, upload-time = "2025-10-01T00:27:37.029Z" }, + { url = "https://files.pythonhosted.org/packages/9b/59/0be12c7fcc4c5e34fe2b665a75bc20958473047a30d095a7657c218fa9e8/cryptography-46.0.2-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:1a88634851d9b8de8bb53726f4300ab191d3b2f42595e2581a54b26aba71b7cc", size = 4950006, upload-time = "2025-10-01T00:27:40.272Z" }, + { url = "https://files.pythonhosted.org/packages/55/1d/42fda47b0111834b49e31590ae14fd020594d5e4dadd639bce89ad790fba/cryptography-46.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:be939b99d4e091eec9a2bcf41aaf8f351f312cd19ff74b5c83480f08a8a43e0b", size = 4482088, upload-time = "2025-10-01T00:27:42.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/50/60f583f69aa1602c2bdc7022dae86a0d2b837276182f8c1ec825feb9b874/cryptography-46.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f13b040649bc18e7eb37936009b24fd31ca095a5c647be8bb6aaf1761142bd1", size = 4425599, upload-time = "2025-10-01T00:27:44.616Z" }, + { url = "https://files.pythonhosted.org/packages/d1/57/d8d4134cd27e6e94cf44adb3f3489f935bde85f3a5508e1b5b43095b917d/cryptography-46.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bdc25e4e01b261a8fda4e98618f1c9515febcecebc9566ddf4a70c63967043b", size = 4697458, upload-time = "2025-10-01T00:27:46.209Z" }, + { url = "https://files.pythonhosted.org/packages/d1/2b/531e37408573e1da33adfb4c58875013ee8ac7d548d1548967d94a0ae5c4/cryptography-46.0.2-cp311-abi3-win32.whl", hash = "sha256:8b9bf67b11ef9e28f4d78ff88b04ed0929fcd0e4f70bb0f704cfc32a5c6311ee", size = 3056077, upload-time = "2025-10-01T00:27:48.424Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cd/2f83cafd47ed2dc5a3a9c783ff5d764e9e70d3a160e0df9a9dcd639414ce/cryptography-46.0.2-cp311-abi3-win_amd64.whl", hash = "sha256:758cfc7f4c38c5c5274b55a57ef1910107436f4ae842478c4989abbd24bd5acb", size = 3512585, upload-time = "2025-10-01T00:27:50.521Z" }, + { url = "https://files.pythonhosted.org/packages/00/36/676f94e10bfaa5c5b86c469ff46d3e0663c5dc89542f7afbadac241a3ee4/cryptography-46.0.2-cp311-abi3-win_arm64.whl", hash = "sha256:218abd64a2e72f8472c2102febb596793347a3e65fafbb4ad50519969da44470", size = 2927474, upload-time = "2025-10-01T00:27:52.91Z" }, + { url = "https://files.pythonhosted.org/packages/d5/bb/fa95abcf147a1b0bb94d95f53fbb09da77b24c776c5d87d36f3d94521d2c/cryptography-46.0.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a08e7401a94c002e79dc3bc5231b6558cd4b2280ee525c4673f650a37e2c7685", size = 7248090, upload-time = "2025-10-01T00:28:22.846Z" }, + { url = "https://files.pythonhosted.org/packages/b7/66/f42071ce0e3ffbfa80a88feadb209c779fda92a23fbc1e14f74ebf72ef6b/cryptography-46.0.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d30bc11d35743bf4ddf76674a0a369ec8a21f87aaa09b0661b04c5f6c46e8d7b", size = 4293123, upload-time = "2025-10-01T00:28:25.072Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/1fdbd2e5c1ba822828d250e5a966622ef00185e476d1cd2726b6dd135e53/cryptography-46.0.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bca3f0ce67e5a2a2cf524e86f44697c4323a86e0fd7ba857de1c30d52c11ede1", size = 4439524, upload-time = "2025-10-01T00:28:26.808Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c1/5e4989a7d102d4306053770d60f978c7b6b1ea2ff8c06e0265e305b23516/cryptography-46.0.2-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff798ad7a957a5021dcbab78dfff681f0cf15744d0e6af62bd6746984d9c9e9c", size = 4297264, upload-time = "2025-10-01T00:28:29.327Z" }, + { url = "https://files.pythonhosted.org/packages/28/78/b56f847d220cb1d6d6aef5a390e116ad603ce13a0945a3386a33abc80385/cryptography-46.0.2-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cb5e8daac840e8879407acbe689a174f5ebaf344a062f8918e526824eb5d97af", size = 4011872, upload-time = "2025-10-01T00:28:31.479Z" }, + { url = "https://files.pythonhosted.org/packages/e1/80/2971f214b066b888944f7b57761bf709ee3f2cf805619a18b18cab9b263c/cryptography-46.0.2-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:3f37aa12b2d91e157827d90ce78f6180f0c02319468a0aea86ab5a9566da644b", size = 4978458, upload-time = "2025-10-01T00:28:33.267Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/84/0cb0a2beaa4f1cbe63ebec4e97cd7e0e9f835d0ba5ee143ed2523a1e0016/cryptography-46.0.2-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e38f203160a48b93010b07493c15f2babb4e0f2319bbd001885adb3f3696d21", size = 4472195, upload-time = "2025-10-01T00:28:36.039Z" }, + { url = "https://files.pythonhosted.org/packages/30/8b/2b542ddbf78835c7cd67b6fa79e95560023481213a060b92352a61a10efe/cryptography-46.0.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d19f5f48883752b5ab34cff9e2f7e4a7f216296f33714e77d1beb03d108632b6", size = 4296791, upload-time = "2025-10-01T00:28:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/78/12/9065b40201b4f4876e93b9b94d91feb18de9150d60bd842a16a21565007f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:04911b149eae142ccd8c9a68892a70c21613864afb47aba92d8c7ed9cc001023", size = 4939629, upload-time = "2025-10-01T00:28:39.654Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9e/6507dc048c1b1530d372c483dfd34e7709fc542765015425f0442b08547f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8b16c1ede6a937c291d41176934268e4ccac2c6521c69d3f5961c5a1e11e039e", size = 4471988, upload-time = "2025-10-01T00:28:41.822Z" }, + { url = "https://files.pythonhosted.org/packages/b1/86/d025584a5f7d5c5ec8d3633dbcdce83a0cd579f1141ceada7817a4c26934/cryptography-46.0.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:747b6f4a4a23d5a215aadd1d0b12233b4119c4313df83ab4137631d43672cc90", size = 4422989, upload-time = "2025-10-01T00:28:43.608Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/536370418b38a15a61bbe413006b79dfc3d2b4b0eafceb5581983f973c15/cryptography-46.0.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6b275e398ab3a7905e168c036aad54b5969d63d3d9099a0a66cc147a3cc983be", size = 4685578, upload-time = "2025-10-01T00:28:45.361Z" }, + { url = "https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711, upload-time = "2025-10-01T00:28:47.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007, upload-time = "2025-10-01T00:28:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/1aabe338149a7d0f52c3e30f2880b20027ca2a485316756ed6f000462db3/cryptography-46.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1d3b3edd145953832e09607986f2bd86f85d1dc9c48ced41808b18009d9f30e5", size = 3714495, upload-time = "2025-10-01T00:28:57.222Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0a/0d10eb970fe3e57da9e9ddcfd9464c76f42baf7b3d0db4a782d6746f788f/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fe245cf4a73c20592f0f48da39748b3513db114465be78f0a36da847221bd1b4", size = 4243379, upload-time = "2025-10-01T00:28:58.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/60/e274b4d41a9eb82538b39950a74ef06e9e4d723cb998044635d9deb1b435/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2b9cad9cf71d0c45566624ff76654e9bae5f8a25970c250a26ccfc73f8553e2d", size = 4409533, upload-time = "2025-10-01T00:29:00.785Z" }, + { url = "https://files.pythonhosted.org/packages/19/9a/fb8548f762b4749aebd13b57b8f865de80258083fe814957f9b0619cfc56/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9bd26f2f75a925fdf5e0a446c0de2714f17819bf560b44b7480e4dd632ad6c46", size = 4243120, upload-time = "2025-10-01T00:29:02.515Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/883f24147fd4a0c5cab74ac7e36a1ff3094a54ba5c3a6253d2ff4b19255b/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7282d8f092b5be7172d6472f29b0631f39f18512a3642aefe52c3c0e0ccfad5a", size = 4408940, upload-time = "2025-10-01T00:29:04.42Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b5/c5e179772ec38adb1c072b3aa13937d2860509ba32b2462bf1dda153833b/cryptography-46.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c4b93af7920cdf80f71650769464ccf1fb49a4b56ae0024173c24c48eb6b1612", size = 3438518, upload-time = "2025-10-01T00:29:06.139Z" }, ] [[package]] @@ -1273,11 +1292,10 @@ wheels = [ [[package]] name = "dify-api" -version = "1.9.0" +version = "1.9.2" source = { virtual = "." } dependencies = [ { name = "arize-phoenix-otel" }, - { name = "authlib" }, { name = "azure-identity" }, { name = "beautifulsoup4" }, { name = "boto3" }, @@ -1308,10 +1326,8 @@ dependencies = [ { name = "json-repair" }, { name = "langfuse" }, { name = "langsmith" }, - { name = "mailchimp-transactional" }, { name = "markdown" }, { name = "numpy" }, - { name = "openai" }, { name = "openpyxl" }, { name = "opentelemetry-api" }, { name = "opentelemetry-distro" }, @@ -1322,8 +1338,8 @@ dependencies = [ { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-celery" }, { name = "opentelemetry-instrumentation-flask" }, + { name = "opentelemetry-instrumentation-httpx" }, { name = "opentelemetry-instrumentation-redis" }, - { name = "opentelemetry-instrumentation-requests" }, { name = "opentelemetry-instrumentation-sqlalchemy" }, { name = "opentelemetry-propagator-b3" }, { name = "opentelemetry-proto" }, @@ -1333,7 +1349,6 @@ dependencies = [ { name = "opik" }, { name = "packaging" }, { name = "pandas", extra = ["excel", "output-formatting", "performance"] }, - { name = "pandoc" }, { name = "psycogreen" }, { name = "psycopg2-binary" }, { name = "pycryptodome" }, @@ -1357,6 +1372,7 @@ dependencies = [ { name = "transformers" }, { name = "unstructured", extra = ["docx", "epub", "md", "ppt", "pptx"] }, { name = "weave" }, + { name = "weaviate-client" }, { name = "webvtt-py" }, { name = "yarl" }, ] @@ -1379,6 +1395,7 @@ dev = [ { name = "pytest-cov" }, { name = "pytest-env" }, { name = "pytest-mock" }, + { name = "pytest-timeout" }, { name = "ruff" }, { name = "scipy-stubs" }, { name = "sseclient-py" }, @@ -1417,8 +1434,6 @@ dev = [ { name = "types-pyyaml" }, { name = "types-redis" }, { name = "types-regex" }, - { name = "types-requests" }, - { name = "types-requests-oauthlib" }, { name = "types-setuptools" }, { name = "types-shapely" }, { name = "types-simplejson" }, @@ -1451,6 +1466,7 @@ vdb = [ { name = "couchbase" }, { name = "elasticsearch" }, { name = "mo-vector" }, + { name = "mysql-connector-python" }, { name = "opensearch-py" }, { name = "oracledb" }, { name = 
"pgvecto-rs", extra = ["sqlalchemy"] }, @@ -1471,7 +1487,6 @@ vdb = [ [package.metadata] requires-dist = [ { name = "arize-phoenix-otel", specifier = "~=0.9.2" }, - { name = "authlib", specifier = "==1.6.4" }, { name = "azure-identity", specifier = "==1.16.1" }, { name = "beautifulsoup4", specifier = "==4.12.2" }, { name = "boto3", specifier = "==1.35.99" }, @@ -1480,7 +1495,7 @@ requires-dist = [ { name = "celery", specifier = "~=5.5.2" }, { name = "chardet", specifier = "~=5.1.0" }, { name = "flask", specifier = "~=3.1.2" }, - { name = "flask-compress", specifier = "~=1.17" }, + { name = "flask-compress", specifier = ">=1.17,<1.18" }, { name = "flask-cors", specifier = "~=6.0.0" }, { name = "flask-login", specifier = "~=0.6.3" }, { name = "flask-migrate", specifier = "~=4.0.7" }, @@ -1502,10 +1517,8 @@ requires-dist = [ { name = "json-repair", specifier = ">=0.41.1" }, { name = "langfuse", specifier = "~=2.51.3" }, { name = "langsmith", specifier = "~=0.1.77" }, - { name = "mailchimp-transactional", specifier = "~=1.0.50" }, { name = "markdown", specifier = "~=3.5.1" }, { name = "numpy", specifier = "~=1.26.4" }, - { name = "openai", specifier = "~=1.61.0" }, { name = "openpyxl", specifier = "~=3.1.5" }, { name = "opentelemetry-api", specifier = "==1.27.0" }, { name = "opentelemetry-distro", specifier = "==0.48b0" }, @@ -1516,24 +1529,23 @@ requires-dist = [ { name = "opentelemetry-instrumentation", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-celery", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-flask", specifier = "==0.48b0" }, + { name = "opentelemetry-instrumentation-httpx", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-redis", specifier = "==0.48b0" }, - { name = "opentelemetry-instrumentation-requests", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.48b0" }, { name = "opentelemetry-propagator-b3", specifier = "==1.27.0" }, { name = "opentelemetry-proto", specifier = "==1.27.0" }, { name = "opentelemetry-sdk", specifier = "==1.27.0" }, { name = "opentelemetry-semantic-conventions", specifier = "==0.48b0" }, { name = "opentelemetry-util-http", specifier = "==0.48b0" }, - { name = "opik", specifier = "~=1.7.25" }, + { name = "opik", specifier = "~=1.8.72" }, { name = "packaging", specifier = "~=23.2" }, { name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" }, - { name = "pandoc", specifier = "~=2.4" }, { name = "psycogreen", specifier = "~=1.0.2" }, { name = "psycopg2-binary", specifier = "~=2.9.6" }, { name = "pycryptodome", specifier = "==3.19.1" }, { name = "pydantic", specifier = "~=2.11.4" }, { name = "pydantic-extra-types", specifier = "~=2.10.3" }, - { name = "pydantic-settings", specifier = "~=2.9.1" }, + { name = "pydantic-settings", specifier = "~=2.11.0" }, { name = "pyjwt", specifier = "~=2.10.1" }, { name = "pypdfium2", specifier = "==4.30.0" }, { name = "python-docx", specifier = "~=1.1.0" }, @@ -1551,6 +1563,7 @@ requires-dist = [ { name = "transformers", specifier = "~=4.56.1" }, { name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.16.1" }, { name = "weave", specifier = "~=0.51.0" }, + { name = "weaviate-client", specifier = "==4.17.0" }, { name = "webvtt-py", specifier = "~=0.5.1" }, { name = "yarl", specifier = "~=1.18.3" }, ] @@ -1573,10 +1586,11 @@ dev = [ { name = "pytest-cov", specifier = "~=4.1.0" }, { name = "pytest-env", specifier = "~=1.1.3" }, { name = "pytest-mock", 
specifier = "~=3.14.0" }, - { name = "ruff", specifier = "~=0.12.3" }, + { name = "pytest-timeout", specifier = ">=2.4.0" }, + { name = "ruff", specifier = "~=0.14.0" }, { name = "scipy-stubs", specifier = ">=1.15.3.0" }, { name = "sseclient-py", specifier = ">=1.8.0" }, - { name = "testcontainers", specifier = "~=4.10.0" }, + { name = "testcontainers", specifier = "~=4.13.2" }, { name = "ty", specifier = "~=0.0.1a19" }, { name = "types-aiofiles", specifier = "~=24.1.0" }, { name = "types-beautifulsoup4", specifier = "~=4.12.0" }, @@ -1611,8 +1625,6 @@ dev = [ { name = "types-pyyaml", specifier = "~=6.0.12" }, { name = "types-redis", specifier = ">=4.6.0.20241004" }, { name = "types-regex", specifier = "~=2024.11.6" }, - { name = "types-requests", specifier = "~=2.32.0" }, - { name = "types-requests-oauthlib", specifier = "~=2.0.0" }, { name = "types-setuptools", specifier = ">=80.9.0" }, { name = "types-shapely", specifier = "~=2.0.0" }, { name = "types-simplejson", specifier = ">=3.20.0" }, @@ -1622,10 +1634,10 @@ dev = [ { name = "types-ujson", specifier = ">=5.10.0" }, ] storage = [ - { name = "azure-storage-blob", specifier = "==12.13.0" }, + { name = "azure-storage-blob", specifier = "==12.26.0" }, { name = "bce-python-sdk", specifier = "~=0.9.23" }, - { name = "cos-python-sdk-v5", specifier = "==1.9.30" }, - { name = "esdk-obs-python", specifier = "==3.24.6.1" }, + { name = "cos-python-sdk-v5", specifier = "==1.9.38" }, + { name = "esdk-obs-python", specifier = "==3.25.8" }, { name = "google-cloud-storage", specifier = "==2.16.0" }, { name = "opendal", specifier = "~=0.46.0" }, { name = "oss2", specifier = "==2.18.5" }, @@ -1645,8 +1657,9 @@ vdb = [ { name = "couchbase", specifier = "~=4.3.0" }, { name = "elasticsearch", specifier = "==8.14.0" }, { name = "mo-vector", specifier = "~=0.1.13" }, + { name = "mysql-connector-python", specifier = ">=9.3.0" }, { name = "opensearch-py", specifier = "==2.4.0" }, - { name = "oracledb", specifier = "==3.0.0" }, + { name = "oracledb", specifier = "==3.3.0" }, { name = "pgvecto-rs", extras = ["sqlalchemy"], specifier = "~=0.2.1" }, { name = "pgvector", specifier = "==0.2.5" }, { name = "pymilvus", specifier = "~=2.5.0" }, @@ -1658,7 +1671,7 @@ vdb = [ { name = "tidb-vector", specifier = "==0.0.9" }, { name = "upstash-vector", specifier = "==0.6.0" }, { name = "volcengine-compat", specifier = "~=1.0.0" }, - { name = "weaviate-client", specifier = "~=3.24.0" }, + { name = "weaviate-client", specifier = "==4.17.0" }, { name = "xinference-client", specifier = "~=1.2.2" }, ] @@ -1728,18 +1741,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] -[[package]] -name = "ecdsa" -version = "0.19.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = 
"sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, -] - [[package]] name = "elastic-transport" version = "8.17.1" @@ -1767,21 +1768,23 @@ wheels = [ [[package]] name = "emoji" -version = "2.14.1" +version = "2.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/7d/01cddcbb6f5cc0ba72e00ddf9b1fa206c802d557fd0a20b18e130edf1336/emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b", size = 597182, upload-time = "2025-01-16T06:31:24.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/78/0d2db9382c92a163d7095fc08efff7800880f830a152cfced40161e7638d/emoji-2.15.0.tar.gz", hash = "sha256:eae4ab7d86456a70a00a985125a03263a5eac54cd55e51d7e184b1ed3b6757e4", size = 615483, upload-time = "2025-09-21T12:13:02.755Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/db/a0335710caaa6d0aebdaa65ad4df789c15d89b7babd9a30277838a7d9aac/emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b", size = 590617, upload-time = "2025-01-16T06:31:23.526Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433, upload-time = "2025-09-21T12:13:01.197Z" }, ] [[package]] name = "esdk-obs-python" -version = "3.24.6.1" +version = "3.25.8" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "crcmod" }, { name = "pycryptodome" }, + { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/af/d83276f9e288bd6a62f44d67ae1eafd401028ba1b2b643ae4014b51da5bd/esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0", size = 85798, upload-time = "2024-07-26T13:13:22.467Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/99/52362d6e081a642d6de78f6ab53baa5e3f82f2386c48954e18ee7b4ab22b/esdk-obs-python-3.25.8.tar.gz", hash = "sha256:aeded00b27ecd5a25ffaec38a2cc9416b51923d48db96c663f1a735f859b5273", size = 96302, upload-time = "2025-09-01T11:35:20.432Z" } [[package]] name = "et-xmlfile" @@ -1816,16 +1819,46 @@ wheels = [ [[package]] name = "fastapi" -version = "0.116.1" +version = "0.119.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/f9/5c5bcce82a7997cc0eb8c47b7800f862f6b56adc40486ed246e5010d443b/fastapi-0.119.0.tar.gz", hash = "sha256:451082403a2c1f0b99c6bd57c09110ed5463856804c8078d38e5a1f1035dbbb7", size = 336756, upload-time = "2025-10-11T17:13:40.53Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/70/584c4d7cad80f5e833715c0a29962d7c93b4d18eed522a02981a6d1b6ee5/fastapi-0.119.0-py3-none-any.whl", hash = "sha256:90a2e49ed19515320abb864df570dd766be0662c5d577688f1600170f7f73cf2", size = 107095, upload-time = "2025-10-11T17:13:39.048Z" }, +] + +[[package]] +name = "fastuuid" +version = "0.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/80/3c16a1edad2e6cd82fbd15ac998cc1b881f478bf1f80ca717d941c441874/fastuuid-0.13.5.tar.gz", hash = "sha256:d4976821ab424d41542e1ea39bc828a9d454c3f8a04067c06fca123c5b95a1a1", size = 18255, upload-time = "2025-09-26T09:05:38.281Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/ab/9351bfc04ff2144115758233130b5469993d3d379323903a4634cb9c78c1/fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c122558ca4b5487e2bd0863467e4ccfe636afd1274803741487d48f2e32ea0e1", size = 493910, upload-time = "2025-09-26T09:12:36.995Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ab/84fac529cc12a03d49595e70ac459380f7cb12c70f0fe401781b276f9e94/fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d7abd42a03a17a681abddd19aa4d44ca2747138cf8a48373b395cf1341a10de2", size = 252621, upload-time = "2025-09-26T09:12:22.222Z" }, + { url = "https://files.pythonhosted.org/packages/7f/9d/f4c734d7b74a04ca695781c58a1376f07b206fe2849e58e7778d476a0e94/fastuuid-0.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2705cf7c2d6f7c03053404b75a4c44f872a73f6f9d5ea34f1dc6bba400c4a97c", size = 244269, upload-time = "2025-09-26T09:08:31.921Z" }, + { url = "https://files.pythonhosted.org/packages/5b/da/b42b7eb84523d69cfe9dac82950e105061c8d59f4d4d2cc3e170dbd20937/fastuuid-0.13.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d220a056fcbad25932c1f25304261198612f271f4d150b2a84e81adb877daf7", size = 271528, upload-time = "2025-09-26T09:12:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/45/6eee36929119e9544b0906fd6591e685d682e4b51cfad4c25d96ccf04009/fastuuid-0.13.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f29f93b5a0c5f5579f97f77d5319e9bfefd61d8678ec59d850201544faf33bf", size = 272168, upload-time = "2025-09-26T09:07:04.238Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ac/75b70f13515e12194a25b0459dd8a8a33de4ab0a92142f0776d21e41ca84/fastuuid-0.13.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:399d86623fb806151b1feb9fdd818ebfc1d50387199a35f7264f98dfc1540af5", size = 290948, upload-time = "2025-09-26T09:07:53.433Z" }, + { url = "https://files.pythonhosted.org/packages/76/30/1801326a5b433aafc04eae906e6b005e8a3d1120fd996409fe88124edb06/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:689e8795a1edd573b2c9a455024e4edf605a9690339bba29709857f7180894ea", size = 452932, upload-time = "2025-09-26T09:09:28.017Z" }, + { url = "https://files.pythonhosted.org/packages/61/2a/080b6b2ac4ef2ead54a7463ae4162d66a52867bbd4447ad5354427b82ae2/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:25e82c4a1734da168b36f7308e397afbe9c9b353799a9c69563a605f11dd4641", size = 468384, upload-time = "2025-09-26T09:08:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d3/4a3ffcaf8d874f7f208dad7e98ded7c5359b6599073960e3aa0530ca6139/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f62299e3cca69aad6a6fb37e26e45055587954d498ad98903fea24382377ea0e", 
size = 444815, upload-time = "2025-09-26T09:06:38.691Z" }, + { url = "https://files.pythonhosted.org/packages/9d/a0/08dd8663f7bff3e9c0b2416708b01d1fb65f52bcd4bce18760f77c4735fd/fastuuid-0.13.5-cp311-cp311-win32.whl", hash = "sha256:68227f2230381b89fb1ad362ca6e433de85c6c11c36312b41757cad47b8a8e32", size = 144897, upload-time = "2025-09-26T09:14:53.695Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e2/2c2a37dcc56e2323c6214c38c8faac22f9d03d98c481f8a40843e0b9526a/fastuuid-0.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:4a32306982bd031cb20d5d1a726b7b958a55babebd2300ce6c8e352d3496e931", size = 150523, upload-time = "2025-09-26T09:12:24.031Z" }, + { url = "https://files.pythonhosted.org/packages/21/36/434f137c5970cac19e57834e1f7680e85301619d49891618c00666700c61/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35fe8045e866bc6846f8de6fa05acb1de0c32478048484a995e96d31e21dff2a", size = 494638, upload-time = "2025-09-26T09:14:58.695Z" }, + { url = "https://files.pythonhosted.org/packages/ca/3c/083de2ac007b2b305523b9c006dba5051e5afd87a626ef1a39f76e2c6b82/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:02a460333f52d731a006d18a52ef6fcb2d295a1f5b1a5938d30744191b2f77b7", size = 253138, upload-time = "2025-09-26T09:13:33.283Z" }, + { url = "https://files.pythonhosted.org/packages/73/5e/630cffa1c8775db526e39e9e4c5c7db0c27be0786bb21ba82c912ae19f63/fastuuid-0.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:74b0e4f8c307b9f477a5d7284db4431ce53a3c1e3f4173db7a97db18564a6202", size = 244521, upload-time = "2025-09-26T09:14:40.682Z" }, + { url = "https://files.pythonhosted.org/packages/4d/51/55d78705f4fbdadf88fb40f382f508d6c7a4941ceddd7825fafebb4cc778/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6955a99ef455c2986f3851f4e0ccc35dec56ac1a7720f2b92e88a75d6684512e", size = 271557, upload-time = "2025-09-26T09:15:09.75Z" }, + { url = "https://files.pythonhosted.org/packages/6a/2b/1b89e90a8635e5587ccdbbeb169c590672ce7637880f2c047482a0359950/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10c77b826738c1a27dcdaa92ea4dc1ec9d869748a99e1fde54f1379553d4854", size = 272334, upload-time = "2025-09-26T09:07:48.865Z" }, + { url = "https://files.pythonhosted.org/packages/0c/06/4c8207894eeb30414999e5c3f66ac039bc4003437eb4060d8a1bceb4cc6f/fastuuid-0.13.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb25dccbeb249d16d5e664f65f17ebec05136821d5ef462c4110e3f76b86fb86", size = 290594, upload-time = "2025-09-26T09:12:54.124Z" }, + { url = "https://files.pythonhosted.org/packages/50/69/96d221931a31d77a47cc2487bdfacfb3091edfc2e7a04b1795df1aec05df/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5becc646a3eeafb76ce0a6783ba190cd182e3790a8b2c78ca9db2b5e87af952", size = 452835, upload-time = "2025-09-26T09:14:00.994Z" }, + { url = "https://files.pythonhosted.org/packages/25/ef/bf045f0a47dcec96247497ef3f7a31d86ebc074330e2dccc34b8dbc0468a/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:69b34363752d06e9bb0dbdf02ae391ec56ac948c6f2eb00be90dad68e80774b9", size = 468225, upload-time = "2025-09-26T09:13:38.585Z" }, + { url = "https://files.pythonhosted.org/packages/30/46/4817ab5a3778927155a4bde92540d4c4fa996161ec8b8e080c8928b0984e/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57d0768afcad0eab8770c9b8cf904716bd3c547e8b9a4e755ee8a673b060a3a3", size = 444907, upload-time = 
"2025-09-26T09:14:30.163Z" }, + { url = "https://files.pythonhosted.org/packages/80/27/ab284117ce4dc9b356a7196bdbf220510285f201d27f1f078592cdc8187b/fastuuid-0.13.5-cp312-cp312-win32.whl", hash = "sha256:8ac6c6f5129d52eaa6ef9ea4b6e2f7c69468a053f3ab8e439661186b9c06bb85", size = 145415, upload-time = "2025-09-26T09:08:59.494Z" }, + { url = "https://files.pythonhosted.org/packages/f4/0c/f970a4222773b248931819f8940800b760283216ca3dda173ed027e94bdd/fastuuid-0.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:ad630e97715beefef07ec37c9c162336e500400774e2c1cbe1a0df6f80d15b9a", size = 150840, upload-time = "2025-09-26T09:13:46.115Z" }, ] [[package]] @@ -1842,11 +1875,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.19.1" +version = "3.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, ] [[package]] @@ -1877,17 +1910,18 @@ wheels = [ [[package]] name = "flask-compress" -version = "1.18" +version = "1.17" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "brotli", marker = "platform_python_implementation != 'PyPy'" }, { name = "brotlicffi", marker = "platform_python_implementation == 'PyPy'" }, { name = "flask" }, - { name = "pyzstd" }, + { name = "zstandard" }, + { name = "zstandard", marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/77/7d3c1b071e29c09bd796a84f95442f3c75f24a1f2a9f2c86c857579ab4ec/flask_compress-1.18.tar.gz", hash = "sha256:fdbae1bd8e334dfdc8b19549829163987c796fafea7fa1c63f9a4add23c8413a", size = 16571, upload-time = "2025-07-11T14:08:13.496Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/1f/260db5a4517d59bfde7b4a0d71052df68fb84983bda9231100e3b80f5989/flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8", size = 15733, upload-time = "2024-10-14T08:13:33.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/d8/953232867e42b5b91899e9c6c4a2b89218a5fbbdbbb4493f48729770de81/flask_compress-1.18-py3-none-any.whl", hash = "sha256:9c3b7defbd0f29a06e51617b910eab07bd4db314507e4edc4c6b02a2e139fda9", size = 9340, upload-time = "2025-07-11T14:08:12.275Z" }, + { url = "https://files.pythonhosted.org/packages/f7/54/ff08f947d07c0a8a5d8f1c8e57b142c97748ca912b259db6467ab35983cd/Flask_Compress-1.17-py3-none-any.whl", hash = 
"sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20", size = 8723, upload-time = "2024-10-14T08:13:31.726Z" }, ] [[package]] @@ -1945,19 +1979,19 @@ wheels = [ [[package]] name = "flask-restx" -version = "1.3.0" +version = "1.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aniso8601" }, { name = "flask" }, { name = "importlib-resources" }, { name = "jsonschema" }, - { name = "pytz" }, + { name = "referencing" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/4c/2e7d84e2b406b47cf3bf730f521efe474977b404ee170d8ea68dc37e6733/flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728", size = 2814072, upload-time = "2023-12-10T14:48:55.575Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/89/9b9ca58cbb8e9ec46f4a510ba93878e0c88d518bf03c350e3b1b7ad85cbe/flask-restx-1.3.2.tar.gz", hash = "sha256:0ae13d77e7d7e4dce513970cfa9db45364aef210e99022de26d2b73eb4dbced5", size = 2814719, upload-time = "2025-09-23T20:34:25.21Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/bf/1907369f2a7ee614dde5152ff8f811159d357e77962aa3f8c2e937f63731/flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691", size = 2798683, upload-time = "2023-12-10T14:48:53.293Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3f/b82cd8e733a355db1abb8297afbf59ec972c00ef90bf8d4eed287958b204/flask_restx-1.3.2-py2.py3-none-any.whl", hash = "sha256:6e035496e8223668044fc45bf769e526352fd648d9e159bd631d94fd645a687b", size = 2799859, upload-time = "2025-09-23T20:34:23.055Z" }, ] [[package]] @@ -1975,54 +2009,52 @@ wheels = [ [[package]] name = "flatbuffers" -version = "25.2.10" +version = "25.9.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170, upload-time = "2025-02-11T04:26:46.257Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/1f/3ee70b0a55137442038f2a33469cc5fddd7e0ad2abf83d7497c18a2b6923/flatbuffers-25.9.23.tar.gz", hash = "sha256:676f9fa62750bb50cf531b42a0a2a118ad8f7f797a511eda12881c016f093b12", size = 22067, upload-time = "2025-09-24T05:25:30.106Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1b/00a78aa2e8fbd63f9af08c9c19e6deb3d5d66b4dda677a0f61654680ee89/flatbuffers-25.9.23-py2.py3-none-any.whl", hash = "sha256:255538574d6cb6d0a79a17ec8bc0d30985913b87513a01cce8bcdb6b4c44d0e2", size = 30869, upload-time = "2025-09-24T05:25:28.912Z" }, ] [[package]] name = "frozenlist" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, - { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, - { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, - { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, - { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, - { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, - { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, - { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, - { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, - { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, - { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, - { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, - { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, - { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, - { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, - { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, - { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, 
upload-time = "2025-06-09T23:00:46.125Z" }, - { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, - { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, - { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, - { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, - { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, - { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, - { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, - { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, - { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, - { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = 
"2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] @@ -2400,51 +2432,51 @@ wheels = [ [[package]] name = "grimp" -version = "3.11" +version = "3.12" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/5e/1be34b2aed713fca8b9274805fc295d54f9806fccbfb15451fdb60066b23/grimp-3.11.tar.gz", hash = "sha256:920d069a6c591b830d661e0f7e78743d276e05df1072dc139fc2ee314a5e723d", size = 844989, upload-time = "2025-09-01T07:25:34.148Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/a4/463903a1cfbc19d3e7125d6614bb900df2b34dd675c7d93544d154819d2b/grimp-3.12.tar.gz", hash = "sha256:1a733b1d719c42bd2fada58240975fa7d09936b57120c34b64cfb31e42701010", size = 845594, upload-time = "2025-10-09T09:51:02.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/f1/39fa82cf6738cea7ae454a739a0b4a233ccc2905e2506821cdcad85fef1c/grimp-3.11-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8271906dadd01f9a866c411aa8c4f15cf0469d8476734d3672f55d1fdad05ddf", size = 2015949, upload-time = "2025-09-01T07:24:38.836Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a2/19209b8680899034c74340c115770b3f0fe6186b2a8779ce3e578aa3ab30/grimp-3.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb20844c1ec8729627dcbf8ca18fe6e2fb0c0cd34683c6134cd89542538d12a1", size = 1929047, upload-time = "2025-09-01T07:24:31.813Z" }, - { url = "https://files.pythonhosted.org/packages/ee/b1/cef086ed0fc3c1b2bba413f55cae25ebdd3ff11bc683639ba8fc29b09d7b/grimp-3.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e39c47886320b2980d14f31351377d824683748d5982c34283461853b5528102", size = 2093705, upload-time = "2025-09-01T07:23:18.927Z" }, - { url = "https://files.pythonhosted.org/packages/92/4a/6945c6a5267d01d2e321ba622d1fc138552bd2a69d220c6baafb60a128da/grimp-3.11-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1add91bf2e024321c770f1271799576d22a3f7527ed662e304f40e73c6a14138", size = 2045422, upload-time = "2025-09-01T07:23:31.571Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/1a/4bfb34cd6cbf4d712305c2f452e650772cbc43773f1484513375e9b83a31/grimp-3.11-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bb0bc0995de10135d3b5dc5dbe1450d88a0fa7331ec7885db31569ad61e4d9", size = 2194719, upload-time = "2025-09-01T07:24:13.206Z" }, - { url = "https://files.pythonhosted.org/packages/d6/93/e6d9f9a1fbc78df685b9e970c28d3339ae441f7da970567d65b63c7a199e/grimp-3.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9152657e63ad0dee6029fe612d5550fb1c029c987b496a53a4d49246e772bd7b", size = 2391047, upload-time = "2025-09-01T07:23:48.095Z" }, - { url = "https://files.pythonhosted.org/packages/0f/44/f28d0a88161a55751da335b22d252ef6e2fa3fa9e5111f5a5b26caa66e8f/grimp-3.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352ba7f1aba578315dddb00eff873e3fbc0c7386b3d64bbc1fe8e28d2e12eda2", size = 2241597, upload-time = "2025-09-01T07:24:00.354Z" }, - { url = "https://files.pythonhosted.org/packages/15/89/2957413b54c047e87f8ea6611929ef0bbaedbab00399166119b5a164a430/grimp-3.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1291a323bbf30b0387ee547655a693b034376d9354797a076c53839966149e3", size = 2153283, upload-time = "2025-09-01T07:24:22.706Z" }, - { url = "https://files.pythonhosted.org/packages/3d/83/69162edb2c49fff21a42fca68f51fbb93006a1b6a10c0f329a61a7a943e8/grimp-3.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d4b47faa3a35ccee75039343267d990f03c7f39af8abe01a99f41c83339c5df4", size = 2269299, upload-time = "2025-09-01T07:24:45.272Z" }, - { url = "https://files.pythonhosted.org/packages/5f/22/1bbf95e4bab491a847f0409d19d9c343a8c361ab1f2921b13318278d937a/grimp-3.11-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:cae0cc48584389df4f2ff037373cec5dbd4f3c7025583dc69724d5c453fc239b", size = 2305354, upload-time = "2025-09-01T07:24:57.413Z" }, - { url = "https://files.pythonhosted.org/packages/1f/fd/2d40ed913744202e5d7625936f8bd9e1d44d1a062abbfc25858e7c9acd6a/grimp-3.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3ba13bd9e58349c48a6d420a62f244b3eee2c47aedf99db64c44ba67d07e64d6", size = 2299647, upload-time = "2025-09-01T07:25:10.188Z" }, - { url = "https://files.pythonhosted.org/packages/15/be/6e721a258045285193a16f4be9e898f7df5cc28f0b903eb010d8a7035841/grimp-3.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2ee94b2a0ec7e8ca90d63a724d77527632ab3825381610bd36891fbcc49071", size = 2323713, upload-time = "2025-09-01T07:25:22.678Z" }, - { url = "https://files.pythonhosted.org/packages/5e/ad/0ae7a1753f4d60d5a9bebefd112bb83ef115541ec7b509565a9fbb712d60/grimp-3.11-cp311-cp311-win32.whl", hash = "sha256:b4810484e05300bc3dfffaeaaa89c07dcfd6e1712ddcbe2e14911c0da5737d40", size = 1707055, upload-time = "2025-09-01T07:25:43.719Z" }, - { url = "https://files.pythonhosted.org/packages/df/b7/af81165c2144043293b0729d6be92885c52a38aadff16e6ac9418baab30f/grimp-3.11-cp311-cp311-win_amd64.whl", hash = "sha256:31b9b8fd334dc959d3c3b0d7761f805decb628c4eac98ff7707c8b381576e48f", size = 1809864, upload-time = "2025-09-01T07:25:36.724Z" }, - { url = "https://files.pythonhosted.org/packages/06/ad/271c0f2b49be72119ad3724e4da3ba607c533c8aa2709078a51f21428fab/grimp-3.11-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2731b03deeea57ec3722325c3ebfa25b6ec4bc049d6b5a853ac45bb173843537", size = 2011143, upload-time = "2025-09-01T07:24:40.113Z" }, - { url = 
"https://files.pythonhosted.org/packages/40/85/858811346c77bbbe6e62ffaa5367f46990a30a47e77ce9f6c0f3d65a42bd/grimp-3.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39953c320e235e2fb7f0ad10b066ddd526ab26bc54b09dd45620999898ab2b33", size = 1927855, upload-time = "2025-09-01T07:24:33.468Z" }, - { url = "https://files.pythonhosted.org/packages/27/f8/5ce51d2fb641e25e187c10282a30f6c7f680dcc5938e0eb5670b7a08c735/grimp-3.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b363da88aa8aca5edc008c4473def9015f31d293493ca6c7e211a852b5ada6c", size = 2093246, upload-time = "2025-09-01T07:23:20.091Z" }, - { url = "https://files.pythonhosted.org/packages/09/17/217490c0d59bfcf254cb15c82d8292d6e67717cfa1b636a29f6368f59147/grimp-3.11-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dded52a319d31de2178a6e2f26da188b0974748e27af430756b3991478443b12", size = 2044921, upload-time = "2025-09-01T07:23:33.118Z" }, - { url = "https://files.pythonhosted.org/packages/04/85/54e5c723b2bd19c343c358866cc6359a38ccf980cf128ea2d7dfb5f59384/grimp-3.11-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9763b80ca072ec64384fae1ba54f18a00e88a36f527ba8dcf2e8456019e77de", size = 2195131, upload-time = "2025-09-01T07:24:14.496Z" }, - { url = "https://files.pythonhosted.org/packages/fd/15/8188cd73fff83055c1dca6e20c8315e947e2564ceaaf8b957b3ca7e1fa93/grimp-3.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e351c159834c84f723cfa1252f1b23d600072c362f4bfdc87df7eed9851004a", size = 2391156, upload-time = "2025-09-01T07:23:49.283Z" }, - { url = "https://files.pythonhosted.org/packages/c2/51/f2372c04b9b6e4628752ed9fc801bb05f968c8c4c4b28d78eb387ab96545/grimp-3.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19f2ab56e647cf65a2d6e8b2e02d5055b1a4cff72aee961cbd78afa0e9a1f698", size = 2245104, upload-time = "2025-09-01T07:24:01.54Z" }, - { url = "https://files.pythonhosted.org/packages/83/6d/bf4948b838bfc7d8c3f1da50f1bb2a8c44984af75845d41420aaa1b3f234/grimp-3.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30cc197decec63168a15c6c8a65ee8f2f095b4a7bf14244a4ed24e48b272843a", size = 2153265, upload-time = "2025-09-01T07:24:23.971Z" }, - { url = "https://files.pythonhosted.org/packages/52/18/ce2ff3f67adc286de245372b4ac163b10544635e1a86a2bc402502f1b721/grimp-3.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be27e9ecc4f8a9f96e5a09e8588b5785de289a70950b7c0c4b2bcafc96156a18", size = 2268265, upload-time = "2025-09-01T07:24:46.505Z" }, - { url = "https://files.pythonhosted.org/packages/23/b0/dc28cb7e01f578424c9efbb9a47273b14e5d3a2283197d019cbb5e6c3d4f/grimp-3.11-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab72874999a5a309a39ec91168f7e76c0acb7a81af2cc463431029202a661a5d", size = 2304895, upload-time = "2025-09-01T07:24:58.743Z" }, - { url = "https://files.pythonhosted.org/packages/9e/00/48916bf8284fc48f559ea4a9ccd47bd598493eac74dbb74c676780b664e7/grimp-3.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:55b08122a2896207ff09ffe349ad9f440a4382c092a7405191ac0512977a328f", size = 2299337, upload-time = "2025-09-01T07:25:11.886Z" }, - { url = "https://files.pythonhosted.org/packages/35/f9/6bcab18cdf1186185a6ae9abb4a5dcc43e19d46bc431becca65ac0ba1a71/grimp-3.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:54e6e5417bcd7ad44439ad1b8ef9e85f65332dcc42c9fbdbaf566da127a32d3d", size = 2322913, upload-time = "2025-09-01T07:25:24.529Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/19/023e45fe46603172df7c55ced127bc74fcd14b8f87505ea31ea6ae9f86bc/grimp-3.11-cp312-cp312-win32.whl", hash = "sha256:41d67c29a8737b4dd7ffe11deedc6f1cfea3ce1b845a72a20c4938e8dd85b2fa", size = 1707368, upload-time = "2025-09-01T07:25:45.096Z" }, - { url = "https://files.pythonhosted.org/packages/71/ef/3cbe04829d7416f4b3c06b096ad1972622443bd11833da4d98178da22637/grimp-3.11-cp312-cp312-win_amd64.whl", hash = "sha256:c3c6fc76e1e5db2733800490ee4d46a710a5b4ac23eaa8a2313489a6e7bc60e2", size = 1811752, upload-time = "2025-09-01T07:25:38.071Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6b/dca73b704e87609b4fb5170d97ae1e17fe25ffb4e8a6dee4ac21c31da9f4/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c634e77d4ee9959b618ca0526cb95d8eeaa7d716574d270fd4d880243e4e76", size = 2095005, upload-time = "2025-09-01T07:23:27.57Z" }, - { url = "https://files.pythonhosted.org/packages/35/f1/a7be1b866811eafa0798316baf988347cac10acaea1f48dbc4bc536bc82a/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41b55e2246aed2bd2f8a6c334b5c91c737d35fec9d1c1cd86884bff1b482ab9b", size = 2046301, upload-time = "2025-09-01T07:23:41.046Z" }, - { url = "https://files.pythonhosted.org/packages/d7/c5/15071e06972f2a04ccf7c0b9f6d0cd5851a7badc59ba3df5c4036af32275/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6400eff472b205787f5fc73d2b913534c5f1ddfacd5fbcacf9b0f46e3843898", size = 2194815, upload-time = "2025-09-01T07:24:20.256Z" }, - { url = "https://files.pythonhosted.org/packages/9f/27/73a08f322adeef2a3c2d22adb7089a0e6a134dae340293be265e70471166/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ddd0db48f1168bc430adae3b5457bf32bb9c7d479791d5f9f640fe752256d65", size = 2388925, upload-time = "2025-09-01T07:23:56.658Z" }, - { url = "https://files.pythonhosted.org/packages/9d/1b/4b372addef06433b37b035006cf102bc2767c3d573916a5ce6c9b50c96f5/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e744a031841413c06bd6e118e853b1e0f2d19a5081eee7c09bb7c4c8868ca81b", size = 2242506, upload-time = "2025-09-01T07:24:09.133Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2a/d618a74aa66a585ed09eebed981d71f6310ccd0c85fecdefca6a660338e3/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf5d4cbd033803ba433f445385f070759730f64f0798c75a11a3d60e7642bb9c", size = 2154028, upload-time = "2025-09-01T07:24:29.086Z" }, - { url = "https://files.pythonhosted.org/packages/2b/74/50255cc0af7b8a742d00b72ee6d825da8ce52b036260ee84d1e9e27a7fc7/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70cf9196180226384352360ba02e1f7634e00e8e999a65087f4e7383ece78afb", size = 2270008, upload-time = "2025-09-01T07:24:53.195Z" }, - { url = "https://files.pythonhosted.org/packages/42/a0/1f441584ce68b9b818cb18f8bad2aa7bef695853f2711fb648526e0237b9/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:e5a9df811aeb2f3d764070835f9ac65f240af154ba9ba23bda7a4c4d4ad46744", size = 2306660, upload-time = "2025-09-01T07:25:06.031Z" }, - { url = "https://files.pythonhosted.org/packages/35/e9/c1b61b030b286c7c117024676d88db52cdf8b504e444430d813170a6b9f6/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:23ceffc0a19e7b85107b137435fadd3d15a3883cbe0b65d7f93f3b33a6805af7", size = 2300281, upload-time = 
"2025-09-01T07:25:18.5Z" }, - { url = "https://files.pythonhosted.org/packages/44/d0/124a230725e1bff859c0ad193d6e2a64d2d1273d6ae66e04138dbd0f1ca6/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e57baac1360b90b944e2fd0321b490650113e5b927d013b26e220c2889f6f275", size = 2324348, upload-time = "2025-09-01T07:25:31.409Z" }, + { url = "https://files.pythonhosted.org/packages/0f/b5/1c89600bf181d41502aed51b73b3a5889158dee35c534f51df3666779587/grimp-3.12-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e6c02e51eebfcf71146d42f47c9ce353ac1902ae446e18d0e663ab9fdaa0496c", size = 2062043, upload-time = "2025-10-09T09:49:57.035Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/bab32c5e26949a82299853ccb28ee30a7899d0355b0d209b535eb03bc04e/grimp-3.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:79bc2b0ff6072c43c0ddc4479b25b7a8198795486478cfe3be0503b2c7d32c7f", size = 1981378, upload-time = "2025-10-09T09:49:49.237Z" }, + { url = "https://files.pythonhosted.org/packages/b5/03/b9f7e465488e8593de9a1e88355c3cfba04c02c3a34a6b02cbe946e0d587/grimp-3.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3986f11a9dd4167a2943cf6e80b458c0a825b48609713736cc8f2de135000810", size = 2130579, upload-time = "2025-10-09T09:48:36.035Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d0/81c776327354f32f86f321dd8468b32ba6b52dc3511d912d24c4fac96da4/grimp-3.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7a2abe55844f9dad25499ff9456d680496f390d160b6b3a4e5aeabc0183813b4", size = 2091201, upload-time = "2025-10-09T09:48:52.57Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7e/116ac4c1e4407a123fba4bb076b2e880643d70b3f4f1621c3323b5d66e12/grimp-3.12-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e59112d0f557335b619bcf10263d11873579230bd3df4a4b19224ec18e7212d6", size = 2240782, upload-time = "2025-10-09T09:49:30.915Z" }, + { url = "https://files.pythonhosted.org/packages/06/7f/89bbec1241a8504499975f0f08befea0cf3d27c52f9808602fff8075c639/grimp-3.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b858e2e5a489c36710322970aa82bfbd3f1c4107c8564960629a59d2f17a53d0", size = 2423143, upload-time = "2025-10-09T09:49:05.18Z" }, + { url = "https://files.pythonhosted.org/packages/86/d7/2f416439b624b2a91bf2e0e456f58d74d51aa7ad239099cf4a8911d952c0/grimp-3.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d46cc1222dd301e0be371b97f0cdecae178089704e8a285e3edd4750ec46270a", size = 2303850, upload-time = "2025-10-09T09:49:19.073Z" }, + { url = "https://files.pythonhosted.org/packages/60/bd/8c2f48c26151eb9a65bc41f01004b43cb1b31791ffb61758d40d2f6b485a/grimp-3.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef06822f75856af28e7fcc580034043c543b1c99b07d2bd467bd173a7f10691", size = 2168571, upload-time = "2025-10-09T09:49:39.844Z" }, + { url = "https://files.pythonhosted.org/packages/5a/45/01a839434ff88be24317aa52cc1ba158833bd1d071efe0da1b14838af024/grimp-3.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4c19f1cba8a95c898473dd18f9c81358019d67f87f140b0b8401550e6d21c5a3", size = 2310869, upload-time = "2025-10-09T09:50:05.153Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7b/0dc45fdc15562c2faf8a95a8685d3805d27decdef6fcfb66d9b577ed2f12/grimp-3.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:600e8dbc1cd9c6decbc22089730221c65591b7ba5f89751d07fc7ad014d99aa1", size = 2353397, upload-time = 
"2025-10-09T09:50:17.755Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ec/07734ecc4f1489ffc071417f7bc881c939bcfdfba10eb585bce510ede1b2/grimp-3.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:259ba53b82cfb9c2c2d097b2237970c4e9903fa2d0b664b7e12329d9a64924f9", size = 2350166, upload-time = "2025-10-09T09:50:32.237Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f5/45d80e2fa205066a484f0c1a667a249408a49bb3b665d62677f879920aa0/grimp-3.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a593549b1f66b1c12574e71f9e8c0073b372888c6b6706e2617bba2713ae28c2", size = 2360590, upload-time = "2025-10-09T09:50:49.961Z" }, + { url = "https://files.pythonhosted.org/packages/e6/f2/7ab1bc4d613189183c17741ff0d03490d9749eb5130b8b56e82ed77098b0/grimp-3.12-cp311-cp311-win32.whl", hash = "sha256:356ee969443f06c6c3a270f5a7221f946f0cb135a8b8ece2009990b293504bb3", size = 1748183, upload-time = "2025-10-09T09:51:13.503Z" }, + { url = "https://files.pythonhosted.org/packages/91/62/195f37a68d07fab40c8934ae8e39f9ff1f9a5bf3e375059b9cf14ccba302/grimp-3.12-cp311-cp311-win_amd64.whl", hash = "sha256:75e1f0d74f3a242a1c34e464d775c36b1c8b9d8c92b35f46f221e73e9b2f0065", size = 1851099, upload-time = "2025-10-09T09:51:04.747Z" }, + { url = "https://files.pythonhosted.org/packages/12/ac/0f55980a59c07439a965d3975f1cf3a6574f7d773910b9d6924790e0dddf/grimp-3.12-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:af399fc0ffddfbd7ea6c2e8546be1ab5284ee800f15a445705bdda5d63501b34", size = 2058862, upload-time = "2025-10-09T09:49:58.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b1/5fdcb1db7cb3253c78d87a0b8c3f7f9c5214b273861300b51c897c55e6b8/grimp-3.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f08358acbaf9a4b324537bf344fd2d76b5f9b6f1bfaf9a431e9453fc0eaee5f", size = 1977586, upload-time = "2025-10-09T09:49:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/e5f6d265b71430f9641daa9476cde8c23549e396c558b39a0bdc7fee824f/grimp-3.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eeb1616cafe9074fcb390fcfc01e6e5a0e0ddd5acb9dd37579985b2879c239a", size = 2130610, upload-time = "2025-10-09T09:48:38.472Z" }, + { url = "https://files.pythonhosted.org/packages/da/e1/2d0601c9aac2ab7340504e85ca4cd55f2991501a03e421bec78f53a07478/grimp-3.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99e648e299f7cd3daaee2cb745192e7ea159c7d38df76b4dcca12a2ef68a3ede", size = 2092775, upload-time = "2025-10-09T09:48:53.841Z" }, + { url = "https://files.pythonhosted.org/packages/db/a1/e63315477127ed8f31a1a93911d084bf704d6e126ca27650e3c3389701a6/grimp-3.12-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b24c5ce351030d1f83e69acd76a06863dd87041ceb25572339f7334e210cbc4", size = 2239336, upload-time = "2025-10-09T09:49:32.185Z" }, + { url = "https://files.pythonhosted.org/packages/f2/09/cd76d35121f053a95a58fc5830756c62e5c9de74aa4e16b4dc27ce6ada2c/grimp-3.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd40a5ec09d1dfafaae88b53231ab79378183e2e9a03e7b26b7a30133d027d8a", size = 2421851, upload-time = "2025-10-09T09:49:06.893Z" }, + { url = "https://files.pythonhosted.org/packages/40/46/e8390a7c5ed85b4dbeff4e873f1ece8d9acf72d72f084b397ccc2facfa3b/grimp-3.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aebdfad66d6f4e8b0f7364ce0429d208be3510918097f969428165074d3103e", size = 2304849, upload-time = "2025-10-09T09:49:20.695Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/81/f73edbc48a283f634233b6153ac43e4e7b9f58108ffc19da803b0015cb60/grimp-3.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76fd06be98d6bea9ea8a804da22c80accf1d277fe04abd5f3dff05d087f056f7", size = 2168655, upload-time = "2025-10-09T09:49:41.118Z" }, + { url = "https://files.pythonhosted.org/packages/84/1a/8fa5752f725b8872010627bd10e1aedccdb406c3b4118ec3fe127155284e/grimp-3.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a73a42a43e268ac5b196386beae1ec646f4572409e731bccf2a99ab4ed5c46bf", size = 2311124, upload-time = "2025-10-09T09:50:06.477Z" }, + { url = "https://files.pythonhosted.org/packages/83/a0/02d6b2a86289a4ac73f44f59aaee43c1dc936c984204c73d2affe4570eb6/grimp-3.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:af990af7d5e64f484d12cdefacfaaed4ea9418ac4d0a5a928953fd91aaf8df80", size = 2354216, upload-time = "2025-10-09T09:50:19.114Z" }, + { url = "https://files.pythonhosted.org/packages/7b/48/0368289f5bbdf943a48305824b30411b35ef2c7cd8edf2bad48d67b3897e/grimp-3.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:82ee28c1e9835572af2c733f7e5913a44193c53ae8ca488039164593b4a750fa", size = 2348372, upload-time = "2025-10-09T09:50:37.479Z" }, + { url = "https://files.pythonhosted.org/packages/26/73/b4f90b4926791d720f6069fc8c8b3e204721d1db839a1c00fbcee1e2a36d/grimp-3.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afdceaea00e305909cb30d68e91b94fcf71d1a7234052549ea31148785a03a52", size = 2361167, upload-time = "2025-10-09T09:50:51.733Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ae/94d34c732d531c7165c8942d7995495aac64e9bb5c28cc6751349eacdcde/grimp-3.12-cp312-cp312-win32.whl", hash = "sha256:40f8e048254d2437dffcd383d2301a82c35d9a3082e878b707d87a6e8c539614", size = 1747179, upload-time = "2025-10-09T09:51:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/48bc396ee2f36e72d5c50ba8b4d7f817fc2cdac7b9ab77d2b097f50a4447/grimp-3.12-cp312-cp312-win_amd64.whl", hash = "sha256:199172d17f22199bf400a0bd5c4985784622201e887a023fe799ca3f3437dedf", size = 1850691, upload-time = "2025-10-09T09:51:05.984Z" }, + { url = "https://files.pythonhosted.org/packages/d9/31/c72e53a46692dc8358cff1af1a9494430a0fecd4c3f2d0d8e9c2eb5e828d/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:567d037a3db083e54bee621daba59a2e01fd1391364ae0a0c737995f6eed910b", size = 2131392, upload-time = "2025-10-09T09:48:46.857Z" }, + { url = "https://files.pythonhosted.org/packages/39/10/15e43be32734baaebeee090dca16f06ea5ba933b209b8e1c0d5986dabb32/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9b4cc756c91c3d8582ee70b5e013c0e34fdb31c7f808cefe9d15509c45fec31e", size = 2092481, upload-time = "2025-10-09T09:49:00.754Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4a/c9349dee284c2d9384714741896f0f84a1d66011a69cdc364e4d94e188b1/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bd47f9a8619cb8966f18cb6faf5f6cb8d35ade99312477dd8e9de3a9ae4cb7", size = 2242260, upload-time = "2025-10-09T09:49:37.183Z" }, + { url = "https://files.pythonhosted.org/packages/d8/63/3935823f89c12320840bbf018858eeaca7d5285f9769a48921587a88adeb/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f30e01855c67a39857c87e6c0eafe5e8891010a35e06cf2145f2cfce8ea9780", size = 2422371, upload-time = "2025-10-09T09:49:14.616Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/8e/5a75c2335a2dc61738b19318dcdd16392015a984211e3d0b9f6679dc6c89/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07e825f6b052186dabd8dbbcc7e008a3b56e551725e2ba47169fe1e4bde76ac", size = 2304257, upload-time = "2025-10-09T09:49:26.908Z" }, + { url = "https://files.pythonhosted.org/packages/40/99/462d86bc9401a39859f272b867331a678f4b5324a539dc771bdae6d36309/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f1a1289d4282be2891ada75ec5d3099e856518c4236b1196e367b630485f8ce", size = 2169360, upload-time = "2025-10-09T09:49:46.575Z" }, + { url = "https://files.pythonhosted.org/packages/d0/07/6d2929f05dae189265633588819d990df35644ad74b6ec74207091dff18d/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:85136b555aeb7d3965fdb40af4e4af2011f911b0fde8c20979bf4db7b06455f5", size = 2312280, upload-time = "2025-10-09T09:50:13.491Z" }, + { url = "https://files.pythonhosted.org/packages/5c/47/7e49417e2c496da0b6141e711dca40726d2b30a0adc6db9d04b74c7bafa7/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:963efd6ec86e7b47fde835b2526b6be7a3f489857a1cd47a747c94b3e670550a", size = 2354449, upload-time = "2025-10-09T09:50:27.596Z" }, + { url = "https://files.pythonhosted.org/packages/2c/08/2e1db56797e4e26334b3ee4ef1a5fbf56155d74a0318215ed4dcad02ef43/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:c9e2ee478b66f0e20c92af6123142ffd6b604c36e9b3a8d391ea9172cc18b6b3", size = 2350545, upload-time = "2025-10-09T09:50:45.623Z" }, + { url = "https://files.pythonhosted.org/packages/37/78/53594064f11b0ae9e72b3e9df5c055f00c5bff44962f7b777846504fc50d/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8826362d4e403aa2e03d480e3e4d64284a6b6ccafc2c5777bb2bed2535bdc4e", size = 2361926, upload-time = "2025-10-09T09:50:58.605Z" }, ] [[package]] @@ -2463,30 +2495,33 @@ wheels = [ [[package]] name = "grpcio" -version = "1.74.0" +version = "1.75.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/77/b2f06db9f240a5abeddd23a0e49eae2b6ac54d85f0e5267784ce02269c3b/grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31", size = 5487368, upload-time = "2025-07-24T18:53:03.548Z" }, - { url = "https://files.pythonhosted.org/packages/48/99/0ac8678a819c28d9a370a663007581744a9f2a844e32f0fa95e1ddda5b9e/grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4", size = 10999804, upload-time = "2025-07-24T18:53:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/45/c6/a2d586300d9e14ad72e8dc211c7aecb45fe9846a51e558c5bca0c9102c7f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = 
"sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce", size = 5987667, upload-time = "2025-07-24T18:53:07.157Z" }, - { url = "https://files.pythonhosted.org/packages/c9/57/5f338bf56a7f22584e68d669632e521f0de460bb3749d54533fc3d0fca4f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3", size = 6655612, upload-time = "2025-07-24T18:53:09.244Z" }, - { url = "https://files.pythonhosted.org/packages/82/ea/a4820c4c44c8b35b1903a6c72a5bdccec92d0840cf5c858c498c66786ba5/grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182", size = 6219544, upload-time = "2025-07-24T18:53:11.221Z" }, - { url = "https://files.pythonhosted.org/packages/a4/17/0537630a921365928f5abb6d14c79ba4dcb3e662e0dbeede8af4138d9dcf/grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d", size = 6334863, upload-time = "2025-07-24T18:53:12.925Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a6/85ca6cb9af3f13e1320d0a806658dca432ff88149d5972df1f7b51e87127/grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f", size = 7019320, upload-time = "2025-07-24T18:53:15.002Z" }, - { url = "https://files.pythonhosted.org/packages/4f/a7/fe2beab970a1e25d2eff108b3cf4f7d9a53c185106377a3d1989216eba45/grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4", size = 6514228, upload-time = "2025-07-24T18:53:16.999Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c2/2f9c945c8a248cebc3ccda1b7a1bf1775b9d7d59e444dbb18c0014e23da6/grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b", size = 3817216, upload-time = "2025-07-24T18:53:20.564Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d1/a9cf9c94b55becda2199299a12b9feef0c79946b0d9d34c989de6d12d05d/grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11", size = 4495380, upload-time = "2025-07-24T18:53:22.058Z" }, - { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, - { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, - { url = "https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, - { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 
6621763, upload-time = "2025-07-24T18:53:29.193Z" }, - { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, - { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, - { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, - { url = "https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, - { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, + { url = "https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = "2025-09-26T09:01:39.474Z" }, + { 
url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, + { url = "https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = "https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, ] [[package]] @@ -2568,17 +2603,17 @@ wheels = [ [[package]] name = "hf-xet" -version = "1.1.9" +version = "1.1.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/0f/5b60fc28ee7f8cc17a5114a584fd6b86e11c3e0a6e142a7f97a161e9640a/hf_xet-1.1.9.tar.gz", hash = "sha256:c99073ce404462e909f1d5839b2d14a3827b8fe75ed8aed551ba6609c026c803", size = 484242, upload-time = "2025-08-27T23:05:19.441Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910, upload-time = "2025-09-12T20:10:27.12Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/12/56e1abb9a44cdef59a411fe8a8673313195711b5ecce27880eb9c8fa90bd/hf_xet-1.1.9-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:a3b6215f88638dd7a6ff82cb4e738dcbf3d863bf667997c093a3c990337d1160", size = 2762553, upload-time = "2025-08-27T23:05:15.153Z" }, - { url = "https://files.pythonhosted.org/packages/3a/e6/2d0d16890c5f21b862f5df3146519c182e7f0ae49b4b4bf2bd8a40d0b05e/hf_xet-1.1.9-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9b486de7a64a66f9a172f4b3e0dfe79c9f0a93257c501296a2521a13495a698a", size = 2623216, upload-time = "2025-08-27T23:05:13.778Z" }, - { url = "https://files.pythonhosted.org/packages/81/42/7e6955cf0621e87491a1fb8cad755d5c2517803cea174229b0ec00ff0166/hf_xet-1.1.9-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c5a840c2c4e6ec875ed13703a60e3523bc7f48031dfd750923b2a4d1a5fc3c", size = 3186789, upload-time = "2025-08-27T23:05:12.368Z" }, - { url = "https://files.pythonhosted.org/packages/df/8b/759233bce05457f5f7ec062d63bbfd2d0c740b816279eaaa54be92aa452a/hf_xet-1.1.9-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:96a6139c9e44dad1c52c52520db0fffe948f6bce487cfb9d69c125f254bb3790", size = 3088747, upload-time = "2025-08-27T23:05:10.439Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3c/28cc4db153a7601a996985bcb564f7b8f5b9e1a706c7537aad4b4809f358/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ad1022e9a998e784c97b2173965d07fe33ee26e4594770b7785a8cc8f922cd95", size = 3251429, upload-time = "2025-08-27T23:05:16.471Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/17/7caf27a1d101bfcb05be85850d4aa0a265b2e1acc2d4d52a48026ef1d299/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:86754c2d6d5afb11b0a435e6e18911a4199262fe77553f8c50d75e21242193ea", size = 3354643, upload-time = "2025-08-27T23:05:17.828Z" }, - { url = "https://files.pythonhosted.org/packages/cd/50/0c39c9eed3411deadcc98749a6699d871b822473f55fe472fad7c01ec588/hf_xet-1.1.9-cp37-abi3-win_amd64.whl", hash = "sha256:5aad3933de6b725d61d51034e04174ed1dce7a57c63d530df0014dea15a40127", size = 2804797, upload-time = "2025-08-27T23:05:20.77Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466, upload-time = "2025-09-12T20:10:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807, upload-time = "2025-09-12T20:10:21.118Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960, upload-time = "2025-09-12T20:10:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167, upload-time = "2025-09-12T20:10:17.255Z" }, + { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612, upload-time = "2025-09-12T20:10:24.093Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360, upload-time = "2025-09-12T20:10:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691, upload-time = "2025-09-12T20:10:28.433Z" }, ] [[package]] @@ -2668,24 +2703,24 @@ wheels = [ [[package]] name = "httptools" -version = "0.6.4" +version = "0.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029, upload-time = "2024-10-16T19:44:18.427Z" }, - { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492, upload-time = "2024-10-16T19:44:19.515Z" }, - { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891, upload-time = "2024-10-16T19:44:21.067Z" }, - { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788, upload-time = "2024-10-16T19:44:22.958Z" }, - { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214, upload-time = "2024-10-16T19:44:24.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120, upload-time = "2024-10-16T19:44:26.295Z" }, - { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565, upload-time = "2024-10-16T19:44:29.188Z" }, - { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 
510837, upload-time = "2024-10-16T19:44:33.974Z" }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, - { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, - { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = 
"2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, ] [[package]] @@ -2714,16 +2749,16 @@ socks = [ [[package]] name = "httpx-sse" -version = "0.4.1" +version = "0.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, ] [[package]] name = "huggingface-hub" -version = "0.34.4" +version = "0.35.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -2735,9 +2770,9 @@ dependencies = [ { name = "tqdm" }, { 
name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/c9/bdbe19339f76d12985bc03572f330a01a93c04dffecaaea3061bdd7fb892/huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c", size = 459768, upload-time = "2025-08-08T09:14:52.365Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hash = "sha256:350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a", size = 461798, upload-time = "2025-09-29T14:29:58.625Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/7b/bb06b061991107cd8783f300adff3e7b7f284e330fd82f507f2a1417b11d/huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a", size = 561452, upload-time = "2025-08-08T09:14:50.159Z" }, + { url = "https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl", hash = "sha256:0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba", size = 564262, upload-time = "2025-09-29T14:29:55.813Z" }, ] [[package]] @@ -2763,38 +2798,38 @@ wheels = [ [[package]] name = "hypothesis" -version = "6.138.15" +version = "6.140.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/68/adc338edec178cf6c08b4843ea2b2d639d47bed4b06ea9331433b71acc0a/hypothesis-6.138.15.tar.gz", hash = "sha256:6b0e1aa182eacde87110995a3543530d69ef411f642162a656efcd46c2823ad1", size = 466116, upload-time = "2025-09-08T05:34:15.956Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/7f/946343e32881b56adc0eba64e428ad2f85251f9ef16e3e4ec1b6ab80199b/hypothesis-6.140.3.tar.gz", hash = "sha256:4f4a09bf77af21e0cc3dffed1ea639812dc75d38f81308ec9fb0e33f8557b0cb", size = 466925, upload-time = "2025-10-04T22:29:44.499Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/49/911eb0cd17884a7a6f510e78acf0a70592e414d194695a0c7c1db91645b2/hypothesis-6.138.15-py3-none-any.whl", hash = "sha256:b7cf743d461c319eb251a13c8e1dcf00f4ef7085e4ab5bf5abf102b2a5ffd694", size = 533621, upload-time = "2025-09-08T05:34:12.272Z" }, + { url = "https://files.pythonhosted.org/packages/65/2a/0553ac2a8af432df92f2ffc05ca97e7ed64e00c97a371b019ae2690de325/hypothesis-6.140.3-py3-none-any.whl", hash = "sha256:a2cfff51641a58a56081f5c90ae1da6ccf3d043404f411805f7f0e0d75742d0e", size = 534534, upload-time = "2025-10-04T22:29:40.635Z" }, ] [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] name = "import-linter" -version = "2.4" +version = "2.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "grimp" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/db/33/e3c29beb4d8a33cfacdbe2858a3a4533694a0c1d0c060daaa761eff6d929/import_linter-2.4.tar.gz", hash = "sha256:4888fde83dd18bdbecd57ea1a98a1f3d52c6b6507d700f89f8678b44306c0ab4", size = 29942, upload-time = "2025-08-15T06:57:23.423Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/fd/49913b98fdeb5a8a120ca756abfc9aa7fdef7c20da1d728173e98ce11160/import_linter-2.5.2.tar.gz", hash = "sha256:d8f2dc6432975cc35edc4cc0bfcf1b811f05500b377ce0c3f62729d68f46c698", size = 159664, upload-time = "2025-10-09T10:53:24.635Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/11/2c108fc1138e506762db332c4a7ebc589cb379bc443939a81ec738b4cf73/import_linter-2.4-py3-none-any.whl", hash = "sha256:2ad6d5a164cdcd5ebdda4172cf0169f73dde1a8925ef7216672c321cd38f8499", size = 42355, upload-time = "2025-08-15T06:57:22.221Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f4/f20eeb9e6ab178ce011457cd936877202556f14b7af3ef2b3c3e26f3758a/import_linter-2.5.2-py3-none-any.whl", hash = "sha256:a70b64c2451dc6b96ff9ef5af4e3f6a2c8b63532a66a3c96a7c31ca086b10003", size = 44140, upload-time = "2025-10-09T10:53:23.367Z" }, ] [[package]] @@ -2874,34 +2909,35 @@ wheels = [ [[package]] name = "jiter" -version = "0.10.0" +version = "0.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473, upload-time = "2025-05-18T19:03:25.942Z" }, - { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971, upload-time = "2025-05-18T19:03:27.255Z" }, - { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574, upload-time = "2025-05-18T19:03:28.63Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028, upload-time = "2025-05-18T19:03:30.292Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083, upload-time = "2025-05-18T19:03:31.654Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821, upload-time = "2025-05-18T19:03:33.184Z" }, - { url = "https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174, upload-time = "2025-05-18T19:03:34.965Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869, upload-time = "2025-05-18T19:03:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741, upload-time = "2025-05-18T19:03:38.168Z" }, - { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527, upload-time = "2025-05-18T19:03:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765, upload-time = "2025-05-18T19:03:41.271Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234, upload-time = "2025-05-18T19:03:42.918Z" }, - { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, - { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, - { url = 
"https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, - { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, - { url = "https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, - { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, - { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, - { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, - { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, - { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, + { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, ] [[package]] @@ -2924,11 +2960,11 @@ wheels = [ [[package]] name = "json-repair" -version = "0.50.1" +version = "0.52.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/71/6d57ed93e43e98cdd124e82ab6231c6817f06a10743e7ae4bc6f66d03a02/json_repair-0.50.1.tar.gz", hash = "sha256:4ee69bc4be7330fbb90a3f19e890852c5fe1ceacec5ed1d2c25cdeeebdfaec76", size = 34864, upload-time = "2025-09-06T05:43:34.331Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/63/2c3c3c8cc1c28a0a20a9ab0eff5439c989ce3cc5956d8a4c7cf1eae0a06e/json_repair-0.52.0.tar.gz", hash = "sha256:0eee59cb3145b462b0734d4cf3246b797686caa669d52eee8dd30e09ea6d7876", size = 35384, upload-time = "2025-10-05T17:18:12.387Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/be/b1e05740d9c6f333dab67910f3894e2e2416c1ef00f9f7e20a327ab1f396/json_repair-0.50.1-py3-none-any.whl", hash = "sha256:9b78358bb7572a6e0b8effe7a8bd8cb959a3e311144842b1d2363fe39e2f13c5", size = 26020, upload-time = "2025-09-06T05:43:32.718Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7f/3a4e456da9a0f9ac54d9842ed51e96960826a98456f0826a9b3e808713c4/json_repair-0.52.0-py3-none-any.whl", hash = "sha256:c783069906a456f62e2a553fbef32a420a4745ff943e2014411728edcc7bf60a", size = 26350, upload-time = "2025-10-05T17:18:10.859Z" }, ] [[package]] @@ -2984,13 +3020,12 @@ wheels = [ [[package]] name = "kubernetes" -version = "33.1.0" +version = "34.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "durationpy" }, { name = "google-auth" }, - { name = "oauthlib" }, { name = "python-dateutil" }, { name = "pyyaml" }, { name = "requests" }, @@ -2999,9 +3034,9 @@ dependencies = [ { name = "urllib3" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779, upload-time = "2025-06-09T21:57:58.521Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/55/3f880ef65f559cbed44a9aa20d3bdbc219a2c3a3bac4a30a513029b03ee9/kubernetes-34.1.0.tar.gz", hash = "sha256:8fe8edb0b5d290a2f3ac06596b23f87c658977d46b5f8df9d0f4ea83d0003912", size = 1083771, upload-time = "2025-09-29T20:23:49.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/65f7d563aa4a62dd58777e8f6aa882f15db53b14eb29aba0c28a20f7eb26/kubernetes-34.1.0-py2.py3-none-any.whl", hash = 
"sha256:bffba2272534e224e6a7a74d582deb0b545b7c9879d2cd9e4aae9481d1f2cc2a", size = 2008380, upload-time = "2025-09-29T20:23:47.684Z" }, ] [[package]] @@ -3049,88 +3084,92 @@ wheels = [ [[package]] name = "litellm" -version = "1.63.7" +version = "1.77.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, { name = "click" }, + { name = "fastuuid" }, { name = "httpx" }, { name = "importlib-metadata" }, { name = "jinja2" }, { name = "jsonschema" }, { name = "openai" }, + { name = "pondpond" }, { name = "pydantic" }, { name = "python-dotenv" }, { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5c/7a/6c1994a239abd1b335001a46ae47fa055a24c493b6de19a9fa1872187fe9/litellm-1.63.7.tar.gz", hash = "sha256:2fbd7236d5e5379eee18556857ed62a5ed49f4f09e03ff33cf15932306b984f1", size = 6598034, upload-time = "2025-03-12T19:26:40.915Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/b7/0d3c6dbcff3064238d123f90ae96764a85352f3f5caab6695a55007fd019/litellm-1.77.4.tar.gz", hash = "sha256:ce652e10ecf5b36767bfdf58e53b2802e22c3de383b03554e6ee1a4a66fa743d", size = 10330773, upload-time = "2025-09-24T17:52:44.876Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/44/255c7ecb8b6f3f730a37422736509c21cb1bf4da66cc060d872005bda9f5/litellm-1.63.7-py3-none-any.whl", hash = "sha256:fbdee39a894506c68f158c6b4e0079f9e9c023441fff7215e7b8e42162dba0a7", size = 6909807, upload-time = "2025-03-12T19:26:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/3c/32/90f8587818d146d604ed6eec95f96378363fda06b14817399cc68853383e/litellm-1.77.4-py3-none-any.whl", hash = "sha256:66c2bb776f1e19ceddfa977a2bbf7f05e6f26c4b1fec8b2093bd171d842701b8", size = 9138493, upload-time = "2025-09-24T17:52:40.764Z" }, ] [[package]] name = "llvmlite" -version = "0.44.0" +version = "0.45.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/8d/5baf1cef7f9c084fb35a8afbde88074f0d6a727bc63ef764fe0e7543ba40/llvmlite-0.45.1.tar.gz", hash = "sha256:09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32", size = 185600, upload-time = "2025-10-01T17:59:52.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/e2/86b245397052386595ad726f9742e5223d7aea999b18c518a50e96c3aca4/llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3", size = 28132305, upload-time = "2025-01-20T11:12:53.936Z" }, - { url = "https://files.pythonhosted.org/packages/ff/ec/506902dc6870249fbe2466d9cf66d531265d0f3a1157213c8f986250c033/llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427", size = 26201090, upload-time = "2025-01-20T11:12:59.847Z" }, - { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858, upload-time = "2025-01-20T11:13:07.623Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c6/258801143975a6d09a373f2641237992496e15567b907a4d401839d671b8/llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955", size = 30331193, upload-time = "2025-01-20T11:13:26.976Z" }, - { url = "https://files.pythonhosted.org/packages/15/86/e3c3195b92e6e492458f16d233e58a1a812aa2bfbef9bdd0fbafcec85c60/llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad", size = 28132297, upload-time = "2025-01-20T11:13:32.57Z" }, - { url = "https://files.pythonhosted.org/packages/d6/53/373b6b8be67b9221d12b24125fd0ec56b1078b660eeae266ec388a6ac9a0/llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db", size = 26201105, upload-time = "2025-01-20T11:13:38.744Z" }, - { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, - { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3b/a9a17366af80127bd09decbe2a54d8974b6d8b274b39bf47fbaedeec6307/llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1", size = 30332380, upload-time = "2025-01-20T11:14:02.442Z" }, + { url = "https://files.pythonhosted.org/packages/04/ad/9bdc87b2eb34642c1cfe6bcb4f5db64c21f91f26b010f263e7467e7536a3/llvmlite-0.45.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:60f92868d5d3af30b4239b50e1717cb4e4e54f6ac1c361a27903b318d0f07f42", size = 43043526, upload-time = "2025-10-01T18:03:15.051Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ea/c25c6382f452a943b4082da5e8c1665ce29a62884e2ec80608533e8e82d5/llvmlite-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98baab513e19beb210f1ef39066288784839a44cd504e24fff5d17f1b3cf0860", size = 37253118, upload-time = "2025-10-01T18:04:06.783Z" }, + { url = "https://files.pythonhosted.org/packages/fe/af/85fc237de98b181dbbe8647324331238d6c52a3554327ccdc83ced28efba/llvmlite-0.45.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3adc2355694d6a6fbcc024d59bb756677e7de506037c878022d7b877e7613a36", size = 56288209, upload-time = "2025-10-01T18:01:00.168Z" }, + { url = "https://files.pythonhosted.org/packages/0a/df/3daf95302ff49beff4230065e3178cd40e71294968e8d55baf4a9e560814/llvmlite-0.45.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f3377a6db40f563058c9515dedcc8a3e562d8693a106a28f2ddccf2c8fcf6ca", size = 55140958, upload-time = "2025-10-01T18:02:11.199Z" }, 
+ { url = "https://files.pythonhosted.org/packages/a4/56/4c0d503fe03bac820ecdeb14590cf9a248e120f483bcd5c009f2534f23f0/llvmlite-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a", size = 38132232, upload-time = "2025-10-01T18:04:52.181Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7c/82cbd5c656e8991bcc110c69d05913be2229302a92acb96109e166ae31fb/llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e", size = 43043524, upload-time = "2025-10-01T18:03:30.666Z" }, + { url = "https://files.pythonhosted.org/packages/9d/bc/5314005bb2c7ee9f33102c6456c18cc81745d7055155d1218f1624463774/llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f", size = 37253123, upload-time = "2025-10-01T18:04:18.177Z" }, + { url = "https://files.pythonhosted.org/packages/96/76/0f7154952f037cb320b83e1c952ec4a19d5d689cf7d27cb8a26887d7bbc1/llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f", size = 56288211, upload-time = "2025-10-01T18:01:24.079Z" }, + { url = "https://files.pythonhosted.org/packages/00/b1/0b581942be2683ceb6862d558979e87387e14ad65a1e4db0e7dd671fa315/llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:779e2f2ceefef0f4368548685f0b4adde34e5f4b457e90391f570a10b348d433", size = 55140958, upload-time = "2025-10-01T18:02:30.482Z" }, + { url = "https://files.pythonhosted.org/packages/33/94/9ba4ebcf4d541a325fd8098ddc073b663af75cc8b065b6059848f7d4dce7/llvmlite-0.45.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e6c9949baf25d9aa9cd7cf0f6d011b9ca660dd17f5ba2b23bdbdb77cc86b116", size = 38132231, upload-time = "2025-10-01T18:05:03.664Z" }, ] [[package]] name = "lxml" -version = "6.0.1" +version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8f/bd/f9d01fd4132d81c6f43ab01983caea69ec9614b913c290a26738431a015d/lxml-6.0.1.tar.gz", hash = "sha256:2b3a882ebf27dd026df3801a87cf49ff791336e0f94b0fad195db77e01240690", size = 4070214, upload-time = "2025-08-22T10:37:53.525Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/c8/262c1d19339ef644cdc9eb5aad2e85bd2d1fa2d7c71cdef3ede1a3eed84d/lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6acde83f7a3d6399e6d83c1892a06ac9b14ea48332a5fbd55d60b9897b9570a", size = 8422719, upload-time = "2025-08-22T10:32:24.848Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d4/1b0afbeb801468a310642c3a6f6704e53c38a4a6eb1ca6faea013333e02f/lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d21c9cacb6a889cbb8eeb46c77ef2c1dd529cde10443fdeb1de847b3193c541", size = 4575763, upload-time = "2025-08-22T10:32:27.057Z" }, - { url = "https://files.pythonhosted.org/packages/5b/c1/8db9b5402bf52ceb758618313f7423cd54aea85679fcf607013707d854a8/lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:847458b7cd0d04004895f1fb2cca8e7c0f8ec923c49c06b7a72ec2d48ea6aca2", size = 4943244, upload-time = 
"2025-08-22T10:32:28.847Z" }, - { url = "https://files.pythonhosted.org/packages/e7/78/838e115358dd2369c1c5186080dd874a50a691fb5cd80db6afe5e816e2c6/lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1dc13405bf315d008fe02b1472d2a9d65ee1c73c0a06de5f5a45e6e404d9a1c0", size = 5081725, upload-time = "2025-08-22T10:32:30.666Z" }, - { url = "https://files.pythonhosted.org/packages/c7/b6/bdcb3a3ddd2438c5b1a1915161f34e8c85c96dc574b0ef3be3924f36315c/lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f540c229a8c0a770dcaf6d5af56a5295e0fc314fc7ef4399d543328054bcea", size = 5021238, upload-time = "2025-08-22T10:32:32.49Z" }, - { url = "https://files.pythonhosted.org/packages/73/e5/1bfb96185dc1a64c7c6fbb7369192bda4461952daa2025207715f9968205/lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:d2f73aef768c70e8deb8c4742fca4fd729b132fda68458518851c7735b55297e", size = 5343744, upload-time = "2025-08-22T10:32:34.385Z" }, - { url = "https://files.pythonhosted.org/packages/a2/ae/df3ea9ebc3c493b9c6bdc6bd8c554ac4e147f8d7839993388aab57ec606d/lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7f4066b85a4fa25ad31b75444bd578c3ebe6b8ed47237896341308e2ce923c3", size = 5223477, upload-time = "2025-08-22T10:32:36.256Z" }, - { url = "https://files.pythonhosted.org/packages/37/b3/65e1e33600542c08bc03a4c5c9c306c34696b0966a424a3be6ffec8038ed/lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0cce65db0cd8c750a378639900d56f89f7d6af11cd5eda72fde054d27c54b8ce", size = 4676626, upload-time = "2025-08-22T10:32:38.793Z" }, - { url = "https://files.pythonhosted.org/packages/7a/46/ee3ed8f3a60e9457d7aea46542d419917d81dbfd5700fe64b2a36fb5ef61/lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c372d42f3eee5844b69dcab7b8d18b2f449efd54b46ac76970d6e06b8e8d9a66", size = 5066042, upload-time = "2025-08-22T10:32:41.134Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b9/8394538e7cdbeb3bfa36bc74924be1a4383e0bb5af75f32713c2c4aa0479/lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2e2b0e042e1408bbb1c5f3cfcb0f571ff4ac98d8e73f4bf37c5dd179276beedd", size = 4724714, upload-time = "2025-08-22T10:32:43.94Z" }, - { url = "https://files.pythonhosted.org/packages/b3/21/3ef7da1ea2a73976c1a5a311d7cde5d379234eec0968ee609517714940b4/lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cc73bb8640eadd66d25c5a03175de6801f63c535f0f3cf50cac2f06a8211f420", size = 5247376, upload-time = "2025-08-22T10:32:46.263Z" }, - { url = "https://files.pythonhosted.org/packages/26/7d/0980016f124f00c572cba6f4243e13a8e80650843c66271ee692cddf25f3/lxml-6.0.1-cp311-cp311-win32.whl", hash = "sha256:7c23fd8c839708d368e406282d7953cee5134f4592ef4900026d84566d2b4c88", size = 3609499, upload-time = "2025-08-22T10:32:48.156Z" }, - { url = "https://files.pythonhosted.org/packages/b1/08/28440437521f265eff4413eb2a65efac269c4c7db5fd8449b586e75d8de2/lxml-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2516acc6947ecd3c41a4a4564242a87c6786376989307284ddb115f6a99d927f", size = 4036003, upload-time = "2025-08-22T10:32:50.662Z" }, - { url = "https://files.pythonhosted.org/packages/7b/dc/617e67296d98099213a505d781f04804e7b12923ecd15a781a4ab9181992/lxml-6.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:cb46f8cfa1b0334b074f40c0ff94ce4d9a6755d492e6c116adb5f4a57fb6ad96", size = 3679662, upload-time = "2025-08-22T10:32:52.739Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/a9/82b244c8198fcdf709532e39a1751943a36b3e800b420adc739d751e0299/lxml-6.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c03ac546adaabbe0b8e4a15d9ad815a281afc8d36249c246aecf1aaad7d6f200", size = 8422788, upload-time = "2025-08-22T10:32:56.612Z" }, - { url = "https://files.pythonhosted.org/packages/c9/8d/1ed2bc20281b0e7ed3e6c12b0a16e64ae2065d99be075be119ba88486e6d/lxml-6.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33b862c7e3bbeb4ba2c96f3a039f925c640eeba9087a4dc7a572ec0f19d89392", size = 4593547, upload-time = "2025-08-22T10:32:59.016Z" }, - { url = "https://files.pythonhosted.org/packages/76/53/d7fd3af95b72a3493bf7fbe842a01e339d8f41567805cecfecd5c71aa5ee/lxml-6.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a3ec1373f7d3f519de595032d4dcafae396c29407cfd5073f42d267ba32440d", size = 4948101, upload-time = "2025-08-22T10:33:00.765Z" }, - { url = "https://files.pythonhosted.org/packages/9d/51/4e57cba4d55273c400fb63aefa2f0d08d15eac021432571a7eeefee67bed/lxml-6.0.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03b12214fb1608f4cffa181ec3d046c72f7e77c345d06222144744c122ded870", size = 5108090, upload-time = "2025-08-22T10:33:03.108Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6e/5f290bc26fcc642bc32942e903e833472271614e24d64ad28aaec09d5dae/lxml-6.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:207ae0d5f0f03b30f95e649a6fa22aa73f5825667fee9c7ec6854d30e19f2ed8", size = 5021791, upload-time = "2025-08-22T10:33:06.972Z" }, - { url = "https://files.pythonhosted.org/packages/13/d4/2e7551a86992ece4f9a0f6eebd4fb7e312d30f1e372760e2109e721d4ce6/lxml-6.0.1-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:32297b09ed4b17f7b3f448de87a92fb31bb8747496623483788e9f27c98c0f00", size = 5358861, upload-time = "2025-08-22T10:33:08.967Z" }, - { url = "https://files.pythonhosted.org/packages/8a/5f/cb49d727fc388bf5fd37247209bab0da11697ddc5e976ccac4826599939e/lxml-6.0.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7e18224ea241b657a157c85e9cac82c2b113ec90876e01e1f127312006233756", size = 5652569, upload-time = "2025-08-22T10:33:10.815Z" }, - { url = "https://files.pythonhosted.org/packages/ca/b8/66c1ef8c87ad0f958b0a23998851e610607c74849e75e83955d5641272e6/lxml-6.0.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a07a994d3c46cd4020c1ea566345cf6815af205b1e948213a4f0f1d392182072", size = 5252262, upload-time = "2025-08-22T10:33:12.673Z" }, - { url = "https://files.pythonhosted.org/packages/1a/ef/131d3d6b9590e64fdbb932fbc576b81fcc686289da19c7cb796257310e82/lxml-6.0.1-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:2287fadaa12418a813b05095485c286c47ea58155930cfbd98c590d25770e225", size = 4710309, upload-time = "2025-08-22T10:33:14.952Z" }, - { url = "https://files.pythonhosted.org/packages/bc/3f/07f48ae422dce44902309aa7ed386c35310929dc592439c403ec16ef9137/lxml-6.0.1-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b4e597efca032ed99f418bd21314745522ab9fa95af33370dcee5533f7f70136", size = 5265786, upload-time = "2025-08-22T10:33:16.721Z" }, - { url = "https://files.pythonhosted.org/packages/11/c7/125315d7b14ab20d9155e8316f7d287a4956098f787c22d47560b74886c4/lxml-6.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9696d491f156226decdd95d9651c6786d43701e49f32bf23715c975539aa2b3b", size = 5062272, upload-time = 
"2025-08-22T10:33:18.478Z" }, - { url = "https://files.pythonhosted.org/packages/8b/c3/51143c3a5fc5168a7c3ee626418468ff20d30f5a59597e7b156c1e61fba8/lxml-6.0.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e4e3cd3585f3c6f87cdea44cda68e692cc42a012f0131d25957ba4ce755241a7", size = 4786955, upload-time = "2025-08-22T10:33:20.34Z" }, - { url = "https://files.pythonhosted.org/packages/11/86/73102370a420ec4529647b31c4a8ce8c740c77af3a5fae7a7643212d6f6e/lxml-6.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:45cbc92f9d22c28cd3b97f8d07fcefa42e569fbd587dfdac76852b16a4924277", size = 5673557, upload-time = "2025-08-22T10:33:22.282Z" }, - { url = "https://files.pythonhosted.org/packages/d7/2d/aad90afaec51029aef26ef773b8fd74a9e8706e5e2f46a57acd11a421c02/lxml-6.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:f8c9bcfd2e12299a442fba94459adf0b0d001dbc68f1594439bfa10ad1ecb74b", size = 5254211, upload-time = "2025-08-22T10:33:24.15Z" }, - { url = "https://files.pythonhosted.org/packages/63/01/c9e42c8c2d8b41f4bdefa42ab05448852e439045f112903dd901b8fbea4d/lxml-6.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1e9dc2b9f1586e7cd77753eae81f8d76220eed9b768f337dc83a3f675f2f0cf9", size = 5275817, upload-time = "2025-08-22T10:33:26.007Z" }, - { url = "https://files.pythonhosted.org/packages/bc/1f/962ea2696759abe331c3b0e838bb17e92224f39c638c2068bf0d8345e913/lxml-6.0.1-cp312-cp312-win32.whl", hash = "sha256:987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb", size = 3610889, upload-time = "2025-08-22T10:33:28.169Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/22c86a990b51b44442b75c43ecb2f77b8daba8c4ba63696921966eac7022/lxml-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc", size = 4010925, upload-time = "2025-08-22T10:33:29.874Z" }, - { url = "https://files.pythonhosted.org/packages/b2/21/dc0c73325e5eb94ef9c9d60dbb5dcdcb2e7114901ea9509735614a74e75a/lxml-6.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299", size = 3671922, upload-time = "2025-08-22T10:33:31.535Z" }, - { url = "https://files.pythonhosted.org/packages/41/37/41961f53f83ded57b37e65e4f47d1c6c6ef5fd02cb1d6ffe028ba0efa7d4/lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b556aaa6ef393e989dac694b9c95761e32e058d5c4c11ddeef33f790518f7a5e", size = 3903412, upload-time = "2025-08-22T10:37:40.758Z" }, - { url = "https://files.pythonhosted.org/packages/3d/47/8631ea73f3dc776fb6517ccde4d5bd5072f35f9eacbba8c657caa4037a69/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:64fac7a05ebb3737b79fd89fe5a5b6c5546aac35cfcfd9208eb6e5d13215771c", size = 4224810, upload-time = "2025-08-22T10:37:42.839Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b8/39ae30ca3b1516729faeef941ed84bf8f12321625f2644492ed8320cb254/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:038d3c08babcfce9dc89aaf498e6da205efad5b7106c3b11830a488d4eadf56b", size = 4329221, upload-time = "2025-08-22T10:37:45.223Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ea/048dea6cdfc7a72d40ae8ed7e7d23cf4a6b6a6547b51b492a3be50af0e80/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:445f2cee71c404ab4259bc21e20339a859f75383ba2d7fb97dfe7c163994287b", size = 4270228, upload-time = "2025-08-22T10:37:47.276Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/d4/c2b46e432377c45d611ae2f669aa47971df1586c1a5240675801d0f02bac/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e352d8578e83822d70bea88f3d08b9912528e4c338f04ab707207ab12f4b7aac", size = 4416077, upload-time = "2025-08-22T10:37:49.822Z" }, - { url = "https://files.pythonhosted.org/packages/b6/db/8f620f1ac62cf32554821b00b768dd5957ac8e3fd051593532be5b40b438/lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:51bd5d1a9796ca253db6045ab45ca882c09c071deafffc22e06975b7ace36300", size = 3518127, upload-time = "2025-08-22T10:37:51.66Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, + { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = 
"2025-09-22T04:01:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, + { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, ] [[package]] @@ -3167,19 +3206,10 @@ wheels = [ ] [[package]] -name = "mailchimp-transactional" -version = "1.0.56" +name = "madoka" +version = "0.7.1" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "python-dateutil" }, - { name = "requests" }, - { name = "six" }, - { name = "urllib3" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bc/cb60d02c00996839bbd87444a97d0ba5ac271b1a324001562afb8f685251/mailchimp_transactional-1.0.56-py3-none-any.whl", hash = "sha256:a76ea88b90a2d47d8b5134586aabbd3a96c459f6066d8886748ab59e50de36eb", size = 31660, upload-time = "2024-02-01T18:39:19.717Z" }, -] +sdist = { url 
= "https://files.pythonhosted.org/packages/da/eb/95288b1c4aa541eb296a6271e3f8c7ece03b78923ac47dbe95d2287d9f5e/madoka-0.7.1.tar.gz", hash = "sha256:e258baa84fc0a3764365993b8bf5e1b065383a6ca8c9f862fb3e3e709843fae7", size = 81413, upload-time = "2019-02-10T18:38:01.382Z" } [[package]] name = "mako" @@ -3216,30 +3246,32 @@ wheels = [ [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = 
"sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, ] [[package]] @@ -3342,16 +3374,16 @@ wheels = [ [[package]] name = "msal" -version = "1.33.0" +version = "1.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/da/81acbe0c1fd7e9e4ec35f55dadeba9833a847b9a6ba2e2d1e4432da901dd/msal-1.33.0.tar.gz", hash = "sha256:836ad80faa3e25a7d71015c990ce61f704a87328b1e73bcbb0623a18cbf17510", size = 153801, upload-time = "2025-07-22T19:36:33.693Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/5b/fbc73e91f7727ae1e79b21ed833308e99dc11cc1cd3d4717f579775de5e9/msal-1.33.0-py3-none-any.whl", hash = "sha256:c0cd41cecf8eaed733ee7e3be9e040291eba53b0f262d3ae9c58f38b04244273", size = 116853, upload-time = "2025-07-22T19:36:32.403Z" }, + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, ] [[package]] @@ -3366,65 +3398,49 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, ] -[[package]] -name = "msrest" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "certifi" }, - { name = "isodate" }, - { name = "requests" }, - { name = "requests-oauthlib" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/77/8397c8fb8fc257d8ea0fa66f8068e073278c65f05acb17dcb22a02bfdc42/msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9", size = 175332, upload-time = "2022-06-13T22:41:25.111Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/cf/f2966a2638144491f8696c27320d5219f48a072715075d168b31d3237720/msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", size = 85384, upload-time = "2022-06-13T22:41:22.42Z" }, -] - [[package]] name = "multidict" -version = "6.6.4" +version = "6.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, - { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, - { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, - { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, - { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, - { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, - { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, - { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, - { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, - { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", 
hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, - { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, - { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, - { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, - { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, - { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, - { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, - { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, - { 
url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, - { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, - { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, - { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, - { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, - { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, - { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, - { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, - { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, - { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, - { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, - { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 
41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] @@ -3455,14 +3471,14 @@ wheels = [ [[package]] name = "mypy-boto3-bedrock-runtime" -version = "1.40.21" +version = 
"1.40.41" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/ff/074a1e1425d04e7294c962803655e85e20e158734534ce8d302efaa8230a/mypy_boto3_bedrock_runtime-1.40.21.tar.gz", hash = "sha256:fa9401e86d42484a53803b1dba0782d023ab35c817256e707fbe4fff88aeb881", size = 28326, upload-time = "2025-08-29T19:25:09.405Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/38/79989f7bce998776ed1a01c17f3f58e7bc6f5fc2bcbdff929701526fa2f1/mypy_boto3_bedrock_runtime-1.40.41.tar.gz", hash = "sha256:ee9bda6d6d478c8d0995e84e884bdf1798e150d437974ae27c175774a58ffaa5", size = 28333, upload-time = "2025-09-29T19:26:04.804Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/02/9d3b881bee5552600c6f456e446069d5beffd2b7862b99e1e945d60d6a9b/mypy_boto3_bedrock_runtime-1.40.21-py3-none-any.whl", hash = "sha256:4c9ea181ef00cb3d15f9b051a50e3b78272122d24cd24ac34938efe6ddfecc62", size = 34149, upload-time = "2025-08-29T19:25:03.941Z" }, + { url = "https://files.pythonhosted.org/packages/3d/6c/d3431dadf473bb76aa590b1ed8cc91726a48b029b542eff9d3024f2d70b9/mypy_boto3_bedrock_runtime-1.40.41-py3-none-any.whl", hash = "sha256:d65dff200986ff06c6b3579ddcea102555f2067c8987fca379bf4f9ed8ba3121", size = 34181, upload-time = "2025-09-29T19:26:01.898Z" }, ] [[package]] @@ -3474,6 +3490,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] +[[package]] +name = "mysql-connector-python" +version = "9.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/77/2b45e6460d05b1f1b7a4c8eb79a50440b4417971973bb78c9ef6cad630a6/mysql_connector_python-9.4.0.tar.gz", hash = "sha256:d111360332ae78933daf3d48ff497b70739aa292ab0017791a33e826234e743b", size = 12185532, upload-time = "2025-07-22T08:02:05.788Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/0c/4365a802129be9fa63885533c38be019f1c6b6f5bcf8844ac53902314028/mysql_connector_python-9.4.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:7df1a8ddd182dd8adc914f6dc902a986787bf9599705c29aca7b2ce84e79d361", size = 17501627, upload-time = "2025-07-22T07:57:45.416Z" }, + { url = "https://files.pythonhosted.org/packages/c0/bf/ca596c00d7a6eaaf8ef2f66c9b23cd312527f483073c43ffac7843049cb4/mysql_connector_python-9.4.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3892f20472e13e63b1fb4983f454771dd29f211b09724e69a9750e299542f2f8", size = 18369494, upload-time = "2025-07-22T07:57:49.714Z" }, + { url = "https://files.pythonhosted.org/packages/25/14/6510a11ed9f80d77f743dc207773092c4ab78d5efa454b39b48480315d85/mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d3e87142103d71c4df647ece30f98e85e826652272ed1c74822b56f6acdc38e7", size = 33516187, upload-time = "2025-07-22T07:57:55.294Z" }, + { url = "https://files.pythonhosted.org/packages/16/a8/4f99d80f1cf77733ce9a44b6adb7f0dd7079e7afa51ca4826515ef0c3e16/mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b27fcd403436fe83bafb2fe7fcb785891e821e639275c4ad3b3bd1e25f533206", size = 33917818, upload-time = "2025-07-22T07:58:00.523Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/9c/127f974ca9d5ee25373cb5433da06bb1f36e05f2a6b7436da1fe9c6346b0/mysql_connector_python-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd6ff5afb9c324b0bbeae958c93156cce4168c743bf130faf224d52818d1f0ee", size = 16392378, upload-time = "2025-07-22T07:58:04.669Z" }, + { url = "https://files.pythonhosted.org/packages/03/7c/a543fb17c2dfa6be8548dfdc5879a0c7924cd5d1c79056c48472bb8fe858/mysql_connector_python-9.4.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:4efa3898a24aba6a4bfdbf7c1f5023c78acca3150d72cc91199cca2ccd22f76f", size = 17503693, upload-time = "2025-07-22T07:58:08.96Z" }, + { url = "https://files.pythonhosted.org/packages/cb/6e/c22fbee05f5cfd6ba76155b6d45f6261d8d4c1e36e23de04e7f25fbd01a4/mysql_connector_python-9.4.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:665c13e7402235162e5b7a2bfdee5895192121b64ea455c90a81edac6a48ede5", size = 18371987, upload-time = "2025-07-22T07:58:13.273Z" }, + { url = "https://files.pythonhosted.org/packages/b4/fd/f426f5f35a3d3180c7f84d1f96b4631be2574df94ca1156adab8618b236c/mysql_connector_python-9.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:815aa6cad0f351c1223ef345781a538f2e5e44ef405fdb3851eb322bd9c4ca2b", size = 33516214, upload-time = "2025-07-22T07:58:18.967Z" }, + { url = "https://files.pythonhosted.org/packages/45/5a/1b053ae80b43cd3ccebc4bb99a98826969b3b0f8adebdcc2530750ad76ed/mysql_connector_python-9.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b3436a2c8c0ec7052932213e8d01882e6eb069dbab33402e685409084b133a1c", size = 33918565, upload-time = "2025-07-22T07:58:25.28Z" }, + { url = "https://files.pythonhosted.org/packages/cb/69/36b989de675d98ba8ff7d45c96c30c699865c657046f2e32db14e78f13d9/mysql_connector_python-9.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:57b0c224676946b70548c56798d5023f65afa1ba5b8ac9f04a143d27976c7029", size = 16392563, upload-time = "2025-07-22T07:58:29.623Z" }, + { url = "https://files.pythonhosted.org/packages/36/34/b6165e15fd45a8deb00932d8e7d823de7650270873b4044c4db6688e1d8f/mysql_connector_python-9.4.0-py2.py3-none-any.whl", hash = "sha256:56e679169c704dab279b176fab2a9ee32d2c632a866c0f7cd48a8a1e2cf802c4", size = 406574, upload-time = "2025-07-22T07:59:08.394Z" }, +] + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -3494,7 +3529,7 @@ wheels = [ [[package]] name = "nltk" -version = "3.9.1" +version = "3.9.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -3502,74 +3537,74 @@ dependencies = [ { name = "regex" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = "2025-10-01T07:19:23.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442, upload-time = "2024-08-18T19:48:21.909Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" }, ] [[package]] name = "nodejs-wheel-binaries" -version = "22.19.0" +version = "22.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/ca/6033f80b7aebc23cb31ed8b09608b6308c5273c3522aedd043e8a0644d83/nodejs_wheel_binaries-22.19.0.tar.gz", hash = "sha256:e69b97ef443d36a72602f7ed356c6a36323873230f894799f4270a853932fdb3", size = 8060, upload-time = "2025-09-12T10:33:46.935Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/54/02f58c8119e2f1984e2572cc77a7b469dbaf4f8d171ad376e305749ef48e/nodejs_wheel_binaries-22.20.0.tar.gz", hash = "sha256:a62d47c9fd9c32191dff65bbe60261504f26992a0a19fe8b4d523256a84bd351", size = 8058, upload-time = "2025-09-26T09:48:00.906Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/a2/0d055fd1d8c9a7a971c4db10cf42f3bba57c964beb6cf383ca053f2cdd20/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:43eca1526455a1fb4cb777095198f7ebe5111a4444749c87f5c2b84645aaa72a", size = 50902454, upload-time = "2025-09-12T10:33:18.3Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f5/446f7b3c5be1d2f5145ffa3c9aac3496e06cdf0f436adeb21a1f95dd79a7/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:feb06709e1320790d34babdf71d841ec7f28e4c73217d733e7f5023060a86bfc", size = 51837860, upload-time = "2025-09-12T10:33:21.599Z" }, - { url = "https://files.pythonhosted.org/packages/1e/4e/d0a036f04fd0f5dc3ae505430657044b8d9853c33be6b2d122bb171aaca3/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9f5777292491430457c99228d3a267decf12a09d31246f0692391e3513285e", size = 57841528, upload-time = "2025-09-12T10:33:25.433Z" }, - { url = "https://files.pythonhosted.org/packages/e2/11/4811d27819f229cc129925c170db20c12d4f01ad366a0066f06d6eb833cf/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1392896f1a05a88a8a89b26e182d90fdf3020b4598a047807b91b65731e24c00", size = 58368815, upload-time = "2025-09-12T10:33:29.083Z" }, - { url = "https://files.pythonhosted.org/packages/6e/94/df41416856b980e38a7ff280cfb59f142a77955ccdbec7cc4260d8ab2e78/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9164c876644f949cad665e3ada00f75023e18f381e78a1d7b60ccbbfb4086e73", size = 59690937, upload-time = "2025-09-12T10:33:32.771Z" }, - { url = "https://files.pythonhosted.org/packages/d1/39/8d0d5f84b7616bdc4eca725f5d64a1cfcac3d90cf3f30cae17d12f8e987f/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6b4b75166134010bc9cfebd30dc57047796a27049fef3fc22316216d76bc0af7", size = 60751996, upload-time = "2025-09-12T10:33:36.962Z" }, - { url = "https://files.pythonhosted.org/packages/41/93/2d66b5b60055dd1de6e37e35bef563c15e4cafa5cfe3a6990e0ab358e515/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_amd64.whl", hash = "sha256:3f271f5abfc71b052a6b074225eca8c1223a0f7216863439b86feaca814f6e5a", size = 40026140, upload-time = "2025-09-12T10:33:40.33Z" }, - { url = "https://files.pythonhosted.org/packages/a3/46/c9cf7ff7e3c71f07ca8331c939afd09b6e59fc85a2944ea9411e8b29ce50/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_arm64.whl", 
hash = "sha256:666a355fe0c9bde44a9221cd543599b029045643c8196b8eedb44f28dc192e06", size = 38804500, upload-time = "2025-09-12T10:33:43.302Z" }, + { url = "https://files.pythonhosted.org/packages/24/6d/333e5458422f12318e3c3e6e7f194353aa68b0d633217c7e89833427ca01/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:455add5ac4f01c9c830ab6771dbfad0fdf373f9b040d3aabe8cca9b6c56654fb", size = 53246314, upload-time = "2025-09-26T09:47:32.536Z" }, + { url = "https://files.pythonhosted.org/packages/56/30/dcd6879d286a35b3c4c8f9e5e0e1bcf4f9e25fe35310fc77ecf97f915a23/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:5d8c12f97eea7028b34a84446eb5ca81829d0c428dfb4e647e09ac617f4e21fa", size = 53644391, upload-time = "2025-09-26T09:47:36.093Z" }, + { url = "https://files.pythonhosted.org/packages/58/be/c7b2e7aa3bb281d380a1c531f84d0ccfe225832dfc3bed1ca171753b9630/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a2b0989194148f66e9295d8f11bc463bde02cbe276517f4d20a310fb84780ae", size = 60282516, upload-time = "2025-09-26T09:47:39.88Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c5/8befacf4190e03babbae54cb0809fb1a76e1600ec3967ab8ee9f8fc85b65/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5c500aa4dc046333ecb0a80f183e069e5c30ce637f1c1a37166b2c0b642dc21", size = 60347290, upload-time = "2025-09-26T09:47:43.712Z" }, + { url = "https://files.pythonhosted.org/packages/c0/bd/cfffd1e334277afa0714962c6ec432b5fe339340a6bca2e5fa8e678e7590/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3279eb1b99521f0d20a850bbfc0159a658e0e85b843b3cf31b090d7da9f10dfc", size = 62178798, upload-time = "2025-09-26T09:47:47.752Z" }, + { url = "https://files.pythonhosted.org/packages/08/14/10b83a9c02faac985b3e9f5e65d63a34fc0f46b48d8a2c3e4caa3e1e7318/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d29705797b33bade62d79d8f106c2453c8a26442a9b2a5576610c0f7e7c351ed", size = 62772957, upload-time = "2025-09-26T09:47:51.266Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a9/c6a480259aa0d6b270aac2c6ba73a97444b9267adde983a5b7e34f17e45a/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_amd64.whl", hash = "sha256:4bd658962f24958503541963e5a6f2cc512a8cb301e48a69dc03c879f40a28ae", size = 40120431, upload-time = "2025-09-26T09:47:54.363Z" }, + { url = "https://files.pythonhosted.org/packages/42/b1/6a4eb2c6e9efa028074b0001b61008c9d202b6b46caee9e5d1b18c088216/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_arm64.whl", hash = "sha256:1fccac931faa210d22b6962bcdbc99269d16221d831b9a118bbb80fe434a60b8", size = 38844133, upload-time = "2025-09-26T09:47:57.357Z" }, ] [[package]] name = "numba" -version = "0.61.2" +version = "0.62.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "llvmlite" }, { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/20/33dbdbfe60e5fd8e3dbfde299d106279a33d9f8308346022316781368591/numba-0.62.1.tar.gz", hash = "sha256:7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161", size = 2749817, upload-time = 
"2025-09-29T10:46:31.551Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/97/c99d1056aed767503c228f7099dc11c402906b42a4757fec2819329abb98/numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2", size = 2775825, upload-time = "2025-04-09T02:57:43.442Z" }, - { url = "https://files.pythonhosted.org/packages/95/9e/63c549f37136e892f006260c3e2613d09d5120672378191f2dc387ba65a2/numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b", size = 2778695, upload-time = "2025-04-09T02:57:44.968Z" }, - { url = "https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, - { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, - { url = "https://files.pythonhosted.org/packages/0f/a4/2b309a6a9f6d4d8cfba583401c7c2f9ff887adb5d54d8e2e130274c0973f/numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1", size = 2831505, upload-time = "2025-04-09T02:57:50.108Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a0/c6b7b9c615cfa3b98c4c63f4316e3f6b3bbe2387740277006551784218cd/numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2", size = 2776626, upload-time = "2025-04-09T02:57:51.857Z" }, - { url = "https://files.pythonhosted.org/packages/92/4a/fe4e3c2ecad72d88f5f8cd04e7f7cff49e718398a2fac02d2947480a00ca/numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8", size = 2779287, upload-time = "2025-04-09T02:57:53.658Z" }, - { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, - { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/68/1d/ddb3e704c5a8fb90142bf9dc195c27db02a08a99f037395503bfbc1d14b3/numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18", size = 2831929, upload-time = "2025-04-09T02:57:58.45Z" }, + { url = "https://files.pythonhosted.org/packages/dd/5f/8b3491dd849474f55e33c16ef55678ace1455c490555337899c35826836c/numba-0.62.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f43e24b057714e480fe44bc6031de499e7cf8150c63eb461192caa6cc8530bc8", size = 2684279, upload-time = "2025-09-29T10:43:37.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/18/71969149bfeb65a629e652b752b80167fe8a6a6f6e084f1f2060801f7f31/numba-0.62.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:57cbddc53b9ee02830b828a8428757f5c218831ccc96490a314ef569d8342b7b", size = 2687330, upload-time = "2025-09-29T10:43:59.601Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7d/403be3fecae33088027bc8a95dc80a2fda1e3beff3e0e5fc4374ada3afbe/numba-0.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:604059730c637c7885386521bb1b0ddcbc91fd56131a6dcc54163d6f1804c872", size = 3739727, upload-time = "2025-09-29T10:42:45.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c3/3d910d08b659a6d4c62ab3cd8cd93c4d8b7709f55afa0d79a87413027ff6/numba-0.62.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6c540880170bee817011757dc9049dba5a29db0c09b4d2349295991fe3ee55f", size = 3445490, upload-time = "2025-09-29T10:43:12.692Z" }, + { url = "https://files.pythonhosted.org/packages/5b/82/9d425c2f20d9f0a37f7cb955945a553a00fa06a2b025856c3550227c5543/numba-0.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:03de6d691d6b6e2b76660ba0f38f37b81ece8b2cc524a62f2a0cfae2bfb6f9da", size = 2745550, upload-time = "2025-09-29T10:44:20.571Z" }, + { url = "https://files.pythonhosted.org/packages/5e/fa/30fa6873e9f821c0ae755915a3ca444e6ff8d6a7b6860b669a3d33377ac7/numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494", size = 2685346, upload-time = "2025-09-29T10:43:43.677Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d5/504ce8dc46e0dba2790c77e6b878ee65b60fe3e7d6d0006483ef6fde5a97/numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6", size = 2688139, upload-time = "2025-09-29T10:44:04.894Z" }, + { url = "https://files.pythonhosted.org/packages/50/5f/6a802741176c93f2ebe97ad90751894c7b0c922b52ba99a4395e79492205/numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59", size = 3796453, upload-time = "2025-09-29T10:42:52.771Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/efd21527d25150c4544eccc9d0b7260a5dec4b7e98b5a581990e05a133c0/numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9315cc5e441300e0ca07c828a627d92a6802bcbf27c5487f31ae73783c58da53", size = 3496451, upload-time = "2025-09-29T10:43:19.279Z" }, + { url = "https://files.pythonhosted.org/packages/80/44/79bfdab12a02796bf4f1841630355c82b5a69933b1d50eb15c7fa37dabe8/numba-0.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:44e3aa6228039992f058f5ebfcfd372c83798e9464297bdad8cc79febcf7891e", size = 2745552, upload-time = "2025-09-29T10:44:26.399Z" }, ] [[package]] name = "numexpr" -version = "2.12.1" +version = "2.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/08/211c9ae8a230f20976f3b0b9a3308264c62bd05caf92aba7c59beebf6049/numexpr-2.12.1.tar.gz", hash = "sha256:e239faed0af001d1f1ea02934f7b3bb2bb6711ddb98e7a7bef61be5f45ff54ab", size = 115053, upload-time = "2025-09-11T11:04:04.36Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/df/a1/e10d3812e352eeedacea964ae7078181f5da659f77f65f4ff75aca67372c/numexpr-2.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ac38131930d6a1c4760f384621b9bd6fd8ab557147e81b7bcce777d557ee81", size = 154204, upload-time = "2025-09-11T11:02:20.607Z" },
-    { url = "https://files.pythonhosted.org/packages/a2/fc/8e30453e82ffa2a25ccc263a69cb90bad4c195ce91d2c53c6d8699564b95/numexpr-2.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea09d6e669de2f7a92228d38d58ca0e59eeb83100a9b93b6467547ffdf93ceeb", size = 144226, upload-time = "2025-09-11T11:02:21.957Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/3a/4ea9dca5d82e8654ad54f788af6215d72ad9afc650f8f21098923391b8a8/numexpr-2.12.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:05ec71d3feae4a96c177d696de608d6003de96a0ed6c725e229d29c6ea495a2e", size = 422124, upload-time = "2025-09-11T11:02:23.017Z" },
-    { url = "https://files.pythonhosted.org/packages/4e/42/26432c6d691c2534edcdd66d8c8aefeac90a71b6c767ab569609d2683869/numexpr-2.12.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09375dbc588c1042e99963289bcf2092d427a27e680ad267fe7e83fd1913d57f", size = 411888, upload-time = "2025-09-11T11:02:24.525Z" },
-    { url = "https://files.pythonhosted.org/packages/49/20/c00814929daad00193e3d07f176066f17d83c064dec26699bd02e64cefbd/numexpr-2.12.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c6a16946a7a9c6fe6e68da87b822eaa9c2edb0e0d36885218c1b8122772f8068", size = 1387205, upload-time = "2025-09-11T11:02:25.701Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/1f/61c7d82321face677fb8fdd486c1a8fe64bcbcf184f65cc76c8ff2ee0c19/numexpr-2.12.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aa47f6d3798e9f9677acdea40ff6dd72fd0f2993b87fc1a85e120acbac99323b", size = 1434537, upload-time = "2025-09-11T11:02:26.937Z" },
-    { url = "https://files.pythonhosted.org/packages/09/0e/7996ad143e2a5b4f295da718dba70c2108e6070bcff494c4a55f0b19c315/numexpr-2.12.1-cp311-cp311-win32.whl", hash = "sha256:d77311ce7910c14ebf45dec6ac98a597493b63e146a86bfd94128bdcdd7d2a3f", size = 156808, upload-time = "2025-09-11T11:02:28.126Z" },
-    { url = "https://files.pythonhosted.org/packages/ce/7b/6ea78f0f5a39057cc10057bcd0d9e814ff60dc3698cbcd36b178c7533931/numexpr-2.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:4c3d6e524c4a386bc77cd3472b370c1bbe50e23c0a6d66960a006ad90db61d4d", size = 151235, upload-time = "2025-09-11T11:02:29.098Z" },
-    { url = "https://files.pythonhosted.org/packages/7b/17/817f21537fc7827b55691990e44f1260e295be7e68bb37d4bc8741439723/numexpr-2.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cba7e922b813fd46415fbeac618dd78169a6acb6bd10e6055c1cd8a8f8bebd6e", size = 153915, upload-time = "2025-09-11T11:02:30.15Z" },
-    { url = "https://files.pythonhosted.org/packages/0a/11/65d9d918339e6b9116f8cda9210249a3127843aef9f147d50cd2dad10d60/numexpr-2.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33e5f20bc5a64c163beeed6c57e75497247c779531266e255f93c76c57248a49", size = 144358, upload-time = "2025-09-11T11:02:31.173Z" },
-    { url = "https://files.pythonhosted.org/packages/64/1d/8d349126ea9c00002b574aa5310a5eb669d3cf4e82e45ff643aa01ac48fe/numexpr-2.12.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:59958402930d13fafbf8c9fdff5b0866f0ea04083f877743b235447725aaea97", size = 423752, upload-time = "2025-09-11T11:02:32.208Z" },
-    { url = "https://files.pythonhosted.org/packages/ba/4a/a16aba2aa141c6634bf619bf8d069942c3f875b71ae0650172bcff0200ec/numexpr-2.12.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12bb47518bfbc740afe4119fe141d20e715ab29e910250c96954d2794c0e6aa4", size = 413612, upload-time = "2025-09-11T11:02:33.656Z" },
"https://files.pythonhosted.org/packages/ba/4a/a16aba2aa141c6634bf619bf8d069942c3f875b71ae0650172bcff0200ec/numexpr-2.12.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12bb47518bfbc740afe4119fe141d20e715ab29e910250c96954d2794c0e6aa4", size = 413612, upload-time = "2025-09-11T11:02:33.656Z" }, - { url = "https://files.pythonhosted.org/packages/d0/61/91b85d42541a6517cc1a9f9dabc730acc56b724f4abdc5c84513558a0c79/numexpr-2.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e579d9a4a183f09affe102577e757e769150c0145c3ee46fbd00345d531d42b", size = 1388903, upload-time = "2025-09-11T11:02:35.229Z" }, - { url = "https://files.pythonhosted.org/packages/8d/58/2913b7938bd656e412fd41213dcd56cb72978a72d3b03636ab021eadc4ee/numexpr-2.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:69ba864878665f4289ef675997276439a854012044b442ce9048a03e39b8191e", size = 1436092, upload-time = "2025-09-11T11:02:36.363Z" }, - { url = "https://files.pythonhosted.org/packages/fc/31/c1863597c26d92554af29a3fff5b05d4c1885cf5450a690724c7cee04af9/numexpr-2.12.1-cp312-cp312-win32.whl", hash = "sha256:713410f76c0bbe08947c3d49477db05944ce0094449845591859e250866ba074", size = 156948, upload-time = "2025-09-11T11:02:37.518Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ca/c9bc0f460d352ab5934d659a4cb5bc9529e20e78ac60f906d7e41cbfbd42/numexpr-2.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:c32f934066608a32501e06d99b93e6f2dded33606905f9af40e1f4649973ae6e", size = 151370, upload-time = "2025-09-11T11:02:38.445Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a3/67999bdd1ed1f938d38f3fedd4969632f2f197b090e50505f7cc1fa82510/numexpr-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d03fcb4644a12f70a14d74006f72662824da5b6128bf1bcd10cc3ed80e64c34", size = 163195, upload-time = "2025-10-13T16:16:31.212Z" }, + { url = "https://files.pythonhosted.org/packages/25/95/d64f680ea1fc56d165457287e0851d6708800f9fcea346fc1b9957942ee6/numexpr-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2773ee1133f77009a1fc2f34fe236f3d9823779f5f75450e183137d49f00499f", size = 152088, upload-time = "2025-10-13T16:16:33.186Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7f/3bae417cb13ae08afd86d08bb0301c32440fe0cae4e6262b530e0819aeda/numexpr-2.14.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebe4980f9494b9f94d10d2e526edc29e72516698d3bf95670ba79415492212a4", size = 451126, upload-time = "2025-10-13T16:13:22.248Z" }, + { url = "https://files.pythonhosted.org/packages/4c/1a/edbe839109518364ac0bd9e918cf874c755bb2c128040e920f198c494263/numexpr-2.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a381e5e919a745c9503bcefffc1c7f98c972c04ec58fc8e999ed1a929e01ba6", size = 442012, upload-time = "2025-10-13T16:14:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/b1/be4ce99bff769a5003baddac103f34681997b31d4640d5a75c0e8ed59c78/numexpr-2.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d08856cfc1b440eb1caaa60515235369654321995dd68eb9377577392020f6cb", size = 1415975, upload-time = "2025-10-13T16:13:26.088Z" }, + { url = "https://files.pythonhosted.org/packages/e7/33/b33b8fdc032a05d9ebb44a51bfcd4b92c178a2572cd3e6c1b03d8a4b45b2/numexpr-2.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03130afa04edf83a7b590d207444f05a00363c9b9ea5d81c0f53b1ea13fad55a", size = 1464683, upload-time = "2025-10-13T16:14:58.87Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/b2/ddcf0ac6cf0a1d605e5aecd4281507fd79a9628a67896795ab2e975de5df/numexpr-2.14.1-cp311-cp311-win32.whl", hash = "sha256:db78fa0c9fcbaded3ae7453faf060bd7a18b0dc10299d7fcd02d9362be1213ed", size = 166838, upload-time = "2025-10-13T16:17:06.765Z" }, + { url = "https://files.pythonhosted.org/packages/64/72/4ca9bd97b2eb6dce9f5e70a3b6acec1a93e1fb9b079cb4cba2cdfbbf295d/numexpr-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e9b2f957798c67a2428be96b04bce85439bed05efe78eb78e4c2ca43737578e7", size = 160069, upload-time = "2025-10-13T16:17:08.752Z" }, + { url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790, upload-time = "2025-10-13T16:16:34.903Z" }, + { url = "https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196, upload-time = "2025-10-13T16:16:36.593Z" }, + { url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468, upload-time = "2025-10-13T16:13:29.531Z" }, + { url = "https://files.pythonhosted.org/packages/d9/43/560e9ba23c02c904b5934496486d061bcb14cd3ebba2e3cf0e2dccb6c22b/numexpr-2.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee6d4fbbbc368e6cdd0772734d6249128d957b3b8ad47a100789009f4de7083", size = 443631, upload-time = "2025-10-13T16:15:02.473Z" }, + { url = "https://files.pythonhosted.org/packages/7b/6c/78f83b6219f61c2c22d71ab6e6c2d4e5d7381334c6c29b77204e59edb039/numexpr-2.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a2839efa25f3c8d4133252ea7342d8f81226c7c4dda81f97a57e090b9d87a48", size = 1417670, upload-time = "2025-10-13T16:13:33.464Z" }, + { url = "https://files.pythonhosted.org/packages/0e/bb/1ccc9dcaf46281568ce769888bf16294c40e98a5158e4b16c241de31d0d3/numexpr-2.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9f9137f1351b310436662b5dc6f4082a245efa8950c3b0d9008028df92fefb9b", size = 1466212, upload-time = "2025-10-13T16:15:12.828Z" }, + { url = "https://files.pythonhosted.org/packages/31/9f/203d82b9e39dadd91d64bca55b3c8ca432e981b822468dcef41a4418626b/numexpr-2.14.1-cp312-cp312-win32.whl", hash = "sha256:36f8d5c1bd1355df93b43d766790f9046cccfc1e32b7c6163f75bcde682cda07", size = 166996, upload-time = "2025-10-13T16:17:10.369Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/ffe750b5452eb66de788c34e7d21ec6d886abb4d7c43ad1dc88ceb3d998f/numexpr-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:fdd886f4b7dbaf167633ee396478f0d0aa58ea2f9e7ccc3c6431019623e8d68f", size = 160187, upload-time = "2025-10-13T16:17:11.974Z" }, ] [[package]] @@ -3637,7 +3672,7 @@ wheels = [ [[package]] name = "onnxruntime" -version = "1.22.1" +version = "1.23.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coloredlogs" }, @@ -3648,19 +3683,21 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = 
"https://files.pythonhosted.org/packages/82/ff/4a1a6747e039ef29a8d4ee4510060e9a805982b6da906a3da2306b7a3be6/onnxruntime-1.22.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:f4581bccb786da68725d8eac7c63a8f31a89116b8761ff8b4989dc58b61d49a0", size = 34324148, upload-time = "2025-07-10T19:15:26.584Z" }, - { url = "https://files.pythonhosted.org/packages/0b/05/9f1929723f1cca8c9fb1b2b97ac54ce61362c7201434d38053ea36ee4225/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae7526cf10f93454beb0f751e78e5cb7619e3b92f9fc3bd51aa6f3b7a8977e5", size = 14473779, upload-time = "2025-07-10T19:15:30.183Z" }, - { url = "https://files.pythonhosted.org/packages/59/f3/c93eb4167d4f36ea947930f82850231f7ce0900cb00e1a53dc4995b60479/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6effa1299ac549a05c784d50292e3378dbbf010346ded67400193b09ddc2f04", size = 16460799, upload-time = "2025-07-10T19:15:33.005Z" }, - { url = "https://files.pythonhosted.org/packages/a8/01/e536397b03e4462d3260aee5387e6f606c8fa9d2b20b1728f988c3c72891/onnxruntime-1.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:f28a42bb322b4ca6d255531bb334a2b3e21f172e37c1741bd5e66bc4b7b61f03", size = 12689881, upload-time = "2025-07-10T19:15:35.501Z" }, - { url = "https://files.pythonhosted.org/packages/48/70/ca2a4d38a5deccd98caa145581becb20c53684f451e89eb3a39915620066/onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a", size = 34342883, upload-time = "2025-07-10T19:15:38.223Z" }, - { url = "https://files.pythonhosted.org/packages/29/e5/00b099b4d4f6223b610421080d0eed9327ef9986785c9141819bbba0d396/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928", size = 14473861, upload-time = "2025-07-10T19:15:42.911Z" }, - { url = "https://files.pythonhosted.org/packages/0a/50/519828a5292a6ccd8d5cd6d2f72c6b36ea528a2ef68eca69647732539ffa/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d", size = 16475713, upload-time = "2025-07-10T19:15:45.452Z" }, - { url = "https://files.pythonhosted.org/packages/5d/54/7139d463bb0a312890c9a5db87d7815d4a8cce9e6f5f28d04f0b55fcb160/onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87", size = 12690910, upload-time = "2025-07-10T19:15:47.478Z" }, + { url = "https://files.pythonhosted.org/packages/8a/61/ee52bb2c9402cd1a0d550fc65b826c174f8eed49677dd3833ac1bfc0e35a/onnxruntime-1.23.1-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:9ba6e52fb7bc2758a61d1e421d060cf71d5e4259f95ea8a6f72320ae4415f229", size = 17194265, upload-time = "2025-10-08T04:25:24.479Z" }, + { url = "https://files.pythonhosted.org/packages/d3/67/67122b7b4138815090e0d304c8893fefb77370066a847d08e185f04f75fe/onnxruntime-1.23.1-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:7f130f4b0d31ba17c8789053a641958d0d341d96a1bff578d613fb52ded218c2", size = 19150493, upload-time = "2025-10-08T04:24:21.839Z" }, + { url = "https://files.pythonhosted.org/packages/73/e6/66cebc4dcdb217ccb1027cfcbcc01d6399e999c294d986806991c144cbe7/onnxruntime-1.23.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b89fd116f20b70e1140a77286954a7715eb9347260ff2008ee7ec94994df039", size = 15216531, 
upload-time = "2025-10-08T04:24:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/38/47/083847220c4a429e272ce9407bc8c47fa77b62e0c787ef2cc94fe9776c1b/onnxruntime-1.23.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61139a29d536b71db6045c75462e593a53feecc19756dc222531971cd08e5efe", size = 17368047, upload-time = "2025-10-08T04:24:48.426Z" }, + { url = "https://files.pythonhosted.org/packages/ac/8e/b3d861a7d199fd9c6a0b4af9b5d813bcc853d2e4dd4dac2c70b6c23097ed/onnxruntime-1.23.1-cp311-cp311-win_amd64.whl", hash = "sha256:7973186e8eb66e32ea20cb238ae92b604091e4d1df632653ec830abf7584d0b3", size = 13466816, upload-time = "2025-10-08T04:25:15.037Z" }, + { url = "https://files.pythonhosted.org/packages/00/3c/4b4f56b5df4596d1d95aafe13cbc987d050a89364ff5b2f90308376901fb/onnxruntime-1.23.1-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:564d6add1688efdb0720cf2158b50314fc35b744ad2623155ee3b805c381d9ce", size = 17194708, upload-time = "2025-10-08T04:25:27.188Z" }, + { url = "https://files.pythonhosted.org/packages/b4/97/05529b97142c1a09bde2caefea4fd29f71329b9275b52bacdbc2c4f9e964/onnxruntime-1.23.1-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:3864c39307714eff1753149215ad86324a9372e3172a0275d5b16ffd296574bf", size = 19152841, upload-time = "2025-10-08T04:24:24.157Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b9/1232fd295fa9c818aa2a7883d87a2f864fb5edee56ec757c6e857fdd1863/onnxruntime-1.23.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e6b6b5ea80a96924f67fe1e5519f6c6f9cd716fdb5a4fd1ecb4f2b0971e8d00", size = 15223749, upload-time = "2025-10-08T04:24:08.088Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b0/4663a333a82c77f159e48fe8639b1f03e4a05036625be9129c20c4d71d12/onnxruntime-1.23.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:576502dad714ffe5f3b4e1918c5b3368766b222063c585e5fd88415c063e4c80", size = 17378483, upload-time = "2025-10-08T04:24:50.712Z" }, + { url = "https://files.pythonhosted.org/packages/7c/60/8100d98690cbf1de03e08d1f3eff33ff00c652806c7130658a48a8f60584/onnxruntime-1.23.1-cp312-cp312-win_amd64.whl", hash = "sha256:1b89b7c4d4c00a67debc2b0a1484d7f51b23fef85fbd80ac83ed2d17b2161bd6", size = 13467773, upload-time = "2025-10-08T04:25:17.097Z" }, ] [[package]] name = "openai" -version = "1.61.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -3672,9 +3709,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/cf/61e71ce64cf0a38f029da0f9a5f10c9fa0e69a7a977b537126dac50adfea/openai-1.61.1.tar.gz", hash = "sha256:ce1851507218209961f89f3520e06726c0aa7d0512386f0f977e3ac3e4f2472e", size = 350784, upload-time = "2025-02-05T14:34:15.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/90/8f26554d24d63ed4f94d33c24271559863223a67e624f4d2e65ba8e48dca/openai-2.3.0.tar.gz", hash = "sha256:8d213ee5aaf91737faea2d7fc1cd608657a5367a18966372a3756ceaabfbd812", size = 589616, upload-time = "2025-10-10T01:12:50.851Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/b6/2e2a011b2dc27a6711376808b4cd8c922c476ea0f1420b39892117fa8563/openai-1.61.1-py3-none-any.whl", hash = "sha256:72b0826240ce26026ac2cd17951691f046e5be82ad122d20a8e1b30ca18bd11e", size = 463126, upload-time = "2025-02-05T14:34:13.643Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/5b/4be258ff072ed8ee15f6bfd8d5a1a4618aa4704b127c0c5959212ad177d6/openai-2.3.0-py3-none-any.whl", hash = "sha256:a7aa83be6f7b0ab2e4d4d7bcaf36e3d790874c0167380c5d0afd0ed99a86bd7b", size = 999768, upload-time = "2025-10-10T01:12:48.647Z" }, ] [[package]] @@ -3695,7 +3732,7 @@ wheels = [ [[package]] name = "openinference-instrumentation" -version = "0.1.38" +version = "0.1.40" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "openinference-semantic-conventions" }, @@ -3703,18 +3740,18 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/87/71c599f804203077f3766e7c6ce831cdfd0ca202278c35877a704e00b2cf/openinference_instrumentation-0.1.38.tar.gz", hash = "sha256:b45e5d19b5c0d14e884a11ed5b888deda03d955c6e6f4478d8cefd3edaea089d", size = 23749, upload-time = "2025-09-02T21:06:22.025Z" } +sdist = { url = "https://files.pythonhosted.org/packages/37/59/750c25a353260a72287e618b9ccabd57f02db6bfd571c6dbf132202abeff/openinference_instrumentation-0.1.40.tar.gz", hash = "sha256:3080785479793a56023806c71dccbc39418925947407667794c651f992f700a2", size = 23824, upload-time = "2025-10-10T03:48:48.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/f7/72bd2dbb8bbdd785512c9d128f2056e2eaadccfaecb09d2ae59bde6d4af2/openinference_instrumentation-0.1.38-py3-none-any.whl", hash = "sha256:5c45d73c5f3c79e9d9e44fbf4b2c3bdae514be74396cc1880cb845b9b7acc78f", size = 29885, upload-time = "2025-09-02T21:06:20.845Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fd/2b6ea9d95f3eb1deba10975a14b80d7fe79528258111771580a0437d4f44/openinference_instrumentation-0.1.40-py3-none-any.whl", hash = "sha256:d2e894f25addb1dfba563789213139876c5a01fca0a1fa8aa52a455a988a11d4", size = 29967, upload-time = "2025-10-10T03:48:46.518Z" }, ] [[package]] name = "openinference-semantic-conventions" -version = "0.1.21" +version = "0.1.24" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/0f/b794eb009846d4b10af50e205a323ca359f284563ef4d1778f35a80522ac/openinference_semantic_conventions-0.1.21.tar.gz", hash = "sha256:328405b9f79ff72a659c7712b8429c0d7ea68c6a4a1679e3eb44372aa228119b", size = 12534, upload-time = "2025-06-13T05:22:18.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/15/be7566a4bba4b57f7c70b088f42735f2005e2c0adce646a537f63dcf21de/openinference_semantic_conventions-0.1.24.tar.gz", hash = "sha256:3223b8c3958525457a369d58ebf0c56230a1f00567ae1e99f1c2049a8ac2cacd", size = 12741, upload-time = "2025-10-10T03:49:13.987Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/4d/092766f8e610f2c513e483c4adc892eea1634945022a73371fe01f621165/openinference_semantic_conventions-0.1.21-py3-none-any.whl", hash = "sha256:acde8282c20da1de900cdc0d6258a793ec3eb8031bfc496bd823dae17d32e326", size = 10167, upload-time = "2025-06-13T05:22:18.118Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c5/fa81b19042b387826151f984a91fa3d0b52b08374e4d5786521ac2d9e704/openinference_semantic_conventions-0.1.24-py3-none-any.whl", hash = "sha256:b2d650ca7e39c5fb02bf908b8049d6ece2a2657757448e1925a38b59548a80b3", size = 10373, upload-time = "2025-10-10T03:49:00.318Z" }, ] [[package]] @@ -3911,6 +3948,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = 
"sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588, upload-time = "2024-08-28T21:26:58.504Z" }, ] +[[package]] +name = "opentelemetry-instrumentation-httpx" +version = "0.48b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/d9/c65d818607c16d1b7ea8d2de6111c6cecadf8d2fd38c1885a72733a7c6d3/opentelemetry_instrumentation_httpx-0.48b0.tar.gz", hash = "sha256:ee977479e10398931921fb995ac27ccdeea2e14e392cb27ef012fc549089b60a", size = 16931, upload-time = "2024-08-28T21:28:03.794Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/fe/f2daa9d6d988c093b8c7b1d35df675761a8ece0b600b035dc04982746c9d/opentelemetry_instrumentation_httpx-0.48b0-py3-none-any.whl", hash = "sha256:d94f9d612c82d09fe22944d1904a30a464c19bea2ba76be656c99a28ad8be8e5", size = 13900, upload-time = "2024-08-28T21:27:01.566Z" }, +] + [[package]] name = "opentelemetry-instrumentation-redis" version = "0.48b0" @@ -3926,21 +3978,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610, upload-time = "2024-08-28T21:27:18.759Z" }, ] -[[package]] -name = "opentelemetry-instrumentation-requests" -version = "0.48b0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "opentelemetry-instrumentation" }, - { name = "opentelemetry-semantic-conventions" }, - { name = "opentelemetry-util-http" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/52/ac/5eb78efde21ff21d0ad5dc8c6cc6a0f8ae482ce8a46293c2f45a628b6166/opentelemetry_instrumentation_requests-0.48b0.tar.gz", hash = "sha256:67ab9bd877a0352ee0db4616c8b4ae59736ddd700c598ed907482d44f4c9a2b3", size = 14120, upload-time = "2024-08-28T21:28:16.933Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/df/0df9226d1b14f29d23c07e6194b9fd5ad50e7d987b7fd13df7dcf718aeb1/opentelemetry_instrumentation_requests-0.48b0-py3-none-any.whl", hash = "sha256:d4f01852121d0bd4c22f14f429654a735611d4f7bf3cf93f244bdf1489b2233d", size = 12366, upload-time = "2024-08-28T21:27:20.771Z" }, -] - [[package]] name = "opentelemetry-instrumentation-sqlalchemy" version = "0.48b0" @@ -4035,7 +4072,7 @@ wheels = [ [[package]] name = "opik" -version = "1.7.43" +version = "1.8.74" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3-stubs", extra = ["bedrock-runtime"] }, @@ -4054,21 +4091,21 @@ dependencies = [ { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/52/cea0317bc3207bc967b48932781995d9cdb2c490e7e05caa00ff660f7205/opik-1.7.43.tar.gz", hash = "sha256:0b02522b0b74d0a67b141939deda01f8bb69690eda6b04a7cecb1c7f0649ccd0", size = 326886, upload-time = "2025-07-07T10:30:07.715Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/31/887f133aa82aeb4cb8a01d98ad6ae73cb0580c2c9395d76bae1d67dbb6f6/opik-1.8.74.tar.gz", hash = "sha256:4b18248dbd741dab16dab399c1ab7197f1f6c6775ee06285ff07d3d22e1810de", size = 412504, upload-time = "2025-10-13T13:43:03.117Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/76/ae/f3566bdc3c49a1a8f795b1b6e726ef211c87e31f92d870ca6d63999c9bbf/opik-1.7.43-py3-none-any.whl", hash = "sha256:a66395c8b5ea7c24846f72dafc70c74d5b8f24ffbc4c8a1b3a7f9456e550568d", size = 625356, upload-time = "2025-07-07T10:30:06.389Z" }, + { url = "https://files.pythonhosted.org/packages/37/5d/11c12e2471880effa7a597d96bce848271fa93007f7f543ed607fb31822a/opik-1.8.74-py3-none-any.whl", hash = "sha256:34ffbff2c447da117e58bcc2fdf53b3b534dd1ffe9a293eb912f5419fc9904c3", size = 772547, upload-time = "2025-10-13T13:43:01.29Z" }, ] [[package]] name = "optype" -version = "0.13.4" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/7f/daa32a35b2a6a564a79723da49c0ddc464c462e67a906fc2b66a0d64f28e/optype-0.13.4.tar.gz", hash = "sha256:131d8e0f1c12d8095d553e26b54598597133830983233a6a2208886e7a388432", size = 99547, upload-time = "2025-08-19T19:52:44.242Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/ca/d3a2abcf12cc8c18ccac1178ef87ab50a235bf386d2401341776fdad18aa/optype-0.14.0.tar.gz", hash = "sha256:925cf060b7d1337647f880401f6094321e7d8e837533b8e159b9a92afa3157c6", size = 100880, upload-time = "2025-10-01T04:49:56.232Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/bb/b51940f2d91071325d5ae2044562aa698470a105474d9317b9dbdaad63df/optype-0.13.4-py3-none-any.whl", hash = "sha256:500c89cfac82e2f9448a54ce0a5d5c415b6976b039c2494403cd6395bd531979", size = 87919, upload-time = "2025-08-19T19:52:41.314Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/11b0eb65eeafa87260d36858b69ec4e0072d09e37ea6714280960030bc93/optype-0.14.0-py3-none-any.whl", hash = "sha256:50d02edafd04edf2e5e27d6249760a51b2198adb9f6ffd778030b3d2806b026b", size = 89465, upload-time = "2025-10-01T04:49:54.674Z" }, ] [package.optional-dependencies] @@ -4079,23 +4116,23 @@ numpy = [ [[package]] name = "oracledb" -version = "3.0.0" +version = "3.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/39/712f797b75705c21148fa1d98651f63c2e5cc6876e509a0a9e2f5b406572/oracledb-3.0.0.tar.gz", hash = "sha256:64dc86ee5c032febc556798b06e7b000ef6828bb0252084f6addacad3363db85", size = 840431, upload-time = "2025-03-03T19:36:12.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/c9/fae18fa5d803712d188486f8e86ad4f4e00316793ca19745d7c11092c360/oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0", size = 811776, upload-time = "2025-07-29T22:34:10.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/bf/d872c4b3fc15cd3261fe0ea72b21d181700c92dbc050160e161654987062/oracledb-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:52daa9141c63dfa75c07d445e9bb7f69f43bfb3c5a173ecc48c798fe50288d26", size = 4312963, upload-time = "2025-03-03T19:36:32.576Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ea/01ee29e76a610a53bb34fdc1030f04b7669c3f80b25f661e07850fc6160e/oracledb-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af98941789df4c6aaaf4338f5b5f6b7f2c8c3fe6f8d6a9382f177f350868747a", size = 2661536, upload-time = "2025-03-03T19:36:34.904Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/8e/ad380e34a46819224423b4773e58c350bc6269643c8969604097ced8c3bc/oracledb-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9812bb48865aaec35d73af54cd1746679f2a8a13cbd1412ab371aba2e39b3943", size = 2867461, upload-time = "2025-03-03T19:36:36.508Z" }, - { url = "https://files.pythonhosted.org/packages/96/09/ecc4384a27fd6e1e4de824ae9c160e4ad3aaebdaade5b4bdcf56a4d1ff63/oracledb-3.0.0-cp311-cp311-win32.whl", hash = "sha256:6c27fe0de64f2652e949eb05b3baa94df9b981a4a45fa7f8a991e1afb450c8e2", size = 1752046, upload-time = "2025-03-03T19:36:38.313Z" }, - { url = "https://files.pythonhosted.org/packages/62/e8/f34bde24050c6e55eeba46b23b2291f2dd7fd272fa8b322dcbe71be55778/oracledb-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f922709672002f0b40997456f03a95f03e5712a86c61159951c5ce09334325e0", size = 2101210, upload-time = "2025-03-03T19:36:40.669Z" }, - { url = "https://files.pythonhosted.org/packages/6f/fc/24590c3a3d41e58494bd3c3b447a62835138e5f9b243d9f8da0cfb5da8dc/oracledb-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:acd0e747227dea01bebe627b07e958bf36588a337539f24db629dc3431d3f7eb", size = 4351993, upload-time = "2025-03-03T19:36:42.577Z" }, - { url = "https://files.pythonhosted.org/packages/b7/b6/1f3b0b7bb94d53e8857d77b2e8dbdf6da091dd7e377523e24b79dac4fd71/oracledb-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8b402f77c22af031cd0051aea2472ecd0635c1b452998f511aa08b7350c90a4", size = 2532640, upload-time = "2025-03-03T19:36:45.066Z" }, - { url = "https://files.pythonhosted.org/packages/72/1a/1815f6c086ab49c00921cf155ff5eede5267fb29fcec37cb246339a5ce4d/oracledb-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:378a27782e9a37918bd07a5a1427a77cb6f777d0a5a8eac9c070d786f50120ef", size = 2765949, upload-time = "2025-03-03T19:36:47.47Z" }, - { url = "https://files.pythonhosted.org/packages/33/8d/208900f8d372909792ee70b2daad3f7361181e55f2217c45ed9dff658b54/oracledb-3.0.0-cp312-cp312-win32.whl", hash = "sha256:54a28c2cb08316a527cd1467740a63771cc1c1164697c932aa834c0967dc4efc", size = 1709373, upload-time = "2025-03-03T19:36:49.67Z" }, - { url = "https://files.pythonhosted.org/packages/0c/5e/c21754f19c896102793c3afec2277e2180aa7d505e4d7fcca24b52d14e4f/oracledb-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8289bad6d103ce42b140e40576cf0c81633e344d56e2d738b539341eacf65624", size = 2056452, upload-time = "2025-03-03T19:36:51.363Z" }, + { url = "https://files.pythonhosted.org/packages/3f/35/95d9a502fdc48ce1ef3a513ebd027488353441e15aa0448619abb3d09d32/oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0", size = 3963945, upload-time = "2025-07-29T22:34:28.633Z" }, + { url = "https://files.pythonhosted.org/packages/16/a7/8f1ef447d995bb51d9fdc36356697afeceb603932f16410c12d52b2df1a4/oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7", size = 2449385, upload-time = "2025-07-29T22:34:30.592Z" }, + { url = "https://files.pythonhosted.org/packages/b3/fa/6a78480450bc7d256808d0f38ade3385735fb5a90dab662167b4257dcf94/oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22", size = 2634943, upload-time = "2025-07-29T22:34:33.142Z" }, + { url = "https://files.pythonhosted.org/packages/5b/90/ea32b569a45fb99fac30b96f1ac0fb38b029eeebb78357bc6db4be9dde41/oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1", size = 1483549, upload-time = "2025-07-29T22:34:35.015Z" }, + { url = "https://files.pythonhosted.org/packages/81/55/ae60f72836eb8531b630299f9ed68df3fe7868c6da16f820a108155a21f9/oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a", size = 1834737, upload-time = "2025-07-29T22:34:36.824Z" }, + { url = "https://files.pythonhosted.org/packages/08/a8/f6b7809d70e98e113786d5a6f1294da81c046d2fa901ad656669fc5d7fae/oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8", size = 3943512, upload-time = "2025-07-29T22:34:39.237Z" }, + { url = "https://files.pythonhosted.org/packages/df/b9/8145ad8991f4864d3de4a911d439e5bc6cdbf14af448f3ab1e846a54210c/oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9", size = 2276258, upload-time = "2025-07-29T22:34:41.547Z" }, + { url = "https://files.pythonhosted.org/packages/56/bf/f65635ad5df17d6e4a2083182750bb136ac663ff0e9996ce59d77d200f60/oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e", size = 2458811, upload-time = "2025-07-29T22:34:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/7d/30/e0c130b6278c10b0e6cd77a3a1a29a785c083c549676cf701c5d180b8e63/oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813", size = 1445525, upload-time = "2025-07-29T22:34:46.603Z" }, + { url = "https://files.pythonhosted.org/packages/1a/5c/7254f5e1a33a5d6b8bf6813d4f4fdcf5c4166ec8a7af932d987879d5595c/oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4", size = 1789976, upload-time = "2025-07-29T22:34:48.5Z" }, ] [[package]] @@ -4228,16 +4265,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683, upload-time = "2025-05-27T15:24:28.4Z" }, ] -[[package]] -name = "pandoc" -version = "2.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "plumbum" }, - { name = "ply" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/10/9a/e3186e760c57ee5f1c27ea5cea577a0ff9abfca51eefcb4d9a4cd39aff2e/pandoc-2.4.tar.gz", hash = "sha256:ecd1f8cbb7f4180c6b5db4a17a7c1a74df519995f5f186ef81ce72a9cbd0dd9a", size = 34635, upload-time = "2024-08-07T14:33:58.016Z" } - [[package]] name = "pathspec" version = "0.12.1" @@ -4249,15 +4276,15 @@ wheels = [ [[package]] name = "pdfminer-six" -version = "20240706" +version = "20250506" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "charset-normalizer" }, { name = "cryptography" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/e3/37/63cb918ffa21412dd5d54e32e190e69bfc340f3d6aa072ad740bec9386bb/pdfminer.six-20240706.tar.gz", hash = "sha256:c631a46d5da957a9ffe4460c5dce21e8431dabb615fee5f9f4400603a58d95a6", size = 7363505, upload-time = "2024-07-06T13:48:50.795Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/46/5223d613ac4963e1f7c07b2660fe0e9e770102ec6bda8c038400113fb215/pdfminer_six-20250506.tar.gz", hash = "sha256:b03cc8df09cf3c7aba8246deae52e0bca7ebb112a38895b5e1d4f5dd2b8ca2e7", size = 7387678, upload-time = "2025-05-06T16:17:00.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/7d/44d6b90e5a293d3a975cefdc4e12a932ebba814995b2a07e37e599dd27c6/pdfminer.six-20240706-py3-none-any.whl", hash = "sha256:f4f70e74174b4b3542fcb8406a210b6e2e27cd0f0b5fd04534a8cc0d8951e38c", size = 5615414, upload-time = "2024-07-06T13:48:48.408Z" }, + { url = "https://files.pythonhosted.org/packages/73/16/7a432c0101fa87457e75cb12c879e1749c5870a786525e2e0f42871d6462/pdfminer_six-20250506-py3-none-any.whl", hash = "sha256:d81ad173f62e5f841b53a8ba63af1a4a355933cfc0ffabd608e568b9193909e3", size = 5620187, upload-time = "2025-05-06T16:16:58.669Z" }, ] [[package]] @@ -4328,11 +4355,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.4.0" +version = "4.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, ] [[package]] @@ -4344,18 +4371,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] -[[package]] -name = "plumbum" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f0/5d/49ba324ad4ae5b1a4caefafbce7a1648540129344481f2ed4ef6bb68d451/plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219", size = 319083, upload-time = "2024-10-05T05:59:27.059Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/4f/9d/d03542c93bb3d448406731b80f39c3d5601282f778328c22c77d270f4ed4/plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5", size = 127970, upload-time = "2024-10-05T05:59:25.102Z" }, -] - [[package]] name = "ply" version = "3.11" @@ -4367,7 +4382,7 @@ wheels = [ [[package]] name = "polyfile-weave" -version = "0.5.6" +version = "0.5.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "abnf" }, @@ -4385,9 +4400,21 @@ dependencies = [ { name = "pyyaml" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/16/11/7e0b3908a4f5436197b1fc11713c628cd7f9136dc7c1fb00ac8879991f87/polyfile_weave-0.5.6.tar.gz", hash = "sha256:a9fc41b456272c95a3788a2cab791e052acc24890c512fc5a6f9f4e221d24ed1", size = 5987173, upload-time = "2025-07-28T20:26:32.092Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/c3/5a2a2ba06850bc5ec27f83ac8b92210dff9ff6736b2c42f700b489b3fd86/polyfile_weave-0.5.7.tar.gz", hash = "sha256:c3d863f51c30322c236bdf385e116ac06d4e7de9ec25a3aae14d42b1d528e33b", size = 5987445, upload-time = "2025-09-22T19:21:11.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/63/04c5c7c2093cf69c9eeea338f4757522a5d048703a35b3ac8a5580ed2369/polyfile_weave-0.5.6-py3-none-any.whl", hash = "sha256:658e5b6ed040a973279a0cd7f54f4566249c85b977dee556788fa6f903c1d30b", size = 1655007, upload-time = "2025-07-28T20:26:30.132Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f6/d1efedc0f9506e47699616e896d8efe39e8f0b6a7d1d590c3e97455ecf4a/polyfile_weave-0.5.7-py3-none-any.whl", hash = "sha256:880454788bc383408bf19eefd6d1c49a18b965d90c99bccb58f4da65870c82dd", size = 1655397, upload-time = "2025-09-22T19:21:09.142Z" }, +] + +[[package]] +name = "pondpond" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "madoka" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/9b/8411458ca8ce8b5b9b135e4a19823f1caf958ca9985883db104323492982/pondpond-1.4.1.tar.gz", hash = "sha256:8afa34b869d1434d21dd2ec12644abc3b1733fcda8fcf355300338a13a79bb7b", size = 15237, upload-time = "2024-03-01T07:08:06.756Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/d4/f18d6985157cc68f76469480182cbee2a03a45858456955acf57f9dcbb4c/pondpond-1.4.1-py3-none-any.whl", hash = "sha256:641028ead4e8018ca6de1220c660ddd6d6fbf62a60e72f410655dd0451d82880", size = 14498, upload-time = "2024-03-01T07:08:04.63Z" }, ] [[package]] @@ -4418,7 +4445,7 @@ wheels = [ [[package]] name = "posthog" -version = "6.7.4" +version = "6.7.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff" }, @@ -4428,9 +4455,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/40/d7f585e09e47f492ebaeb8048a8e2ce5d9f49a3896856a7a975cbc1484fa/posthog-6.7.4.tar.gz", hash = "sha256:2bfa74f321ac18efe4a48a256d62034a506ca95477af7efa32292ed488a742c5", size = 118209, upload-time = "2025-09-05T15:29:21.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/ce/11d6fa30ab517018796e1d675498992da585479e7079770ec8fa99a61561/posthog-6.7.6.tar.gz", hash = "sha256:ee5c5ad04b857d96d9b7a4f715e23916a2f206bfcf25e5a9d328a3d27664b0d3", size = 119129, upload-time = "2025-09-22T18:11:12.365Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/bb/95/e795059ef73d480a7f11f1be201087f65207509525920897fb514a04914c/posthog-6.7.4-py3-none-any.whl", hash = "sha256:7f1872c53ec7e9a29b088a5a1ad03fa1be3b871d10d70c8bf6c2dafb91beaac5", size = 136409, upload-time = "2025-09-05T15:29:19.995Z" }, + { url = "https://files.pythonhosted.org/packages/de/84/586422d8861b5391c8414360b10f603c0b7859bb09ad688e64430ed0df7b/posthog-6.7.6-py3-none-any.whl", hash = "sha256:b09a7e65a042ec416c28874b397d3accae412a80a8b0ef3fa686fbffc99e4d4b", size = 137348, upload-time = "2025-09-22T18:11:10.807Z" }, ] [[package]] @@ -4447,43 +4474,41 @@ wheels = [ [[package]] name = "propcache" -version = "0.3.2" +version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, - { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, - { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, - { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", 
-    { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" },
-    { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" },
-    { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" },
-    { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" },
-    { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" },
-    { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" },
-    { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" },
-    { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" },
-    { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" },
-    { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" },
-    { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" },
"https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = 
"2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 
210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, 
upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] @@ -4514,17 +4539,18 @@ wheels = [ [[package]] name = "psutil" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz", hash = "sha256:655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", size = 497660, upload-time = "2025-09-17T20:14:52.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, + { url = "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", size = 245242, upload-time = "2025-09-17T20:14:56.126Z" }, + { url = "https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", size = 246682, upload-time = "2025-09-17T20:14:58.25Z" }, + { url = "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", size = 287994, upload-time = "2025-09-17T20:14:59.901Z" }, + { url = "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", size = 291163, upload-time = "2025-09-17T20:15:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", size = 293625, upload-time = "2025-09-17T20:15:04.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/157c8e7959ec39ced1b11cc93c730c4fb7f9d408569a6c59dbd92ceb35db/psutil-7.1.0-cp37-abi3-win32.whl", hash = "sha256:09ad740870c8d219ed8daae0ad3b726d3bf9a028a198e7f3080f6a1888b99bca", size = 244812, upload-time = "2025-09-17T20:15:07.462Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/b44c4f697276a7a95b8e94d0e320a7bf7f3318521b23de69035540b39838/psutil-7.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:57f5e987c36d3146c0dd2528cd42151cf96cd359b9d67cfff836995cc5df9a3d", size = 247965, upload-time = "2025-09-17T20:15:09.673Z" }, + { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] [[package]] @@ -4535,34 +4561,28 @@ sdist = { url = "https://files.pythonhosted.org/packages/eb/72/4a7965cf54e341006 [[package]] name = "psycopg2-binary" -version = "2.9.10" +version = "2.9.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } 
wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397, upload-time = "2024-10-16T11:19:40.033Z" }, - { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806, upload-time = "2024-10-16T11:19:43.5Z" }, - { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370, upload-time = "2024-10-16T11:19:46.986Z" }, - { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780, upload-time = "2024-10-16T11:19:50.242Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583, upload-time = "2024-10-16T11:19:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831, upload-time = "2024-10-16T11:19:57.762Z" }, - { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822, upload-time = "2024-10-16T11:20:04.693Z" }, - { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975, upload-time = "2024-10-16T11:20:11.401Z" }, - { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320, upload-time = "2024-10-16T11:20:17.959Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617, upload-time = "2024-10-16T11:20:24.711Z" }, - { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = 
"sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618, upload-time = "2024-10-16T11:20:27.718Z" }, - { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816, upload-time = "2024-10-16T11:20:30.777Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, - { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" }, - { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, - { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, - { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, - { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, - { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, - { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, - { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = 
"2025-10-10T11:11:45.366Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, ] [[package]] @@ -4658,7 +4678,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -4666,9 +4686,9 @@ dependencies = [ { 
name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/54/ecab642b3bed45f7d5f59b38443dcb36ef50f85af192e6ece103dbfe9587/pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423", size = 788494, upload-time = "2025-10-04T10:40:41.338Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/bd/1f/73c53fcbfb0b5a78f91176df41945ca466e71e9d9d836e5c522abda39ee7/pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a", size = 444823, upload-time = "2025-10-04T10:40:39.055Z" }, ] [[package]] @@ -4721,29 +4741,29 @@ wheels = [ [[package]] name = "pydantic-extra-types" -version = "2.10.5" +version = "2.10.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/ba/4178111ec4116c54e1dc7ecd2a1ff8f54256cdbd250e576882911e8f710a/pydantic_extra_types-2.10.5.tar.gz", hash = "sha256:1dcfa2c0cf741a422f088e0dbb4690e7bfadaaf050da3d6f80d6c3cf58a2bad8", size = 138429, upload-time = "2025-06-02T09:31:52.713Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/10/fb64987804cde41bcc39d9cd757cd5f2bb5d97b389d81aa70238b14b8a7e/pydantic_extra_types-2.10.6.tar.gz", hash = "sha256:c63d70bf684366e6bbe1f4ee3957952ebe6973d41e7802aea0b770d06b116aeb", size = 141858, upload-time = "2025-10-08T13:47:49.483Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/1a/5f4fd9e7285f10c44095a4f9fe17d0f358d1702a7c74a9278c794e8a7537/pydantic_extra_types-2.10.5-py3-none-any.whl", hash = "sha256:b60c4e23d573a69a4f1a16dd92888ecc0ef34fb0e655b4f305530377fa70e7a8", size = 38315, upload-time = "2025-06-02T09:31:51.229Z" }, + { url = "https://files.pythonhosted.org/packages/93/04/5c918669096da8d1c9ec7bb716bd72e755526103a61bc5e76a3e4fb23b53/pydantic_extra_types-2.10.6-py3-none-any.whl", hash = "sha256:6106c448316d30abf721b5b9fecc65e983ef2614399a24142d689c7546cc246a", size = 40949, upload-time = "2025-10-08T13:47:48.268Z" }, ] [[package]] name = "pydantic-settings" -version = "2.9.1" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234, upload-time = "2025-04-18T16:44:48.265Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } 
wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356, upload-time = "2025-04-18T16:44:46.617Z" }, + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, ] [[package]] @@ -4771,7 +4791,7 @@ crypto = [ [[package]] name = "pymilvus" -version = "2.5.15" +version = "2.5.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, @@ -4782,9 +4802,9 @@ dependencies = [ { name = "setuptools" }, { name = "ujson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/f9/dee7f0d42979bf4cbe0bf23f8db9bf4c331b53c4c9f8692d2e027073c928/pymilvus-2.5.15.tar.gz", hash = "sha256:350396ef3bb40aa62c8a2ecaccb5c664bbb1569eef8593b74dd1d5125eb0deb2", size = 1278109, upload-time = "2025-08-21T11:57:58.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/e2/5613bc7b2af0ccd760177ca4255243c284cfc0f2cba3f10ff63325c4ca34/pymilvus-2.5.16.tar.gz", hash = "sha256:65f56b81806bc217cca3cf29b70a27d053dea4b1ffada910cf63a38f96381618", size = 1280614, upload-time = "2025-09-19T07:02:14.747Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/af/10a620686025e5b59889d7075f5d426e45e57a0180c4465051645a88ccb0/pymilvus-2.5.15-py3-none-any.whl", hash = "sha256:a155a3b436e2e3ca4b85aac80c92733afe0bd172c497c3bc0dfaca0b804b90c9", size = 241683, upload-time = "2025-08-21T11:57:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/c6/09/b67a55abee0a53ea50ba0de0cba6e1c0f7ca7ce2c15ffd6f40c059c25e88/pymilvus-2.5.16-py3-none-any.whl", hash = "sha256:76258a324f19c60fee247467e11cd7d6f35a64d2a9c753f5d7b1a5fa15dd6c8a", size = 243272, upload-time = "2025-09-19T07:02:12.443Z" }, ] [[package]] @@ -4838,20 +4858,20 @@ wheels = [ [[package]] name = "pyparsing" -version = "3.2.3" +version = "3.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, ] [[package]] name = "pypdf" -version = "6.0.0" +version = "6.1.1" source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/20/ac/a300a03c3b34967c050677ccb16e7a4b65607ee5df9d51e8b6d713de4098/pypdf-6.0.0.tar.gz", hash = "sha256:282a99d2cc94a84a3a3159f0d9358c0af53f85b4d28d76ea38b96e9e5ac2a08d", size = 5033827, upload-time = "2025-08-11T14:22:02.352Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/85/4c0f12616db83c2e3ef580c3cfa98bd082e88fc8d02e136bad3bede1e3fa/pypdf-6.1.1.tar.gz", hash = "sha256:10f44d49bf2a82e54c3c5ba3cdcbb118f2a44fc57df8ce51d6fb9b1ed9bfbe8b", size = 5074507, upload-time = "2025-09-28T13:29:16.165Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/83/2cacc506eb322bb31b747bc06ccb82cc9aa03e19ee9c1245e538e49d52be/pypdf-6.0.0-py3-none-any.whl", hash = "sha256:56ea60100ce9f11fc3eec4f359da15e9aec3821b036c1f06d2b660d35683abb8", size = 310465, upload-time = "2025-08-11T14:22:00.481Z" }, + { url = "https://files.pythonhosted.org/packages/07/ed/adae13756d9dabdddee483fc7712905bb5585fbf6e922b1a19aca3a29cd1/pypdf-6.1.1-py3-none-any.whl", hash = "sha256:7781f99493208a37a7d4275601d883e19af24e62a525c25844d22157c2e4cde7", size = 323455, upload-time = "2025-09-28T13:29:14.392Z" }, ] [[package]] @@ -4963,6 +4983,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, ] +[[package]] +name = "pytest-timeout" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" }, +] + [[package]] name = "python-calamine" version = "0.5.3" @@ -5130,75 +5162,29 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, -] - -[[package]] -name = "pyzstd" -version = "0.17.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8f/a2/54d860ccbd07e3c67e4d0321d1c29fc7963ac82cf801a078debfc4ef7c15/pyzstd-0.17.0.tar.gz", hash = "sha256:d84271f8baa66c419204c1dd115a4dec8b266f8a2921da21b81764fa208c1db6", size = 1212160, upload-time = "2025-05-10T14:14:49.764Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/4a/81ca9a6a759ae10a51cb72f002c149b602ec81b3a568ca6292b117f6da0d/pyzstd-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06d1e7afafe86b90f3d763f83d2f6b6a437a8d75119fe1ff52b955eb9df04eaa", size = 377827, upload-time = "2025-05-10T14:12:54.102Z" }, - { url = "https://files.pythonhosted.org/packages/a1/09/584c12c8a918c9311a55be0c667e57a8ee73797367299e2a9f3fc3bf7a39/pyzstd-0.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc827657f644e4510211b49f5dab6b04913216bc316206d98f9a75214361f16e", size = 297579, upload-time = "2025-05-10T14:12:55.748Z" }, - { url = "https://files.pythonhosted.org/packages/e1/89/dc74cd83f30b97f95d42b028362e32032e61a8f8e6cc2a8e47b70976d99a/pyzstd-0.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecffadaa2ee516ecea3e432ebf45348fa8c360017f03b88800dd312d62ecb063", size = 443132, upload-time = "2025-05-10T14:12:57.098Z" }, - { url = "https://files.pythonhosted.org/packages/a8/12/fe93441228a324fe75d10f5f13d5e5d5ed028068810dfdf9505d89d704a0/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:596de361948d3aad98a837c98fcee4598e51b608f7e0912e0e725f82e013f00f", size = 390644, upload-time = "2025-05-10T14:12:58.379Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/d1/aa7cdeb9bf8995d9df9936c71151be5f4e7b231561d553e73bbf340c2281/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd3a8d0389c103e93853bf794b9a35ac5d0d11ca3e7e9f87e3305a10f6dfa6b2", size = 478070, upload-time = "2025-05-10T14:12:59.706Z" }, - { url = "https://files.pythonhosted.org/packages/95/62/7e5c450790bfd3db954694d4d877446d0b6d192aae9c73df44511f17b75c/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1356f72c7b8bb99b942d582b61d1a93c5065e66b6df3914dac9f2823136c3228", size = 421240, upload-time = "2025-05-10T14:13:01.151Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b5/d20c60678c0dfe2430f38241d118308f12516ccdb44f9edce27852ee2187/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f514c339b013b0b0a2ed8ea6e44684524223bd043267d7644d7c3a70e74a0dd", size = 412908, upload-time = "2025-05-10T14:13:02.904Z" }, - { url = "https://files.pythonhosted.org/packages/d2/a0/3ae0f1af2982b6cdeacc2a1e1cd20869d086d836ea43e0f14caee8664101/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d4de16306821021c2d82a45454b612e2a8683d99bfb98cff51a883af9334bea0", size = 415572, upload-time = "2025-05-10T14:13:04.828Z" }, - { url = "https://files.pythonhosted.org/packages/7d/84/cb0a10c3796f4cd5f09c112cbd72405ffd019f7c0d1e2e5e99ccc803c60c/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aeb9759c04b6a45c1b56be21efb0a738e49b0b75c4d096a38707497a7ff2be82", size = 445334, upload-time = "2025-05-10T14:13:06.5Z" }, - { url = "https://files.pythonhosted.org/packages/d6/d6/8c5cf223067b69aa63f9ecf01846535d4ba82d98f8c9deadfc0092fa16ca/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7a5b31ddeada0027e67464d99f09167cf08bab5f346c3c628b2d3c84e35e239a", size = 518748, upload-time = "2025-05-10T14:13:08.286Z" }, - { url = "https://files.pythonhosted.org/packages/bf/1c/dc7bab00a118d0ae931239b23e05bf703392005cf3bb16942b7b2286452a/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8338e4e91c52af839abcf32f1f65f3b21e2597ffe411609bdbdaf10274991bd0", size = 562487, upload-time = "2025-05-10T14:13:09.714Z" }, - { url = "https://files.pythonhosted.org/packages/e0/a4/fca96c0af643e4de38bce0dc25dab60ea558c49444c30b9dbe8b7a1714be/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628e93862feb372b4700085ec4d1d389f1283ac31900af29591ae01019910ff3", size = 432319, upload-time = "2025-05-10T14:13:11.296Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a3/7c924478f6c14b369fec8c5cd807b069439c6ecbf98c4783c5791036d3ad/pyzstd-0.17.0-cp311-cp311-win32.whl", hash = "sha256:c27773f9c95ebc891cfcf1ef282584d38cde0a96cb8d64127953ad752592d3d7", size = 220005, upload-time = "2025-05-10T14:13:13.188Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f6/d081b6b29cf00780c971b07f7889a19257dd884e64a842a5ebc406fd3992/pyzstd-0.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:c043a5766e00a2b7844705c8fa4563b7c195987120afee8f4cf594ecddf7e9ac", size = 246224, upload-time = "2025-05-10T14:13:14.478Z" }, - { url = "https://files.pythonhosted.org/packages/61/f3/f42c767cde8e3b94652baf85863c25476fd463f3bd61f73ed4a02c1db447/pyzstd-0.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:efd371e41153ef55bf51f97e1ce4c1c0b05ceb59ed1d8972fc9aa1e9b20a790f", size = 223036, upload-time = "2025-05-10T14:13:15.752Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/50/7fa47d0a13301b1ce20972aa0beb019c97f7ee8b0658d7ec66727b5967f9/pyzstd-0.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ac330fc4f64f97a411b6f3fc179d2fe3050b86b79140e75a9a6dd9d6d82087f", size = 379056, upload-time = "2025-05-10T14:13:17.091Z" }, - { url = "https://files.pythonhosted.org/packages/9d/f2/67b03b1fa4e2a0b05e147cc30ac6d271d3d11017b47b30084cb4699451f4/pyzstd-0.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:725180c0c4eb2e643b7048ebfb45ddf43585b740535907f70ff6088f5eda5096", size = 298381, upload-time = "2025-05-10T14:13:18.812Z" }, - { url = "https://files.pythonhosted.org/packages/01/8b/807ff0a13cf3790fe5de85e18e10c22b96d92107d2ce88699cefd3f890cb/pyzstd-0.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c20fe0a60019685fa1f7137cb284f09e3f64680a503d9c0d50be4dd0a3dc5ec", size = 443770, upload-time = "2025-05-10T14:13:20.495Z" }, - { url = "https://files.pythonhosted.org/packages/f0/88/832d8d8147691ee37736a89ea39eaf94ceac5f24a6ce2be316ff5276a1f8/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d97f7aaadc3b6e2f8e51bfa6aa203ead9c579db36d66602382534afaf296d0db", size = 391167, upload-time = "2025-05-10T14:13:22.236Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a5/2e09bee398dfb0d94ca43f3655552a8770a6269881dc4710b8f29c7f71aa/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42dcb34c5759b59721997036ff2d94210515d3ef47a9de84814f1c51a1e07e8a", size = 478960, upload-time = "2025-05-10T14:13:23.584Z" }, - { url = "https://files.pythonhosted.org/packages/da/b5/1f3b778ad1ccc395161fab7a3bf0dfbd85232234b6657c93213ed1ceda7e/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6bf05e18be6f6c003c7129e2878cffd76fcbebda4e7ebd7774e34ae140426cbf", size = 421891, upload-time = "2025-05-10T14:13:25.417Z" }, - { url = "https://files.pythonhosted.org/packages/83/c4/6bfb4725f4f38e9fe9735697060364fb36ee67546e7e8d78135044889619/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f7c3a5144aa4fbccf37c30411f6b1db4c0f2cb6ad4df470b37929bffe6ca0", size = 413608, upload-time = "2025-05-10T14:13:26.75Z" }, - { url = "https://files.pythonhosted.org/packages/95/a2/c48b543e3a482e758b648ea025b94efb1abe1f4859c5185ff02c29596035/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9efd4007f8369fd0890701a4fc77952a0a8c4cb3bd30f362a78a1adfb3c53c12", size = 416429, upload-time = "2025-05-10T14:13:28.096Z" }, - { url = "https://files.pythonhosted.org/packages/5c/62/2d039ee4dbc8116ca1f2a2729b88a1368f076f5dadad463f165993f7afa8/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5f8add139b5fd23b95daa844ca13118197f85bd35ce7507e92fcdce66286cc34", size = 446671, upload-time = "2025-05-10T14:13:29.772Z" }, - { url = "https://files.pythonhosted.org/packages/be/ec/9ec9f0957cf5b842c751103a2b75ecb0a73cf3d99fac57e0436aab6748e0/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:259a60e8ce9460367dcb4b34d8b66e44ca3d8c9c30d53ed59ae7037622b3bfc7", size = 520290, upload-time = "2025-05-10T14:13:31.585Z" }, - { url = "https://files.pythonhosted.org/packages/cc/42/2e2f4bb641c2a9ab693c31feebcffa1d7c24e946d8dde424bba371e4fcce/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:86011a93cc3455c5d2e35988feacffbf2fa106812a48e17eb32c2a52d25a95b3", size = 563785, upload-time = 
"2025-05-10T14:13:32.971Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/25e198d382faa4d322f617d7a5ff82af4dc65749a10d90f1423af2d194f6/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:425c31bc3de80313054e600398e4f1bd229ee61327896d5d015e2cd0283c9012", size = 433390, upload-time = "2025-05-10T14:13:34.668Z" }, - { url = "https://files.pythonhosted.org/packages/ad/7c/1ab970f5404ace9d343a36a86f1bd0fcf2dc1adf1ef8886394cf0a58bd9e/pyzstd-0.17.0-cp312-cp312-win32.whl", hash = "sha256:7c4b88183bb36eb2cebbc0352e6e9fe8e2d594f15859ae1ef13b63ebc58be158", size = 220291, upload-time = "2025-05-10T14:13:36.005Z" }, - { url = "https://files.pythonhosted.org/packages/b2/52/d35bf3e4f0676a74359fccef015eabe3ceaba95da4ac2212f8be4dde16de/pyzstd-0.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c31947e0120468342d74e0fa936d43f7e1dad66a2262f939735715aa6c730e8", size = 246451, upload-time = "2025-05-10T14:13:37.712Z" }, - { url = "https://files.pythonhosted.org/packages/34/da/a44705fe44dd87e0f09861b062f93ebb114365640dbdd62cbe80da9b8306/pyzstd-0.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:1d0346418abcef11507356a31bef5470520f6a5a786d4e2c69109408361b1020", size = 222967, upload-time = "2025-05-10T14:13:38.94Z" }, - { url = "https://files.pythonhosted.org/packages/b8/95/b1ae395968efdba92704c23f2f8e027d08e00d1407671e42f65ac914d211/pyzstd-0.17.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3ce6bac0c4c032c5200647992a8efcb9801c918633ebe11cceba946afea152d9", size = 368391, upload-time = "2025-05-10T14:14:33.064Z" }, - { url = "https://files.pythonhosted.org/packages/c7/72/856831cacef58492878b8307353e28a3ba4326a85c3c82e4803a95ad0d14/pyzstd-0.17.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:a00998144b35be7c485a383f739fe0843a784cd96c3f1f2f53f1a249545ce49a", size = 283561, upload-time = "2025-05-10T14:14:34.469Z" }, - { url = "https://files.pythonhosted.org/packages/a4/a7/a86e55cd9f3e630a71c0bf78ac6da0c6b50dc428ca81aa7c5adbc66eb880/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8521d7bbd00e0e1c1fd222c1369a7600fba94d24ba380618f9f75ee0c375c277", size = 356912, upload-time = "2025-05-10T14:14:35.722Z" }, - { url = "https://files.pythonhosted.org/packages/ad/b7/de2b42dd96dfdb1c0feb5f43d53db2d3a060607f878da7576f35dff68789/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da65158c877eac78dcc108861d607c02fb3703195c3a177f2687e0bcdfd519d0", size = 329417, upload-time = "2025-05-10T14:14:37.487Z" }, - { url = "https://files.pythonhosted.org/packages/52/65/d4e8196e068e6b430499fb2a5092380eb2cb7eecf459b9d4316cff7ecf6c/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:226ca0430e2357abae1ade802585231a2959b010ec9865600e416652121ba80b", size = 349448, upload-time = "2025-05-10T14:14:38.797Z" }, - { url = "https://files.pythonhosted.org/packages/9e/15/b5ed5ad8c8d2d80c5f5d51e6c61b2cc05f93aaf171164f67ccc7ade815cd/pyzstd-0.17.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e3a19e8521c145a0e2cd87ca464bf83604000c5454f7e0746092834fd7de84d1", size = 241668, upload-time = "2025-05-10T14:14:40.18Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { 
url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = 
"2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, ] [[package]] @@ -5308,52 +5294,52 @@ hiredis = [ [[package]] name = "referencing" -version = "0.36.2" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, ] [[package]] name = "regex" -version = "2025.9.1" +version = "2025.9.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/5a/4c63457fbcaf19d138d72b2e9b39405954f98c0349b31c601bfcb151582c/regex-2025.9.1.tar.gz", hash = "sha256:88ac07b38d20b54d79e704e38aa3bd2c0f8027432164226bdee201a1c0c9c9ff", size = 400852, upload-time = "2025-09-01T22:10:10.479Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917, upload-time = "2025-09-19T00:38:35.79Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/4d/f741543c0c59f96c6625bc6c11fea1da2e378b7d293ffff6f318edc0ce14/regex-2025.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e5bcf112b09bfd3646e4db6bf2e598534a17d502b0c01ea6550ba4eca780c5e6", size = 484811, upload-time = "2025-09-01T22:08:12.834Z" }, - { url = "https://files.pythonhosted.org/packages/c2/bd/27e73e92635b6fbd51afc26a414a3133243c662949cd1cda677fe7bb09bd/regex-2025.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:67a0295a3c31d675a9ee0238d20238ff10a9a2fdb7a1323c798fc7029578b15c", size = 288977, upload-time = "2025-09-01T22:08:14.499Z" }, - { url = "https://files.pythonhosted.org/packages/eb/7d/7dc0c6efc8bc93cd6e9b947581f5fde8a5dbaa0af7c4ec818c5729fdc807/regex-2025.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea8267fbadc7d4bd7c1301a50e85c2ff0de293ff9452a1a9f8d82c6cafe38179", size = 286606, upload-time = "2025-09-01T22:08:15.881Z" }, - { url = "https://files.pythonhosted.org/packages/d1/01/9b5c6dd394f97c8f2c12f6e8f96879c9ac27292a718903faf2e27a0c09f6/regex-2025.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6aeff21de7214d15e928fb5ce757f9495214367ba62875100d4c18d293750cc1", size = 792436, upload-time = "2025-09-01T22:08:17.38Z" }, - { url = "https://files.pythonhosted.org/packages/fc/24/b7430cfc6ee34bbb3db6ff933beb5e7692e5cc81e8f6f4da63d353566fb0/regex-2025.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d89f1bbbbbc0885e1c230f7770d5e98f4f00b0ee85688c871d10df8b184a6323", size = 858705, upload-time = "2025-09-01T22:08:19.037Z" }, - { url = "https://files.pythonhosted.org/packages/d6/98/155f914b4ea6ae012663188545c4f5216c11926d09b817127639d618b003/regex-2025.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca3affe8ddea498ba9d294ab05f5f2d3b5ad5d515bc0d4a9016dd592a03afe52", size = 905881, upload-time = "2025-09-01T22:08:20.377Z" }, - { url = "https://files.pythonhosted.org/packages/8a/a7/a470e7bc8259c40429afb6d6a517b40c03f2f3e455c44a01abc483a1c512/regex-2025.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91892a7a9f0a980e4c2c85dd19bc14de2b219a3a8867c4b5664b9f972dcc0c78", size = 798968, upload-time = "2025-09-01T22:08:22.081Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/33f6fec4d41449fea5f62fdf5e46d668a1c046730a7f4ed9f478331a8e3a/regex-2025.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1cb40406f4ae862710615f9f636c1e030fd6e6abe0e0f65f6a695a2721440c6", size = 781884, upload-time = "2025-09-01T22:08:23.832Z" }, - { url = "https://files.pythonhosted.org/packages/42/de/2b45f36ab20da14eedddf5009d370625bc5942d9953fa7e5037a32d66843/regex-2025.9.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94f6cff6f7e2149c7e6499a6ecd4695379eeda8ccbccb9726e8149f2fe382e92", size = 852935, upload-time = "2025-09-01T22:08:25.536Z" }, - { url = "https://files.pythonhosted.org/packages/1e/f9/878f4fc92c87e125e27aed0f8ee0d1eced9b541f404b048f66f79914475a/regex-2025.9.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6c0226fb322b82709e78c49cc33484206647f8a39954d7e9de1567f5399becd0", size = 844340, upload-time = "2025-09-01T22:08:27.141Z" }, - { url = "https://files.pythonhosted.org/packages/90/c2/5b6f2bce6ece5f8427c718c085eca0de4bbb4db59f54db77aa6557aef3e9/regex-2025.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a12f59c7c380b4fcf7516e9cbb126f95b7a9518902bcf4a852423ff1dcd03e6a", size = 787238, upload-time = "2025-09-01T22:08:28.75Z" }, - { url = "https://files.pythonhosted.org/packages/47/66/1ef1081c831c5b611f6f55f6302166cfa1bc9574017410ba5595353f846a/regex-2025.9.1-cp311-cp311-win32.whl", hash = "sha256:49865e78d147a7a4f143064488da5d549be6bfc3f2579e5044cac61f5c92edd4", size = 264118, upload-time = "2025-09-01T22:08:30.388Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e0/8adc550d7169df1d6b9be8ff6019cda5291054a0107760c2f30788b6195f/regex-2025.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:d34b901f6f2f02ef60f4ad3855d3a02378c65b094efc4b80388a3aeb700a5de7", size = 276151, upload-time = "2025-09-01T22:08:32.073Z" }, - { url = "https://files.pythonhosted.org/packages/cb/bd/46fef29341396d955066e55384fb93b0be7d64693842bf4a9a398db6e555/regex-2025.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:47d7c2dab7e0b95b95fd580087b6ae196039d62306a592fa4e162e49004b6299", size = 268460, upload-time = "2025-09-01T22:08:33.281Z" }, - { url = "https://files.pythonhosted.org/packages/39/ef/a0372febc5a1d44c1be75f35d7e5aff40c659ecde864d7fa10e138f75e74/regex-2025.9.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:84a25164bd8dcfa9f11c53f561ae9766e506e580b70279d05a7946510bdd6f6a", size = 486317, upload-time = "2025-09-01T22:08:34.529Z" }, - { url = "https://files.pythonhosted.org/packages/b5/25/d64543fb7eb41a1024786d518cc57faf1ce64aa6e9ddba097675a0c2f1d2/regex-2025.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:645e88a73861c64c1af558dd12294fb4e67b5c1eae0096a60d7d8a2143a611c7", size = 289698, upload-time = "2025-09-01T22:08:36.162Z" }, - { url = "https://files.pythonhosted.org/packages/d8/dc/fbf31fc60be317bd9f6f87daa40a8a9669b3b392aa8fe4313df0a39d0722/regex-2025.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10a450cba5cd5409526ee1d4449f42aad38dd83ac6948cbd6d7f71ca7018f7db", size = 287242, upload-time = "2025-09-01T22:08:37.794Z" }, - { url = "https://files.pythonhosted.org/packages/0f/74/f933a607a538f785da5021acf5323961b4620972e2c2f1f39b6af4b71db7/regex-2025.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9dc5991592933a4192c166eeb67b29d9234f9c86344481173d1bc52f73a7104", size = 797441, upload-time = "2025-09-01T22:08:39.108Z" }, - { url = "https://files.pythonhosted.org/packages/89/d0/71fc49b4f20e31e97f199348b8c4d6e613e7b6a54a90eb1b090c2b8496d7/regex-2025.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a32291add816961aab472f4fad344c92871a2ee33c6c219b6598e98c1f0108f2", size = 862654, upload-time = "2025-09-01T22:08:40.586Z" }, - { url = "https://files.pythonhosted.org/packages/59/05/984edce1411a5685ba9abbe10d42cdd9450aab4a022271f9585539788150/regex-2025.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:588c161a68a383478e27442a678e3b197b13c5ba51dbba40c1ccb8c4c7bee9e9", size = 910862, upload-time = "2025-09-01T22:08:42.416Z" }, - { url = "https://files.pythonhosted.org/packages/b2/02/5c891bb5fe0691cc1bad336e3a94b9097fbcf9707ec8ddc1dce9f0397289/regex-2025.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47829ffaf652f30d579534da9085fe30c171fa2a6744a93d52ef7195dc38218b", size = 801991, upload-time = "2025-09-01T22:08:44.072Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ae/fd10d6ad179910f7a1b3e0a7fde1ef8bb65e738e8ac4fd6ecff3f52252e4/regex-2025.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e978e5a35b293ea43f140c92a3269b6ab13fe0a2bf8a881f7ac740f5a6ade85", size = 786651, upload-time = "2025-09-01T22:08:46.079Z" }, - { url = "https://files.pythonhosted.org/packages/30/cf/9d686b07bbc5bf94c879cc168db92542d6bc9fb67088d03479fef09ba9d3/regex-2025.9.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf09903e72411f4bf3ac1eddd624ecfd423f14b2e4bf1c8b547b72f248b7bf7", size = 856556, upload-time = "2025-09-01T22:08:48.376Z" }, - { url = "https://files.pythonhosted.org/packages/91/9d/302f8a29bb8a49528abbab2d357a793e2a59b645c54deae0050f8474785b/regex-2025.9.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d016b0f77be63e49613c9e26aaf4a242f196cd3d7a4f15898f5f0ab55c9b24d2", size = 849001, upload-time = "2025-09-01T22:08:50.067Z" }, - { url = "https://files.pythonhosted.org/packages/93/fa/b4c6dbdedc85ef4caec54c817cd5f4418dbfa2453214119f2538082bf666/regex-2025.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:656563e620de6908cd1c9d4f7b9e0777e3341ca7db9d4383bcaa44709c90281e", size = 788138, upload-time = "2025-09-01T22:08:51.933Z" }, - { url = "https://files.pythonhosted.org/packages/4a/1b/91ee17a3cbf87f81e8c110399279d0e57f33405468f6e70809100f2ff7d8/regex-2025.9.1-cp312-cp312-win32.whl", hash = "sha256:df33f4ef07b68f7ab637b1dbd70accbf42ef0021c201660656601e8a9835de45", size = 264524, upload-time = "2025-09-01T22:08:53.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/28/6ba31cce05b0f1ec6b787921903f83bd0acf8efde55219435572af83c350/regex-2025.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:5aba22dfbc60cda7c0853516104724dc904caa2db55f2c3e6e984eb858d3edf3", size = 275489, upload-time = "2025-09-01T22:08:55.037Z" }, - { url = "https://files.pythonhosted.org/packages/bd/ed/ea49f324db00196e9ef7fe00dd13c6164d5173dd0f1bbe495e61bb1fb09d/regex-2025.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:ec1efb4c25e1849c2685fa95da44bfde1b28c62d356f9c8d861d4dad89ed56e9", size = 268589, upload-time = "2025-09-01T22:08:56.369Z" }, + { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832, upload-time = "2025-09-19T00:35:30.011Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994, upload-time = "2025-09-19T00:35:31.733Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619, upload-time = "2025-09-19T00:35:33.221Z" }, + { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454, upload-time = "2025-09-19T00:35:35.361Z" }, + { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 858723, upload-time = "2025-09-19T00:35:36.949Z" }, + { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899, upload-time = "2025-09-19T00:35:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981, upload-time = "2025-09-19T00:35:40.416Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900, upload-time = "2025-09-19T00:35:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952, upload-time = "2025-09-19T00:35:43.751Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355, upload-time = "2025-09-19T00:35:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254, upload-time = "2025-09-19T00:35:46.904Z" }, + { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129, upload-time = "2025-09-19T00:35:48.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160, upload-time = "2025-09-19T00:36:00.45Z" }, + { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471, upload-time = "2025-09-19T00:36:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335, upload-time = "2025-09-19T00:36:03.661Z" }, + { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720, upload-time = "2025-09-19T00:36:05.471Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257, upload-time = "2025-09-19T00:36:07.072Z" }, + { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463, upload-time = "2025-09-19T00:36:08.399Z" }, + { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670, upload-time = "2025-09-19T00:36:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881, upload-time = "2025-09-19T00:36:12.223Z" }, + { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011, upload-time = "2025-09-19T00:36:13.901Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668, upload-time = "2025-09-19T00:36:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 
856578, upload-time = "2025-09-19T00:36:16.845Z" }, + { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017, upload-time = "2025-09-19T00:36:18.597Z" }, + { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150, upload-time = "2025-09-19T00:36:20.464Z" }, + { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536, upload-time = "2025-09-19T00:36:21.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size = 275501, upload-time = "2025-09-19T00:36:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601, upload-time = "2025-09-19T00:36:25.092Z" }, ] [[package]] @@ -5424,15 +5410,15 @@ wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "14.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, ] [[package]] @@ -5499,28 +5485,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.12" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" }, - { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" }, - { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" }, - { url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" }, - { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" }, - { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" }, - { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" }, - { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" }, - { url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" }, - { url = 
"https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" }, - { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" }, - { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" }, - { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" }, - { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" }, - { url = "https://files.pythonhosted.org/packages/c8/fa/05b6428a008e60f79546c943e54068316f32ec8ab5c4f73e4563934fbdc7/ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1", size = 12052870, upload-time = "2025-09-04T16:50:09.121Z" }, - { url = "https://files.pythonhosted.org/packages/85/60/d1e335417804df452589271818749d061b22772b87efda88354cf35cdb7a/ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d", size = 13178016, upload-time = "2025-09-04T16:50:12.559Z" }, - { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" }, + { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" }, + { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" }, + { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280, upload-time = "2025-10-07T18:21:16.411Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" }, + { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" }, + { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" }, + { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" }, + { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" }, + { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872, upload-time = "2025-10-07T18:21:46.67Z" }, + { url = "https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628, upload-time = "2025-10-07T18:21:50.318Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142, upload-time = "2025-10-07T18:21:53.577Z" }, ] [[package]] @@ -5559,28 +5545,28 @@ wheels = [ [[package]] name = "scipy-stubs" -version = "1.16.2.0" +version = "1.16.2.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "optype", extra = ["numpy"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/84/b4c2caf7748f331870992e7ede5b5df0b080671bcef8c8c7e27a3cf8694a/scipy_stubs-1.16.2.0.tar.gz", hash = "sha256:8fdd45155fca401bb755b1b63ac2f192f84f25c3be8da2c99d1cafb2708f3052", size = 352676, upload-time = "2025-09-11T23:28:59.236Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/47/b165711b36a1afb8d5f408393487586e07f8bdb86f829b5b904c1ddd091f/scipy_stubs-1.16.2.3.tar.gz", hash = "sha256:b1afd21442699b8bdd399508187bddcedc6c29a34b188fd603396cb6754c2a91", size = 355436, upload-time = "2025-10-08T01:38:37.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/c8/67d984c264f759e7653c130a4b12ae3b4f4304867579560e9a869adb7883/scipy_stubs-1.16.2.0-py3-none-any.whl", hash = "sha256:18c50d49e3c932033fdd4f7fa4fea9e45c8787f92bceaec9e86ccbd140e835d5", size = 553247, upload-time = "2025-09-11T23:28:57.688Z" }, + { url = "https://files.pythonhosted.org/packages/29/9f/3d8f613d0c3be9348cb0c351328249b7a2428f13329447ec6f395628d7b0/scipy_stubs-1.16.2.3-py3-none-any.whl", hash = "sha256:05e93238bdaedb7fa1afedf9c3a2337f94fec3d8c33fb2d403c933e1bcc7412e", size = 556848, upload-time = "2025-10-08T01:38:35.697Z" }, ] [[package]] name = "sendgrid" -version = "6.12.4" +version = "6.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "ecdsa" }, + { name = "cryptography" }, { name = "python-http-client" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271, upload-time = "2025-06-12T10:29:37.213Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/fa/f718b2b953f99c1f0085811598ac7e31ccbd4229a81ec2a5290be868187a/sendgrid-6.12.5.tar.gz", hash = "sha256:ea9aae30cd55c332e266bccd11185159482edfc07c149b6cd15cf08869fabdb7", size = 50310, upload-time = "2025-09-19T06:23:09.229Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122, 
upload-time = "2025-06-12T10:29:35.457Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/b3c3880a77082e8f7374954e0074aafafaa9bc78bdf9c8f5a92c2e7afc6a/sendgrid-6.12.5-py3-none-any.whl", hash = "sha256:96f92cc91634bf552fdb766b904bbb53968018da7ae41fdac4d1090dc0311ca8", size = 102173, upload-time = "2025-09-19T06:23:07.93Z" }, ] [[package]] @@ -5614,29 +5600,29 @@ wheels = [ [[package]] name = "shapely" -version = "2.1.1" +version = "2.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422, upload-time = "2025-05-19T11:04:41.265Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/97/2df985b1e03f90c503796ad5ecd3d9ed305123b64d4ccb54616b30295b29/shapely-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587a1aa72bc858fab9b8c20427b5f6027b7cbc92743b8e2c73b9de55aa71c7a7", size = 1819368, upload-time = "2025-05-19T11:03:55.937Z" }, - { url = "https://files.pythonhosted.org/packages/56/17/504518860370f0a28908b18864f43d72f03581e2b6680540ca668f07aa42/shapely-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fa5c53b0791a4b998f9ad84aad456c988600757a96b0a05e14bba10cebaaaea", size = 1625362, upload-time = "2025-05-19T11:03:57.06Z" }, - { url = "https://files.pythonhosted.org/packages/36/a1/9677337d729b79fce1ef3296aac6b8ef4743419086f669e8a8070eff8f40/shapely-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aabecd038841ab5310d23495253f01c2a82a3aedae5ab9ca489be214aa458aa7", size = 2999005, upload-time = "2025-05-19T11:03:58.692Z" }, - { url = "https://files.pythonhosted.org/packages/a2/17/e09357274699c6e012bbb5a8ea14765a4d5860bb658df1931c9f90d53bd3/shapely-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586f6aee1edec04e16227517a866df3e9a2e43c1f635efc32978bb3dc9c63753", size = 3108489, upload-time = "2025-05-19T11:04:00.059Z" }, - { url = "https://files.pythonhosted.org/packages/17/5d/93a6c37c4b4e9955ad40834f42b17260ca74ecf36df2e81bb14d12221b90/shapely-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b9878b9e37ad26c72aada8de0c9cfe418d9e2ff36992a1693b7f65a075b28647", size = 3945727, upload-time = "2025-05-19T11:04:01.786Z" }, - { url = "https://files.pythonhosted.org/packages/a3/1a/ad696648f16fd82dd6bfcca0b3b8fbafa7aacc13431c7fc4c9b49e481681/shapely-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9a531c48f289ba355e37b134e98e28c557ff13965d4653a5228d0f42a09aed0", size = 4109311, upload-time = "2025-05-19T11:04:03.134Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/150dd245beab179ec0d4472bf6799bf18f21b1efbef59ac87de3377dbf1c/shapely-2.1.1-cp311-cp311-win32.whl", hash = "sha256:4866de2673a971820c75c0167b1f1cd8fb76f2d641101c23d3ca021ad0449bab", size = 1522982, upload-time = "2025-05-19T11:04:05.217Z" }, - { url = "https://files.pythonhosted.org/packages/93/5b/842022c00fbb051083c1c85430f3bb55565b7fd2d775f4f398c0ba8052ce/shapely-2.1.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:20a9d79958b3d6c70d8a886b250047ea32ff40489d7abb47d01498c704557a93", size = 1703872, upload-time = "2025-05-19T11:04:06.791Z" }, - { url = "https://files.pythonhosted.org/packages/fb/64/9544dc07dfe80a2d489060791300827c941c451e2910f7364b19607ea352/shapely-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2827365b58bf98efb60affc94a8e01c56dd1995a80aabe4b701465d86dcbba43", size = 1833021, upload-time = "2025-05-19T11:04:08.022Z" }, - { url = "https://files.pythonhosted.org/packages/07/aa/fb5f545e72e89b6a0f04a0effda144f5be956c9c312c7d4e00dfddbddbcf/shapely-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c551f7fa7f1e917af2347fe983f21f212863f1d04f08eece01e9c275903fad", size = 1643018, upload-time = "2025-05-19T11:04:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/03/46/61e03edba81de729f09d880ce7ae5c1af873a0814206bbfb4402ab5c3388/shapely-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78dec4d4fbe7b1db8dc36de3031767e7ece5911fb7782bc9e95c5cdec58fb1e9", size = 2986417, upload-time = "2025-05-19T11:04:10.56Z" }, - { url = "https://files.pythonhosted.org/packages/1f/1e/83ec268ab8254a446b4178b45616ab5822d7b9d2b7eb6e27cf0b82f45601/shapely-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872d3c0a7b8b37da0e23d80496ec5973c4692920b90de9f502b5beb994bbaaef", size = 3098224, upload-time = "2025-05-19T11:04:11.903Z" }, - { url = "https://files.pythonhosted.org/packages/f1/44/0c21e7717c243e067c9ef8fa9126de24239f8345a5bba9280f7bb9935959/shapely-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e2b9125ebfbc28ecf5353511de62f75a8515ae9470521c9a693e4bb9fbe0cf1", size = 3925982, upload-time = "2025-05-19T11:04:13.224Z" }, - { url = "https://files.pythonhosted.org/packages/15/50/d3b4e15fefc103a0eb13d83bad5f65cd6e07a5d8b2ae920e767932a247d1/shapely-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4b96cea171b3d7f6786976a0520f178c42792897653ecca0c5422fb1e6946e6d", size = 4089122, upload-time = "2025-05-19T11:04:14.477Z" }, - { url = "https://files.pythonhosted.org/packages/bd/05/9a68f27fc6110baeedeeebc14fd86e73fa38738c5b741302408fb6355577/shapely-2.1.1-cp312-cp312-win32.whl", hash = "sha256:39dca52201e02996df02e447f729da97cfb6ff41a03cb50f5547f19d02905af8", size = 1522437, upload-time = "2025-05-19T11:04:16.203Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e9/a4560e12b9338842a1f82c9016d2543eaa084fce30a1ca11991143086b57/shapely-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:13d643256f81d55a50013eff6321142781cf777eb6a9e207c2c9e6315ba6044a", size = 1703479, upload-time = "2025-05-19T11:04:18.497Z" }, + { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" }, + { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", 
size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" }, + { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" }, + { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" }, + { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" }, + { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" }, + { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" }, ] [[package]] @@ -5704,31 +5690,31 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.43" +version = "2.0.44" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/bc/d59b5d97d27229b0e009bd9098cd81af71c2fa5549c580a0a67b9bed0496/sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417", size = 9762949, upload-time = "2025-08-11T14:24:58.438Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/77/fa7189fe44114658002566c6fe443d3ed0ec1fa782feb72af6ef7fbe98e7/sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29", size = 2136472, upload-time = "2025-08-11T15:52:21.789Z" }, - { url = "https://files.pythonhosted.org/packages/99/ea/92ac27f2fbc2e6c1766bb807084ca455265707e041ba027c09c17d697867/sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631", size = 2126535, upload-time = "2025-08-11T15:52:23.109Z" }, - { url = "https://files.pythonhosted.org/packages/94/12/536ede80163e295dc57fff69724caf68f91bb40578b6ac6583a293534849/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685", size = 3297521, upload-time = "2025-08-11T15:50:33.536Z" }, - { url = "https://files.pythonhosted.org/packages/03/b5/cacf432e6f1fc9d156eca0560ac61d4355d2181e751ba8c0cd9cb232c8c1/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca", size = 3297343, upload-time = "2025-08-11T15:57:51.186Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ba/d4c9b526f18457667de4c024ffbc3a0920c34237b9e9dd298e44c7c00ee5/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d", size = 3232113, upload-time = "2025-08-11T15:50:34.949Z" }, - { url = "https://files.pythonhosted.org/packages/aa/79/c0121b12b1b114e2c8a10ea297a8a6d5367bc59081b2be896815154b1163/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3", size = 
3258240, upload-time = "2025-08-11T15:57:52.983Z" }, - { url = "https://files.pythonhosted.org/packages/79/99/a2f9be96fb382f3ba027ad42f00dbe30fdb6ba28cda5f11412eee346bec5/sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921", size = 2101248, upload-time = "2025-08-11T15:55:01.855Z" }, - { url = "https://files.pythonhosted.org/packages/ee/13/744a32ebe3b4a7a9c7ea4e57babae7aa22070d47acf330d8e5a1359607f1/sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8", size = 2126109, upload-time = "2025-08-11T15:55:04.092Z" }, - { url = "https://files.pythonhosted.org/packages/61/db/20c78f1081446095450bdc6ee6cc10045fce67a8e003a5876b6eaafc5cc4/sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24", size = 2134891, upload-time = "2025-08-11T15:51:13.019Z" }, - { url = "https://files.pythonhosted.org/packages/45/0a/3d89034ae62b200b4396f0f95319f7d86e9945ee64d2343dcad857150fa2/sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83", size = 2123061, upload-time = "2025-08-11T15:51:14.319Z" }, - { url = "https://files.pythonhosted.org/packages/cb/10/2711f7ff1805919221ad5bee205971254845c069ee2e7036847103ca1e4c/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9", size = 3320384, upload-time = "2025-08-11T15:52:35.088Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0e/3d155e264d2ed2778484006ef04647bc63f55b3e2d12e6a4f787747b5900/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48", size = 3329648, upload-time = "2025-08-11T15:56:34.153Z" }, - { url = "https://files.pythonhosted.org/packages/5b/81/635100fb19725c931622c673900da5efb1595c96ff5b441e07e3dd61f2be/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687", size = 3258030, upload-time = "2025-08-11T15:52:36.933Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ed/a99302716d62b4965fded12520c1cbb189f99b17a6d8cf77611d21442e47/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe", size = 3294469, upload-time = "2025-08-11T15:56:35.553Z" }, - { url = "https://files.pythonhosted.org/packages/5d/a2/3a11b06715149bf3310b55a98b5c1e84a42cfb949a7b800bc75cb4e33abc/sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d", size = 2098906, upload-time = "2025-08-11T15:55:00.645Z" }, - { url = "https://files.pythonhosted.org/packages/bc/09/405c915a974814b90aa591280623adc6ad6b322f61fd5cff80aeaef216c9/sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a", size = 2126260, upload-time = "2025-08-11T15:55:02.965Z" }, - { url = "https://files.pythonhosted.org/packages/b8/d9/13bdde6521f322861fab67473cec4b1cc8999f3871953531cf61945fad92/sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc", size = 1924759, upload-time = "2025-08-11T15:39:53.024Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517, upload-time = "2025-10-10T15:36:15.64Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738, upload-time = "2025-10-10T15:36:16.91Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145, upload-time = "2025-10-10T15:34:19.569Z" }, + { url = "https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511, upload-time = "2025-10-10T15:47:05.088Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161, upload-time = "2025-10-10T15:34:21.193Z" }, + { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426, upload-time = "2025-10-10T15:47:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/f8/3b/55c09b285cb2d55bdfa711e778bdffdd0dc3ffa052b0af41f1c5d6e582fa/sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3", size = 2105392, upload-time = "2025-10-10T15:38:20.051Z" }, + { url = "https://files.pythonhosted.org/packages/c7/23/907193c2f4d680aedbfbdf7bf24c13925e3c7c292e813326c1b84a0b878e/sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5", size = 2130293, upload-time = "2025-10-10T15:38:21.601Z" }, + { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675, upload-time = "2025-10-10T16:03:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726, upload-time = "2025-10-10T16:03:35.934Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603, upload-time = "2025-10-10T15:35:28.322Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842, upload-time = "2025-10-10T15:43:45.431Z" }, + { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558, upload-time = "2025-10-10T15:35:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570, upload-time = "2025-10-10T15:43:48.407Z" }, + { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447, upload-time = "2025-10-10T15:03:21.678Z" }, + { url = "https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912, upload-time = "2025-10-10T15:03:24.656Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, ] [[package]] @@ -5921,7 +5907,7 @@ wheels = [ [[package]] name = "testcontainers" -version = "4.10.0" +version = "4.13.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docker" }, @@ -5930,9 +5916,9 @@ dependencies = [ { name = "urllib3" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/49/9c618aff1c50121d183cdfbc3a4a5cf2727a2cde1893efe6ca55c7009196/testcontainers-4.10.0.tar.gz", hash = "sha256:03f85c3e505d8b4edeb192c72a961cebbcba0dd94344ae778b4a159cb6dcf8d3", size = 63327, upload-time = "2025-04-02T16:13:27.582Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/51/edac83edab339d8b4dce9a7b659163afb1ea7e011bfed1d5573d495a4485/testcontainers-4.13.2.tar.gz", hash = "sha256:2315f1e21b059427a9d11e8921f85fef322fbe0d50749bcca4eaa11271708ba4", size = 78692, upload-time = "2025-10-07T21:53:07.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/0a/824b0c1ecf224802125279c3effff2e25ed785ed046e67da6e53d928de4c/testcontainers-4.10.0-py3-none-any.whl", hash = "sha256:31ed1a81238c7e131a2a29df6db8f23717d892b592fa5a1977fd0dcd0c23fc23", size = 107414, upload-time = "2025-04-02T16:13:25.785Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5e/73aa94770f1df0595364aed526f31d54440db5492911e2857318ed326e51/testcontainers-4.13.2-py3-none-any.whl", hash = "sha256:0209baf8f4274b568cde95bef2cadf7b1d33b375321f793790462e235cd684ee", size = 124771, upload-time = "2025-10-07T21:53:05.937Z" }, ] [[package]] @@ -6007,31 +5993,27 @@ wheels = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, ] [[package]] @@ -6061,7 +6043,7 @@ wheels = [ [[package]] name = "transformers" -version = "4.56.1" +version = "4.56.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -6075,39 +6057,39 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/21/dc88ef3da1e49af07ed69386a11047a31dcf1aaf4ded3bc4b173fbf94116/transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74", size = 9855473, upload-time = "2025-09-04T20:47:13.14Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/82/0bcfddd134cdf53440becb5e738257cc3cf34cf229d63b57bfd288e6579f/transformers-4.56.2.tar.gz", hash = "sha256:5e7c623e2d7494105c726dd10f6f90c2c99a55ebe86eef7233765abd0cb1c529", size = 9844296, upload-time = "2025-09-19T15:16:26.778Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/7c/283c3dd35e00e22a7803a0b2a65251347b745474a82399be058bde1c9f15/transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248", size = 11608197, upload-time = "2025-09-04T20:47:04.895Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/26/2591b48412bde75e33bfd292034103ffe41743cacd03120e3242516cd143/transformers-4.56.2-py3-none-any.whl", hash = "sha256:79c03d0e85b26cb573c109ff9eafa96f3c8d4febfd8a0774e8bba32702dd6dde", size = 11608055, upload-time = "2025-09-19T15:16:23.736Z" }, ] [[package]] name = "ty" -version = "0.0.1a20" +version = "0.0.1a22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7a/82/a5e3b4bc5280ec49c4b0b43d0ff727d58c7df128752c9c6f97ad0b5f575f/ty-0.0.1a20.tar.gz", hash = "sha256:933b65a152f277aa0e23ba9027e5df2c2cc09e18293e87f2a918658634db5f15", size = 4194773, upload-time = "2025-09-03T12:35:46.775Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/87/eab73cdc990d1141b60237379975efc0e913bfa0d19083daab0f497444a6/ty-0.0.1a22.tar.gz", hash = "sha256:b20ec5362830a1e9e05654c15e88607fdbb45325ec130a9a364c6dd412ecbf55", size = 4312182, upload-time = "2025-10-10T13:07:15.88Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/c8/f7d39392043d5c04936f6cad90e50eb661965ed092ca4bfc01db917d7b8a/ty-0.0.1a20-py3-none-linux_armv6l.whl", hash = "sha256:f73a7aca1f0d38af4d6999b375eb00553f3bfcba102ae976756cc142e14f3450", size = 8443599, upload-time = "2025-09-03T12:35:04.289Z" }, - { url = "https://files.pythonhosted.org/packages/1e/57/5aec78f9b8a677b7439ccded7d66c3361e61247e0f6b14e659b00dd01008/ty-0.0.1a20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cad12c857ea4b97bf61e02f6796e13061ccca5e41f054cbd657862d80aa43bae", size = 8618102, upload-time = "2025-09-03T12:35:07.448Z" }, - { url = "https://files.pythonhosted.org/packages/15/20/50c9107d93cdb55676473d9dc4e2339af6af606660c9428d3b86a1b2a476/ty-0.0.1a20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f153b65c7fcb6b8b59547ddb6353761b3e8d8bb6f0edd15e3e3ac14405949f7a", size = 8192167, upload-time = "2025-09-03T12:35:09.706Z" }, - { url = "https://files.pythonhosted.org/packages/85/28/018b2f330109cee19e81c5ca9df3dc29f06c5778440eb9af05d4550c4302/ty-0.0.1a20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c4336987a6a781d4392a9fd7b3a39edb7e4f3dd4f860e03f46c932b52aefa2", size = 8349256, upload-time = "2025-09-03T12:35:11.76Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c9/2f8797a05587158f52b142278796ffd72c893bc5ad41840fce5aeb65c6f2/ty-0.0.1a20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ff75cd4c744d09914e8c9db8d99e02f82c9379ad56b0a3fc4c5c9c923cfa84e", size = 8271214, upload-time = "2025-09-03T12:35:13.741Z" }, - { url = "https://files.pythonhosted.org/packages/30/d4/2cac5e5eb9ee51941358cb3139aadadb59520cfaec94e4fcd2b166969748/ty-0.0.1a20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e26437772be7f7808868701f2bf9e14e706a6ec4c7d02dbd377ff94d7ba60c11", size = 9264939, upload-time = "2025-09-03T12:35:16.896Z" }, - { url = "https://files.pythonhosted.org/packages/93/96/a6f2b54e484b2c6a5488f217882237dbdf10f0fdbdb6cd31333d57afe494/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83a7ee12465841619b5eb3ca962ffc7d576bb1c1ac812638681aee241acbfbbe", size = 9743137, upload-time = "2025-09-03T12:35:19.799Z" }, - { url = "https://files.pythonhosted.org/packages/6e/67/95b40dcbec3d222f3af5fe5dd1ce066d42f8a25a2f70d5724490457048e7/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:726d0738be4459ac7ffae312ba96c5f486d6cbc082723f322555d7cba9397871", size = 9368153, upload-time = "2025-09-03T12:35:22.569Z" }, - 
{ url = "https://files.pythonhosted.org/packages/2c/24/689fa4c4270b9ef9a53dc2b1d6ffade259ba2c4127e451f0629e130ea46a/ty-0.0.1a20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b481f26513f38543df514189fb16744690bcba8d23afee95a01927d93b46e36", size = 9099637, upload-time = "2025-09-03T12:35:24.94Z" }, - { url = "https://files.pythonhosted.org/packages/a1/5b/913011cbf3ea4030097fb3c4ce751856114c9e1a5e1075561a4c5242af9b/ty-0.0.1a20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abbe3c02218c12228b1d7c5f98c57240029cc3bcb15b6997b707c19be3908c1", size = 8952000, upload-time = "2025-09-03T12:35:27.288Z" }, - { url = "https://files.pythonhosted.org/packages/df/f9/f5ba2ae455b20c5bb003f9940ef8142a8c4ed9e27de16e8f7472013609db/ty-0.0.1a20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fff51c75ee3f7cc6d7722f2f15789ef8ffe6fd2af70e7269ac785763c906688e", size = 8217938, upload-time = "2025-09-03T12:35:29.54Z" }, - { url = "https://files.pythonhosted.org/packages/eb/62/17002cf9032f0981cdb8c898d02422c095c30eefd69ca62a8b705d15bd0f/ty-0.0.1a20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b4124ab75e0e6f09fe7bc9df4a77ee43c5e0ef7e61b0c149d7c089d971437cbd", size = 8292369, upload-time = "2025-09-03T12:35:31.748Z" }, - { url = "https://files.pythonhosted.org/packages/28/d6/0879b1fb66afe1d01d45c7658f3849aa641ac4ea10679404094f3b40053e/ty-0.0.1a20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8a138fa4f74e6ed34e9fd14652d132409700c7ff57682c2fed656109ebfba42f", size = 8811973, upload-time = "2025-09-03T12:35:33.997Z" }, - { url = "https://files.pythonhosted.org/packages/60/1e/70bf0348cfe8ba5f7532983f53c508c293ddf5fa9f942ed79a3c4d576df3/ty-0.0.1a20-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8eff8871d6b88d150e2a67beba2c57048f20c090c219f38ed02eebaada04c124", size = 9010990, upload-time = "2025-09-03T12:35:36.766Z" }, - { url = "https://files.pythonhosted.org/packages/b7/ca/03d85c7650359247b1ca3f38a0d869a608ef540450151920e7014ed58292/ty-0.0.1a20-py3-none-win32.whl", hash = "sha256:3c2ace3a22fab4bd79f84c74e3dab26e798bfba7006bea4008d6321c1bd6efc6", size = 8100746, upload-time = "2025-09-03T12:35:40.007Z" }, - { url = "https://files.pythonhosted.org/packages/94/53/7a1937b8c7a66d0c8ed7493de49ed454a850396fe137d2ae12ed247e0b2f/ty-0.0.1a20-py3-none-win_amd64.whl", hash = "sha256:f41e77ff118da3385915e13c3f366b3a2f823461de54abd2e0ca72b170ba0f19", size = 8748861, upload-time = "2025-09-03T12:35:42.175Z" }, - { url = "https://files.pythonhosted.org/packages/27/36/5a3a70c5d497d3332f9e63cabc9c6f13484783b832fecc393f4f1c0c4aa8/ty-0.0.1a20-py3-none-win_arm64.whl", hash = "sha256:d8ac1c5a14cda5fad1a8b53959d9a5d979fe16ce1cc2785ea8676fed143ac85f", size = 8269906, upload-time = "2025-09-03T12:35:45.045Z" }, + { url = "https://files.pythonhosted.org/packages/4d/30/83e2dbfbc70de8a1932b19daf05ce803d7d76cdc6251de1519a49cf1c27d/ty-0.0.1a22-py3-none-linux_armv6l.whl", hash = "sha256:6efba0c777881d2d072fa7375a64ad20357e825eff2a0b6ff9ec80399a04253b", size = 8581795, upload-time = "2025-10-10T13:06:44.396Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8c/5193534fc4a3569f517408828d077b26d6280fe8c2dd0bdc63db4403dcdb/ty-0.0.1a22-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2ada020eebe1b44403affdf45cd5c8d3fb8312c3e80469d795690093c0921f55", size = 8682602, upload-time = "2025-10-10T13:06:46.44Z" }, + { url = "https://files.pythonhosted.org/packages/22/4a/7ba53493bf37b61d3e0dfe6df910e6bc74c40d16c3effd84e15c0863d34e/ty-0.0.1a22-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:ed4f11f1a5824ea10d3e46b1990d092c3f341b1d492c357d23bed2ac347fd253", size = 8278839, upload-time = "2025-10-10T13:06:48.688Z" }, + { url = "https://files.pythonhosted.org/packages/52/0a/d9862c41b9615de56d2158bfbb5177dbf5a65e94922d3dd13855f48cb91b/ty-0.0.1a22-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56f48d8f94292909d596dbeb56ff7f9f070bd316aa628b45c02ca2b2f5797f31", size = 8421483, upload-time = "2025-10-10T13:06:50.75Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cb/3ebe0e45b80724d4c2f849fdf304179727fd06df7fee7cd12fe6c3efe49d/ty-0.0.1a22-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:733e9ac22885b6574de26bdbae439c960a06acc825a938d3780c9d498bb65339", size = 8419225, upload-time = "2025-10-10T13:06:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b5/da65f3f8ad31d881ca9987a3f6f26069a0cc649c9354adb7453ca62116bb/ty-0.0.1a22-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5135d662484e56809c77b3343614005585caadaa5c1cf643ed6a09303497652b", size = 9352336, upload-time = "2025-10-10T13:06:54.476Z" }, + { url = "https://files.pythonhosted.org/packages/a3/24/9c46f2eb16734ab0fcf3291486b1c5c528a1569f94541dc1f19f97dd2a5b/ty-0.0.1a22-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:87f297f99a98154d33a3f21991979418c65d8bf480f6a1bad1e54d46d2dc7df7", size = 9857840, upload-time = "2025-10-10T13:06:56.514Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ae/930c94bbbe5c049eae5355a197c39522844f55c7ab7fccd0ba061f618541/ty-0.0.1a22-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3310217eaa4dccf20b7336fcbeb072097addc6fde0c9d3f791dea437af0aa6dc", size = 9452611, upload-time = "2025-10-10T13:06:58.154Z" }, + { url = "https://files.pythonhosted.org/packages/a2/80/d8f594438465c352cf0ebd4072f5ca3be2871153a3cd279ed2f35ecd487c/ty-0.0.1a22-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b032e81012bf5228fd65f01b50e29eb409534b6aac28ee5c48ee3b7b860ddf", size = 9214875, upload-time = "2025-10-10T13:06:59.861Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/f852fb20ac27707de495c39a02aeb056e3368833b7e12888d43b1f61594d/ty-0.0.1a22-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3ffda8149cab0000a21e7a078142073e27a1a9ac03b9a0837aa2f53d1fbebcb", size = 8906715, upload-time = "2025-10-10T13:07:01.926Z" }, + { url = "https://files.pythonhosted.org/packages/40/4d/0e0b85b4179891cc3067a6e717f5161921c07873a4f545963fdf1dd3619c/ty-0.0.1a22-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:afa512e7dc78f0cf0b55f87394968ba59c46993c67bc0ef295962144fea85b12", size = 8350873, upload-time = "2025-10-10T13:07:03.999Z" }, + { url = "https://files.pythonhosted.org/packages/a1/1f/e70c63e12b4a0d97d4fd6f872dd199113666ad1b236e18838fa5e5d5502d/ty-0.0.1a22-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:069cdbbea6025f7ebbb5e9043c8d0daf760358df46df8304ef5ca5bb3e320aef", size = 8442568, upload-time = "2025-10-10T13:07:05.745Z" }, + { url = "https://files.pythonhosted.org/packages/de/3b/55518906cb3598f2b99ff1e86c838d77d006cab70cdd2a0a625d02ccb52c/ty-0.0.1a22-py3-none-musllinux_1_2_i686.whl", hash = "sha256:67d31d902e6fd67a4b3523604f635e71d2ec55acfb9118f984600584bfe0ff2a", size = 8896775, upload-time = "2025-10-10T13:07:08.02Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ea/60c654c27931bf84fa9cb463a4c4c49e8869c052fa607a6e930be717b619/ty-0.0.1a22-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:f9e154f262162e6f76b01f318e469ac6c22ffce22b010c396ed34e81d8369821", size = 9054544, upload-time = "2025-10-10T13:07:09.675Z" }, + { url = "https://files.pythonhosted.org/packages/6c/60/9a6d5530d6829ccf656e6ae0fb13d70a4e2514f4fb8910266ebd54286620/ty-0.0.1a22-py3-none-win32.whl", hash = "sha256:37525433ca7b02a8fca4b8fa9dcde818bf3a413b539b9dbc8f7b39d124eb7c49", size = 8165703, upload-time = "2025-10-10T13:07:11.378Z" }, + { url = "https://files.pythonhosted.org/packages/14/9c/ac08c832643850d4e18cbc959abc69cd51d531fe11bdb691098b3cf2f562/ty-0.0.1a22-py3-none-win_amd64.whl", hash = "sha256:75d21cdeba8bcef247af89518d7ce98079cac4a55c4160cb76682ea40a18b92c", size = 8828319, upload-time = "2025-10-10T13:07:12.815Z" }, + { url = "https://files.pythonhosted.org/packages/22/df/38068fc44e3cfb455aeb41d0ff1850a4d3c9988010466d4a8d19860b8b9a/ty-0.0.1a22-py3-none-win_arm64.whl", hash = "sha256:1c7f040fe311e9696917417434c2a0e58402235be842c508002c6a2eff1398b0", size = 8367136, upload-time = "2025-10-10T13:07:14.518Z" }, ] [[package]] name = "typer" -version = "0.17.4" +version = "0.19.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6115,9 +6097,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/92/e8/2a73ccf9874ec4c7638f172efc8972ceab13a0e3480b389d6ed822f7a822/typer-0.17.4.tar.gz", hash = "sha256:b77dc07d849312fd2bb5e7f20a7af8985c7ec360c45b051ed5412f64d8dc1580", size = 103734, upload-time = "2025-09-05T18:14:40.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/72/6b3e70d32e89a5cbb6a4513726c1ae8762165b027af569289e19ec08edd8/typer-0.17.4-py3-none-any.whl", hash = "sha256:015534a6edaa450e7007eba705d5c18c3349dcea50a6ad79a5ed530967575824", size = 46643, upload-time = "2025-09-05T18:14:39.166Z" }, + { url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" }, ] [[package]] @@ -6131,11 +6113,11 @@ wheels = [ [[package]] name = "types-awscrt" -version = "0.27.6" +version = "0.28.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/ce/5d84526a39f44c420ce61b16654193f8437d74b54f21597ea2ac65d89954/types_awscrt-0.27.6.tar.gz", hash = "sha256:9d3f1865a93b8b2c32f137514ac88cb048b5bc438739945ba19d972698995bfb", size = 16937, upload-time = "2025-08-13T01:54:54.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/19/a3a6377c9e2e389c1421c033a1830c29cac08f2e1e05a082ea84eb22c75f/types_awscrt-0.28.1.tar.gz", hash = "sha256:66d77ec283e1dc907526a44511a12624118723a396c36d3f3dd9855cb614ce14", size = 17410, upload-time = "2025-10-11T21:55:07.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/af/e3d20e3e81d235b3964846adf46a334645a8a9b25a0d3d472743eb079552/types_awscrt-0.27.6-py3-none-any.whl", hash = "sha256:18aced46da00a57f02eb97637a32e5894dc5aa3dc6a905ba3e5ed85b9f3c526b", size = 39626, upload-time = "2025-08-13T01:54:53.454Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/c7/0266b797d19b82aebe0e177efe35de7aabdc192bc1605ce3309331f0a505/types_awscrt-0.28.1-py3-none-any.whl", hash = "sha256:d88f43ef779f90b841ba99badb72fe153077225a4e426ae79e943184827b4443", size = 41851, upload-time = "2025-10-11T21:55:06.235Z" }, ] [[package]] @@ -6161,14 +6143,14 @@ wheels = [ [[package]] name = "types-cffi" -version = "1.17.0.20250822" +version = "1.17.0.20250915" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/0c/76a48cb6e742cac4d61a4ec632dd30635b6d302f5acdc2c0a27572ac7ae3/types_cffi-1.17.0.20250822.tar.gz", hash = "sha256:bf6f5a381ea49da7ff895fae69711271e6192c434470ce6139bf2b2e0d0fa08d", size = 17130, upload-time = "2025-08-22T03:04:02.445Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/f7/68029931e7539e3246b33386a19c475f234c71d2a878411847b20bb31960/types_cffi-1.17.0.20250822-py3-none-any.whl", hash = "sha256:183dd76c1871a48936d7b931488e41f0f25a7463abe10b5816be275fc11506d5", size = 20083, upload-time = "2025-08-22T03:04:01.466Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112, upload-time = "2025-09-15T03:01:24.187Z" }, ] [[package]] @@ -6256,11 +6238,11 @@ wheels = [ [[package]] name = "types-html5lib" -version = "1.1.11.20250809" +version = "1.1.11.20250917" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/ab/6aa4c487ae6f4f9da5153143bdc9e9b4fbc2b105df7ef8127fb920dc1f21/types_html5lib-1.1.11.20250809.tar.gz", hash = "sha256:7976ec7426bb009997dc5e072bca3ed988dd747d0cbfe093c7dfbd3d5ec8bf57", size = 16793, upload-time = "2025-08-09T03:14:20.819Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/4b/a970718e8bd9324ee8fb8eaf02ff069f6d03c20d4523bb4232892ecc3d06/types_html5lib-1.1.11.20250917.tar.gz", hash = "sha256:7b52743377f33f9b4fd7385afbd2d457b8864ee51f90ff2a795ad9e8c053373a", size = 16868, upload-time = "2025-09-17T02:47:41.18Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/05/328a2d6ecbd8aa3e16512600da78b1fe4605125896794a21824f3cac6f14/types_html5lib-1.1.11.20250809-py3-none-any.whl", hash = "sha256:e5f48ab670ae4cdeafd88bbc47113d8126dcf08318e0b8d70df26ecc13eca9b6", size = 22867, upload-time = "2025-08-09T03:14:20.048Z" }, + { url = "https://files.pythonhosted.org/packages/78/8a/da91a9c64dcb5e69beb567519857411996d8ecae9f6f128bcef8260e7a8d/types_html5lib-1.1.11.20250917-py3-none-any.whl", hash = "sha256:b294fd06d60da205daeb2f615485ca4d475088d2eff1009cf427f4a80fcd5346", size = 22908, upload-time = "2025-09-17T02:47:40.39Z" }, ] [[package]] @@ -6322,20 +6304,20 @@ wheels = [ [[package]] name = "types-openpyxl" -version = "3.1.5.20250822" +version = "3.1.5.20250919" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/7f/ea358482217448deafdb9232f198603511d2efa99e429822256f2b38975a/types_openpyxl-3.1.5.20250822.tar.gz", hash = 
"sha256:c8704a163e3798290d182c13c75da85f68cd97ff9b35f0ebfb94cf72f8b67bb3", size = 100858, upload-time = "2025-08-22T03:03:31.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/12/8bc4a25d49f1e4b7bbca868daa3ee80b1983d8137b4986867b5b65ab2ecd/types_openpyxl-3.1.5.20250919.tar.gz", hash = "sha256:232b5906773eebace1509b8994cdadda043f692cfdba9bfbb86ca921d54d32d7", size = 100880, upload-time = "2025-09-19T02:54:39.997Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/e8/cac4728e8dcbeb69d6de7de26bb9edb508e9f5c82476ecda22b58b939e60/types_openpyxl-3.1.5.20250822-py3-none-any.whl", hash = "sha256:da7a430d99c48347acf2dc351695f9db6ff90ecb761fed577b4a98fef2d0f831", size = 166093, upload-time = "2025-08-22T03:03:30.686Z" }, + { url = "https://files.pythonhosted.org/packages/36/3c/d49cf3f4489a10e9ddefde18fd258f120754c5825d06d145d9a0aaac770b/types_openpyxl-3.1.5.20250919-py3-none-any.whl", hash = "sha256:bd06f18b12fd5e1c9f0b666ee6151d8140216afa7496f7ebb9fe9d33a1a3ce99", size = 166078, upload-time = "2025-09-19T02:54:38.657Z" }, ] [[package]] name = "types-pexpect" -version = "4.9.0.20250809" +version = "4.9.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/a2/29564e69dee62f0f887ba7bfffa82fa4975504952e6199b218d3b403becd/types_pexpect-4.9.0.20250809.tar.gz", hash = "sha256:17a53c785b847c90d0be9149b00b0254e6e92c21cd856e853dac810ddb20101f", size = 13240, upload-time = "2025-08-09T03:15:04.554Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/e6/cc43e306dc7de14ec7861c24ac4957f688741ae39ae685049695d796b587/types_pexpect-4.9.0.20250916.tar.gz", hash = "sha256:69e5fed6199687a730a572de780a5749248a4c5df2ff1521e194563475c9928d", size = 13322, upload-time = "2025-09-16T02:49:25.61Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/1b/4d557287e6672feb749cf0d8ef5eb19189aff043e73e509e3775febc1cf1/types_pexpect-4.9.0.20250809-py3-none-any.whl", hash = "sha256:d19d206b8a7c282dac9376f26f072e036d22e9cf3e7d8eba3f477500b1f39101", size = 17039, upload-time = "2025-08-09T03:15:03.528Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6d/7740e235a9fb2570968da7d386d7feb511ce68cd23472402ff8cdf7fc78f/types_pexpect-4.9.0.20250916-py3-none-any.whl", hash = "sha256:7fa43cb96042ac58bc74f7c28e5d85782be0ee01344149886849e9d90936fe8a", size = 17057, upload-time = "2025-09-16T02:49:24.546Z" }, ] [[package]] @@ -6349,20 +6331,20 @@ wheels = [ [[package]] name = "types-psutil" -version = "7.0.0.20250822" +version = "7.0.0.20251001" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6d/aa/09699c829d7cc4624138d3ae67eecd4de9574e55729b1c63ca3e5a657f86/types_psutil-7.0.0.20250822.tar.gz", hash = "sha256:226cbc0c0ea9cc0a50b8abcc1d91a26c876dcb40be238131f697883690419698", size = 20358, upload-time = "2025-08-22T03:02:04.556Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/91/b020f9100b196a1f247cd12575f68dcdad94f032c1e0c42987d7632142ce/types_psutil-7.0.0.20251001.tar.gz", hash = "sha256:60d696200ddae28677e7d88cdebd6e960294e85adefbaafe0f6e5d0e7b4c1963", size = 20469, upload-time = "2025-10-01T03:04:21.292Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/46/45006309e20859e12c024d91bb913e6b89a706cd6f9377031c9f7e274ece/types_psutil-7.0.0.20250822-py3-none-any.whl", hash = "sha256:81c82f01aba5a4510b9d8b28154f577b780be75a08954aed074aa064666edc09", size = 23110, upload-time = "2025-08-22T03:02:03.38Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/99/50f30e0b648e6f583165cb2e535b0256a02a03efa4868cb2f017ad25b3d8/types_psutil-7.0.0.20251001-py3-none-any.whl", hash = "sha256:adc31de8386d31c61bd4123112fd51e2c700c7502a001cad72a3d56ba6b463d1", size = 23164, upload-time = "2025-10-01T03:04:20.089Z" }, ] [[package]] name = "types-psycopg2" -version = "2.9.21.20250809" +version = "2.9.21.20251012" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/d0/66f3f04bab48bfdb2c8b795b2b3e75eb20c7d1fb0516916db3be6aa4a683/types_psycopg2-2.9.21.20250809.tar.gz", hash = "sha256:b7c2cbdcf7c0bd16240f59ba694347329b0463e43398de69784ea4dee45f3c6d", size = 26539, upload-time = "2025-08-09T03:14:54.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/b3/2d09eaf35a084cffd329c584970a3fa07101ca465c13cad1576d7c392587/types_psycopg2-2.9.21.20251012.tar.gz", hash = "sha256:4cdafd38927da0cfde49804f39ab85afd9c6e9c492800e42f1f0c1a1b0312935", size = 26710, upload-time = "2025-10-12T02:55:39.5Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/98/182497602921c47fadc8470d51a32e5c75343c8931c0b572a5c4ae3b948b/types_psycopg2-2.9.21.20250809-py3-none-any.whl", hash = "sha256:59b7b0ed56dcae9efae62b8373497274fc1a0484bdc5135cdacbe5a8f44e1d7b", size = 24824, upload-time = "2025-08-09T03:14:53.908Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/05feaf8cb51159f2c0af04b871dab7e98a2f83a3622f5f216331d2dd924c/types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e", size = 24883, upload-time = "2025-10-12T02:55:38.439Z" }, ] [[package]] @@ -6379,11 +6361,11 @@ wheels = [ [[package]] name = "types-pymysql" -version = "1.1.0.20250909" +version = "1.1.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/0f/bb4331221fd560379ec702d61a11d5a5eead9a2866bb39eae294bde29988/types_pymysql-1.1.0.20250909.tar.gz", hash = "sha256:5ba7230425635b8c59316353701b99a087b949e8002dfeff652be0b62cee445b", size = 22189, upload-time = "2025-09-09T02:55:31.039Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/12/bda1d977c07e0e47502bede1c44a986dd45946494d89e005e04cdeb0f8de/types_pymysql-1.1.0.20250916.tar.gz", hash = "sha256:98d75731795fcc06723a192786662bdfa760e1e00f22809c104fbb47bac5e29b", size = 22131, upload-time = "2025-09-16T02:49:22.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/35/5681d881506a31bbbd9f7d5f6edcbf65489835081965b539b0802a665036/types_pymysql-1.1.0.20250909-py3-none-any.whl", hash = "sha256:c9957d4c10a31748636da5c16b0a0eef6751354d05adcd1b86acb27e8df36fb6", size = 23179, upload-time = "2025-09-09T02:55:29.873Z" }, + { url = "https://files.pythonhosted.org/packages/21/eb/a225e32a6e7b196af67ab2f1b07363595f63255374cc3b88bfdab53b4ee8/types_pymysql-1.1.0.20250916-py3-none-any.whl", hash = "sha256:873eb9836bb5e3de4368cc7010ca72775f86e9692a5c7810f8c7f48da082e55b", size = 23063, upload-time = "2025-09-16T02:49:20.933Z" }, ] [[package]] @@ -6401,11 +6383,11 @@ wheels = [ [[package]] name = "types-python-dateutil" -version = "2.9.0.20250822" +version = "2.9.0.20251008" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0c/0a/775f8551665992204c756be326f3575abba58c4a3a52eef9909ef4536428/types_python_dateutil-2.9.0.20250822.tar.gz", hash = "sha256:84c92c34bd8e68b117bff742bc00b692a1e8531262d4507b33afcc9f7716cd53", size = 
16084, upload-time = "2025-08-22T03:02:00.613Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/83/24ed25dd0c6277a1a170c180ad9eef5879ecc9a4745b58d7905a4588c80d/types_python_dateutil-2.9.0.20251008.tar.gz", hash = "sha256:c3826289c170c93ebd8360c3485311187df740166dbab9dd3b792e69f2bc1f9c", size = 16128, upload-time = "2025-10-08T02:51:34.93Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/d9/a29dfa84363e88b053bf85a8b7f212a04f0d7343a4d24933baa45c06e08b/types_python_dateutil-2.9.0.20250822-py3-none-any.whl", hash = "sha256:849d52b737e10a6dc6621d2bd7940ec7c65fcb69e6aa2882acf4e56b2b508ddc", size = 17892, upload-time = "2025-08-22T03:01:59.436Z" }, + { url = "https://files.pythonhosted.org/packages/da/af/5d24b8d49ef358468ecfdff5c556adf37f4fd28e336b96f923661a808329/types_python_dateutil-2.9.0.20251008-py3-none-any.whl", hash = "sha256:b9a5232c8921cf7661b29c163ccc56055c418ab2c6eabe8f917cbcc73a4c4157", size = 17934, upload-time = "2025-10-08T02:51:33.55Z" }, ] [[package]] @@ -6437,11 +6419,11 @@ wheels = [ [[package]] name = "types-pyyaml" -version = "6.0.12.20250822" +version = "6.0.12.20250915" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/85/90a442e538359ab5c9e30de415006fb22567aa4301c908c09f19e42975c2/types_pyyaml-6.0.12.20250822.tar.gz", hash = "sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413", size = 17481, upload-time = "2025-08-22T03:02:16.209Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/8e/8f0aca667c97c0d76024b37cffa39e76e2ce39ca54a38f285a64e6ae33ba/types_pyyaml-6.0.12.20250822-py3-none-any.whl", hash = "sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098", size = 20314, upload-time = "2025-08-22T03:02:15.002Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, ] [[package]] @@ -6468,36 +6450,23 @@ wheels = [ [[package]] name = "types-requests" -version = "2.32.4.20250809" +version = "2.32.4.20250913" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = 
"2025-08-09T03:17:09.716Z" }, -] - -[[package]] -name = "types-requests-oauthlib" -version = "2.0.0.20250809" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "types-oauthlib" }, - { name = "types-requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ed/40/5eca857a2dbda0fedd69b7fd3f51cb0b6ece8d448327d29f0ae54612ec98/types_requests_oauthlib-2.0.0.20250809.tar.gz", hash = "sha256:f3b9b31e0394fe2c362f0d44bc9ef6d5c150a298d01089513cd54a51daec37a2", size = 11008, upload-time = "2025-08-09T03:17:50.705Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/38/8777f0ab409a7249777f230f6aefe0e9ba98355dc8b05fb31391fa30f312/types_requests_oauthlib-2.0.0.20250809-py3-none-any.whl", hash = "sha256:0d1af4907faf9f4a1b0f0afbc7ec488f1dd5561a2b5b6dad70f78091a1acfb76", size = 14319, upload-time = "2025-08-09T03:17:49.786Z" }, + { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, ] [[package]] name = "types-s3transfer" -version = "0.13.1" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/c5/23946fac96c9dd5815ec97afd1c8ad6d22efa76c04a79a4823f2f67692a5/types_s3transfer-0.13.1.tar.gz", hash = "sha256:ce488d79fdd7d3b9d39071939121eca814ec65de3aa36bdce1f9189c0a61cc80", size = 14181, upload-time = "2025-08-31T16:57:06.93Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/9b/8913198b7fc700acc1dcb84827137bb2922052e43dde0f4fb0ed2dc6f118/types_s3transfer-0.14.0.tar.gz", hash = "sha256:17f800a87c7eafab0434e9d87452c809c290ae906c2024c24261c564479e9c95", size = 14218, upload-time = "2025-10-11T21:11:27.892Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/dc/b3f9b5c93eed6ffe768f4972661250584d5e4f248b548029026964373bcd/types_s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:4ff730e464a3fd3785b5541f0f555c1bd02ad408cf82b6b7a95429f6b0d26b4a", size = 19617, upload-time = "2025-08-31T16:57:05.73Z" }, + { url = "https://files.pythonhosted.org/packages/92/c3/4dfb2e87c15ca582b7d956dfb7e549de1d005c758eb9a305e934e1b83fda/types_s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:108134854069a38b048e9b710b9b35904d22a9d0f37e4e1889c2e6b58e5b3253", size = 19697, upload-time = "2025-10-11T21:11:26.749Z" }, ] [[package]] @@ -6532,25 +6501,25 @@ wheels = [ [[package]] name = "types-six" -version = "1.17.0.20250515" +version = "1.17.0.20251009" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/78/344047eeced8d230140aa3d9503aa969acb61c6095e7308bbc1ff1de3865/types_six-1.17.0.20250515.tar.gz", hash = "sha256:f4f7f0398cb79304e88397336e642b15e96fbeacf5b96d7625da366b069d2d18", size = 15598, upload-time = "2025-05-15T03:04:19.806Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/f7/448215bc7695cfa0c8a7e0dcfa54fe31b1d52fb87004fed32e659dd85c80/types_six-1.17.0.20251009.tar.gz", hash = "sha256:efe03064ecd0ffb0f7afe133990a2398d8493d8d1c1cc10ff3dfe476d57ba44f", size = 15552, upload-time = "2025-10-09T02:54:26.02Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/85/5ee1c8e35b33b9c8ea1816d5a4e119c27f8bb1539b73b1f636f07aa64750/types_six-1.17.0.20250515-py3-none-any.whl", hash = 
"sha256:adfaa9568caf35e03d80ffa4ed765c33b282579c869b40bf4b6009c7d8db3fb1", size = 19987, upload-time = "2025-05-15T03:04:18.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2f/94baa623421940e3eb5d2fc63570ebb046f2bb4d9573b8787edab3ed2526/types_six-1.17.0.20251009-py3-none-any.whl", hash = "sha256:2494f4c2a58ada0edfe01ea84b58468732e43394c572d9cf5b1dd06d86c487a3", size = 19935, upload-time = "2025-10-09T02:54:25.096Z" }, ] [[package]] name = "types-tensorflow" -version = "2.18.0.20250809" +version = "2.18.0.20251008" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "types-protobuf" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/84/d350f0170a043283cd805344658522b00d769d04753b5a1685c1c8a06731/types_tensorflow-2.18.0.20250809.tar.gz", hash = "sha256:9ed54cbb24c8b12d8c59b9a8afbf7c5f2d46d5e2bf42d00ececaaa79e21d7ed1", size = 257495, upload-time = "2025-08-09T03:17:36.093Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/0a/13bde03fb5a23faaadcca2d6914f865e444334133902310ea05e6ade780c/types_tensorflow-2.18.0.20251008.tar.gz", hash = "sha256:8db03d4dd391a362e2ea796ffdbccb03c082127606d4d852edb7ed9504745933", size = 257550, upload-time = "2025-10-08T02:51:51.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/1c/cc50c17971643a92d5973d35a3d35f017f9d759d95fb7fdafa568a59ba9c/types_tensorflow-2.18.0.20250809-py3-none-any.whl", hash = "sha256:e9aae9da92ddb9991ebd27117db2c2dffe29d7d019db2a70166fd0d099c4fa4f", size = 329000, upload-time = "2025-08-09T03:17:35.02Z" }, + { url = "https://files.pythonhosted.org/packages/66/cc/e50e49db621b0cf03c1f3d10be47389de41a02dc9924c3a83a9c1a55bf28/types_tensorflow-2.18.0.20251008-py3-none-any.whl", hash = "sha256:d6b0dd4d81ac6d9c5af803ebcc8ce0f65c5850c063e8b9789dc828898944b5f4", size = 329023, upload-time = "2025-10-08T02:51:50.024Z" }, ] [[package]] @@ -6598,14 +6567,14 @@ wheels = [ [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] @@ -6747,11 +6716,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.5.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" }, ] [[package]] @@ -6765,15 +6734,15 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, ] [package.optional-dependencies] @@ -6845,7 +6814,7 @@ wheels = [ [[package]] name = "wandb" -version = "0.21.4" +version = "0.22.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6859,17 +6828,17 @@ dependencies = [ { name = "sentry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/59/a8/aaa3f3f8e410f34442466aac10b1891b3084d35b98aef59ebcb4c0efb941/wandb-0.21.4.tar.gz", hash = "sha256:b350d50973409658deb455010fafcfa81e6be3470232e316286319e839ffb67b", size = 40175929, upload-time = "2025-09-11T21:14:29.161Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/a8/680bd77e11a278e6c14a2cb4646e8ab9525b2baaa81c3d12dc0f616aa4aa/wandb-0.22.2.tar.gz", hash = "sha256:510f5a1ac30d16921c36c3b932da852f046641d4aee98a86a7f5ec03a6e95bda", size = 41401439, upload-time = "2025-10-07T19:54:21.88Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d2/6b/3a8d9db18a4c4568599a8792c0c8b1f422d9864c7123e8301a9477fbf0ac/wandb-0.21.4-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:c681ef7adb09925251d8d995c58aa76ae86a46dbf8de3b67353ad99fdef232d5", size = 18845369, upload-time = "2025-09-11T21:14:02.879Z" }, - { url = "https://files.pythonhosted.org/packages/60/e0/d7d6818938ec6958c93d979f9a90ea3d06bdc41e130b30f8cd89ae03c245/wandb-0.21.4-py3-none-macosx_12_0_arm64.whl", hash = "sha256:d35acc65c10bb7ac55d1331f7b1b8ab761f368f7b051131515f081a56ea5febc", size = 18339122, upload-time = "2025-09-11T21:14:06.455Z" }, - { url = "https://files.pythonhosted.org/packages/13/29/9bb8ed4adf32bed30e4d5df74d956dd1e93b6fd4bbc29dbe84167c84804b/wandb-0.21.4-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:765e66b57b7be5f393ecebd9a9d2c382c9f979d19cdee4a3f118eaafed43fca1", size = 19081975, upload-time = "2025-09-11T21:14:09.317Z" }, - { url = "https://files.pythonhosted.org/packages/30/6e/4aa33bc2c56b70c0116e73687c72c7a674f4072442633b3b23270d2215e3/wandb-0.21.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06127ec49245d12fdb3922c1eca1ab611cefc94adabeaaaba7b069707c516cba", size = 18161358, upload-time = "2025-09-11T21:14:12.092Z" }, - { url = "https://files.pythonhosted.org/packages/f7/56/d9f845ecfd5e078cf637cb29d8abe3350b8a174924c54086168783454a8f/wandb-0.21.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48d4f65f1be5f5a25b868695e09cdbfe481678220df349a8c2cbed3992fb497f", size = 19602680, upload-time = "2025-09-11T21:14:14.987Z" }, - { url = "https://files.pythonhosted.org/packages/68/ea/237a3c2b679a35e02e577c5bf844d6a221a7d32925ab8d5230529e9f2841/wandb-0.21.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ebd11f78351a3ca22caa1045146a6d2ad9e62fed6d0de2e67a0db5710d75103a", size = 18166392, upload-time = "2025-09-11T21:14:17.478Z" }, - { url = "https://files.pythonhosted.org/packages/12/e3/dbf2c575c79c99d94f16ce1a2cbbb2529d5029a76348c1ddac7e47f6873f/wandb-0.21.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:595b9e77591a805653e05db8b892805ee0a5317d147ef4976353e4f1cc16ebdc", size = 19678800, upload-time = "2025-09-11T21:14:20.264Z" }, - { url = "https://files.pythonhosted.org/packages/fa/eb/4ed04879d697772b8eb251c0e5af9a4ff7e2cc2b3fcd4b8eee91253ec2f1/wandb-0.21.4-py3-none-win32.whl", hash = "sha256:f9c86eb7eb7d40c6441533428188b1ae3205674e80c940792d850e2c1fe8d31e", size = 18738950, upload-time = "2025-09-11T21:14:23.08Z" }, - { url = "https://files.pythonhosted.org/packages/c3/4a/86c5e19600cb6a616a45f133c26826b46133499cd72d592772929d530ccd/wandb-0.21.4-py3-none-win_amd64.whl", hash = "sha256:2da3d5bb310a9f9fb7f680f4aef285348095a4cc6d1ce22b7343ba4e3fffcd84", size = 18738953, upload-time = "2025-09-11T21:14:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/e7/b3/8c637fb594cfd574ce9c9f7d0ac2f2d12742eb38ec59dcbb713beae95343/wandb-0.22.2-py3-none-macosx_12_0_arm64.whl", hash = "sha256:2e29c9fa4462b5411b2cd2175ae33eff4309c91de7c426bca6bc8e7abc7e5dec", size = 18677549, upload-time = "2025-10-07T19:54:00.839Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f3/e309a726eaebddad6b8d9a73a50891e5796962ec8a091bb6a61d31692d1e/wandb-0.22.2-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:c42d594cd7a9da4fd39ecdb0abbc081b61f304123277b2b6c4ba84283956fd21", size = 19715188, upload-time = "2025-10-07T19:54:03.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/73/fad59910215876008f4781b57d828d1b19b3677c9b46af615e7229746435/wandb-0.22.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5188d84e66d3fd584f3b3ae4d2a70e78f29403c0528e6aecaa4188a1fcf54d8", size = 18463148, upload-time = "2025-10-07T19:54:05.676Z" }, + { url = "https://files.pythonhosted.org/packages/87/11/572c1913b5b92e4c519f735adfae572b46f2d79d99ede63eec0d6a272d6e/wandb-0.22.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88ccd484af9f21cfc127976793c3cf66cfe1acd75bd8cd650086a64e88bac4bf", size = 19908645, upload-time = "2025-10-07T19:54:07.693Z" }, + { url = "https://files.pythonhosted.org/packages/6d/0d/133aa82f5a505ba638b4fda5014cefddfe7f1f6238ef4afc0871ec61c41f/wandb-0.22.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:abf0ed175e791af64110e0a0b99ce02bbbbd1017722bc32d3bc328efb86450cd", size = 18501348, upload-time = "2025-10-07T19:54:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d5/776203be2601872f01dacc6a5b4274106ec0db7cd3bf2cdb3b741f8fc932/wandb-0.22.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:44e77c56403b90bf3473a7ca3bfc4d42c636b7c0e31a5fb9cd0382f08302f74b", size = 20001756, upload-time = "2025-10-07T19:54:12.452Z" }, + { url = "https://files.pythonhosted.org/packages/30/43/ae3fa46e20b1d9a6508dd9abe716d57205c038ed4661c5c98ace48a60eac/wandb-0.22.2-py3-none-win32.whl", hash = "sha256:44d12bd379dbe15be5ceed6bdf23803d42f648ba0dd111297b4c47a3c7be6dbd", size = 19075950, upload-time = "2025-10-07T19:54:14.892Z" }, + { url = "https://files.pythonhosted.org/packages/09/59/c174321e868205f7a659d1e5ec51f546e62267296d6f4179bb9119294964/wandb-0.22.2-py3-none-win_amd64.whl", hash = "sha256:c95eb221bf316c0872f7ac55071856b9f25f95a2de983ada48acf653ce259386", size = 19075953, upload-time = "2025-10-07T19:54:16.837Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a2/c7c24fda78513cab5686949d8cb36459dbbccbbb4b2b6fc67237ece31a00/wandb-0.22.2-py3-none-win_arm64.whl", hash = "sha256:20d2ab9aa10445aab3d60914a980f002a4f66566e28b0cd156b1e462f0080a0d", size = 17383217, upload-time = "2025-10-07T19:54:19.384Z" }, ] [[package]] @@ -6915,11 +6884,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, ] [[package]] @@ -6948,16 +6917,20 
@@ wheels = [ [[package]] name = "weaviate-client" -version = "3.24.2" +version = "4.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, - { name = "requests" }, + { name = "deprecation" }, + { name = "grpcio" }, + { name = "httpx" }, + { name = "protobuf" }, + { name = "pydantic" }, { name = "validators" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/c1/3285a21d8885f2b09aabb65edb9a8e062a35c2d7175e1bb024fa096582ab/weaviate-client-3.24.2.tar.gz", hash = "sha256:6914c48c9a7e5ad0be9399271f9cb85d6f59ab77476c6d4e56a3925bf149edaa", size = 199332, upload-time = "2023-10-04T08:37:54.26Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019, upload-time = "2025-09-26T11:20:27.381Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/98/3136d05f93e30cf29e1db280eaadf766df18d812dfe7994bcced653b2340/weaviate_client-3.24.2-py3-none-any.whl", hash = "sha256:bc50ca5fcebcd48de0d00f66700b0cf7c31a97c4cd3d29b4036d77c5d1d9479b", size = 107968, upload-time = "2023-10-04T08:37:52.511Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763, upload-time = "2025-09-26T11:20:25.864Z" }, ] [[package]] @@ -6971,11 +6944,11 @@ wheels = [ [[package]] name = "websocket-client" -version = "1.8.0" +version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, ] [[package]] @@ -7084,20 +7057,20 @@ wheels = [ [[package]] name = "xlsxwriter" -version = "3.2.5" +version = "3.2.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/47/7704bac42ac6fe1710ae099b70e6a1e68ed173ef14792b647808c357da43/xlsxwriter-3.2.5.tar.gz", hash = "sha256:7e88469d607cdc920151c0ab3ce9cf1a83992d4b7bc730c5ffdd1a12115a7dbe", size = 213306, upload-time = "2025-06-17T08:59:14.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/2c/c06ef49dc36e7954e55b802a8b231770d286a9758b3d936bd1e04ce5ba88/xlsxwriter-3.2.9.tar.gz", hash 
= "sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c", size = 215940, upload-time = "2025-09-16T00:16:21.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/34/a22e6664211f0c8879521328000bdcae9bf6dbafa94a923e531f6d5b3f73/xlsxwriter-3.2.5-py3-none-any.whl", hash = "sha256:4f4824234e1eaf9d95df9a8fe974585ff91d0f5e3d3f12ace5b71e443c1c6abd", size = 172347, upload-time = "2025-06-17T08:59:13.453Z" }, + { url = "https://files.pythonhosted.org/packages/3a/0c/3662f4a66880196a590b202f0db82d919dd2f89e99a27fadef91c4a33d41/xlsxwriter-3.2.9-py3-none-any.whl", hash = "sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3", size = 175315, upload-time = "2025-09-16T00:16:20.108Z" }, ] [[package]] name = "xmltodict" -version = "0.15.1" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/7a/42f705c672e77dc3ce85a6823bb289055323aac30de7c4b9eca1e28b2c17/xmltodict-0.15.1.tar.gz", hash = "sha256:3d8d49127f3ce6979d40a36dbcad96f8bab106d232d24b49efdd4bd21716983c", size = 62984, upload-time = "2025-09-08T18:33:19.349Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725, upload-time = "2025-09-17T21:59:26.459Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/4e/001c53a22f6bd5f383f49915a53e40f0cab2d3f1884d968f3ae14be367b7/xmltodict-0.15.1-py2.py3-none-any.whl", hash = "sha256:dcd84b52f30a15be5ac4c9099a0cb234df8758624b035411e329c5c1e7a49089", size = 11260, upload-time = "2025-09-08T18:33:17.87Z" }, + { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893, upload-time = "2025-09-17T21:59:24.859Z" }, ] [[package]] @@ -7169,65 +7142,62 @@ wheels = [ [[package]] name = "zope-interface" -version = "8.0" +version = "8.0.1" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "setuptools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/21/a6af230243831459f7238764acb3086a9cf96dbf405d8084d30add1ee2e7/zope_interface-8.0.tar.gz", hash = "sha256:b14d5aac547e635af749ce20bf49a3f5f93b8a854d2a6b1e95d4d5e5dc618f7d", size = 253397, upload-time = "2025-09-12T07:17:13.571Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/3a/7fcf02178b8fad0a51e67e32765cd039ae505d054d744d76b8c2bbcba5ba/zope_interface-8.0.1.tar.gz", hash = "sha256:eba5610d042c3704a48222f7f7c6ab5b243ed26f917e2bc69379456b115e02d1", size = 253746, upload-time = "2025-09-25T05:55:51.285Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/6f/a16fc92b643313a55a0d2ccb040dd69048372f0a8f64107570256e664e5c/zope_interface-8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec1da7b9156ae000cea2d19bad83ddb5c50252f9d7b186da276d17768c67a3cb", size = 207652, upload-time = "2025-09-12T07:23:51.746Z" }, - { url = "https://files.pythonhosted.org/packages/01/0c/6bebd9417072c3eb6163228783cabb4890e738520b45562ade1cbf7d19d6/zope_interface-8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:160ba50022b342451baf516de3e3a2cd2d8c8dbac216803889a5eefa67083688", size = 208096, upload-time = "2025-09-12T07:23:52.895Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/f1/03c4d2b70ce98828760dfc19f34be62526ea8b7f57160a009d338f396eb4/zope_interface-8.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:879bb5bf937cde4acd738264e87f03c7bf7d45478f7c8b9dc417182b13d81f6c", size = 254770, upload-time = "2025-09-12T07:58:18.379Z" }, - { url = "https://files.pythonhosted.org/packages/bb/73/06400c668d7d334d2296d23b3dacace43f45d6e721c6f6d08ea512703ede/zope_interface-8.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fb931bf55c66a092c5fbfb82a0ff3cc3221149b185bde36f0afc48acb8dcd92", size = 259542, upload-time = "2025-09-12T08:00:27.632Z" }, - { url = "https://files.pythonhosted.org/packages/d9/28/565b5f41045aa520853410d33b420f605018207a854fba3d93ed85e7bef2/zope_interface-8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1858d1e5bb2c5ae766890708184a603eb484bb7454e306e967932a9f3c558b07", size = 260720, upload-time = "2025-09-12T08:29:19.238Z" }, - { url = "https://files.pythonhosted.org/packages/c5/46/6c6b0df12665fec622133932a361829b6e6fbe255e6ce01768eedbcb7fa0/zope_interface-8.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e88c66ebedd1e839082f308b8372a50ef19423e01ee2e09600b80e765a10234", size = 211914, upload-time = "2025-09-12T07:23:19.858Z" }, - { url = "https://files.pythonhosted.org/packages/ae/42/9c79e4b2172e2584727cbc35bba1ea6884c15f1a77fe2b80ed8358893bb2/zope_interface-8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b80447a3a5c7347f4ebf3e50de319c8d2a5dabd7de32f20899ac50fc275b145d", size = 208359, upload-time = "2025-09-12T07:23:40.746Z" }, - { url = "https://files.pythonhosted.org/packages/d9/3a/77b5e3dbaced66141472faf788ea20e9b395076ea6fd30e2fde4597047b1/zope_interface-8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:67047a4470cb2fddb5ba5105b0160a1d1c30ce4b300cf264d0563136adac4eac", size = 208547, upload-time = "2025-09-12T07:23:42.088Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d3/a920b3787373e717384ef5db2cafaae70d451b8850b9b4808c024867dd06/zope_interface-8.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:1bee9c1b42513148f98d3918affd829804a5c992c000c290dc805f25a75a6a3f", size = 258986, upload-time = "2025-09-12T07:58:20.681Z" }, - { url = "https://files.pythonhosted.org/packages/4d/37/c7f5b1ccfcbb0b90d57d02b5744460e9f77a84932689ca8d99a842f330b2/zope_interface-8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:804ebacb2776eb89a57d9b5e9abec86930e0ee784a0005030801ae2f6c04d5d8", size = 264438, upload-time = "2025-09-12T08:00:28.921Z" }, - { url = "https://files.pythonhosted.org/packages/43/eb/fd6fefc92618bdf16fbfd71fb43ed206f99b8db5a0dd55797f4e33d7dd75/zope_interface-8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c4d9d3982aaa88b177812cd911ceaf5ffee4829e86ab3273c89428f2c0c32cc4", size = 263971, upload-time = "2025-09-12T08:29:20.693Z" }, - { url = "https://files.pythonhosted.org/packages/d9/ca/f99f4ef959b2541f0a3e05768d9ff48ad055d4bed00c7a438b088d54196a/zope_interface-8.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea1f2e47bc0124a03ee1e5fb31aee5dfde876244bcc552b9e3eb20b041b350d7", size = 212031, upload-time = "2025-09-12T07:23:04.755Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/2f/c10c739bcb9b072090c97c2e08533777497190daa19d190d72b4cce9c7cb/zope_interface-8.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4bd01022d2e1bce4a4a4ed9549edb25393c92e607d7daa6deff843f1f68b479d", size = 207903, upload-time = "2025-09-25T05:58:21.671Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e1/9845ac3697f108d9a1af6912170c59a23732090bbfb35955fe77e5544955/zope_interface-8.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:29be8db8b712d94f1c05e24ea230a879271d787205ba1c9a6100d1d81f06c69a", size = 208345, upload-time = "2025-09-25T05:58:24.217Z" }, + { url = "https://files.pythonhosted.org/packages/f2/49/6573bc8b841cfab18e80c8e8259f1abdbbf716140011370de30231be79ad/zope_interface-8.0.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:51ae1b856565b30455b7879fdf0a56a88763b401d3f814fa9f9542d7410dbd7e", size = 255027, upload-time = "2025-09-25T05:58:19.975Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fd/908b0fd4b1ab6e412dfac9bd2b606f2893ef9ba3dd36d643f5e5b94c57b3/zope_interface-8.0.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d2e7596149cb1acd1d4d41b9f8fe2ffc0e9e29e2e91d026311814181d0d9efaf", size = 259800, upload-time = "2025-09-25T05:58:11.487Z" }, + { url = "https://files.pythonhosted.org/packages/dc/78/8419a2b4e88410520ed4b7f93bbd25a6d4ae66c4e2b131320f2b90f43077/zope_interface-8.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2737c11c34fb9128816759864752d007ec4f987b571c934c30723ed881a7a4f", size = 260978, upload-time = "2025-09-25T06:26:24.483Z" }, + { url = "https://files.pythonhosted.org/packages/e5/90/caf68152c292f1810e2bd3acd2177badf08a740aa8a348714617d6c9ad0b/zope_interface-8.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:cf66e4bf731aa7e0ced855bb3670e8cda772f6515a475c6a107bad5cb6604103", size = 212155, upload-time = "2025-09-25T05:59:40.318Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/0f08713ddda834c428ebf97b2a7fd8dea50c0100065a8955924dbd94dae8/zope_interface-8.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:115f27c1cc95ce7a517d960ef381beedb0a7ce9489645e80b9ab3cbf8a78799c", size = 208609, upload-time = "2025-09-25T05:58:53.698Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/d423045f54dc81e0991ec655041e7a0eccf6b2642535839dd364b35f4d7f/zope_interface-8.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af655c573b84e3cb6a4f6fd3fbe04e4dc91c63c6b6f99019b3713ef964e589bc", size = 208797, upload-time = "2025-09-25T05:58:56.258Z" }, + { url = "https://files.pythonhosted.org/packages/c6/43/39d4bb3f7a80ebd261446792493cfa4e198badd47107224f5b6fe1997ad9/zope_interface-8.0.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:23f82ef9b2d5370750cc1bf883c3b94c33d098ce08557922a3fbc7ff3b63dfe1", size = 259242, upload-time = "2025-09-25T05:58:21.602Z" }, + { url = "https://files.pythonhosted.org/packages/da/29/49effcff64ef30731e35520a152a9dfcafec86cf114b4c2aff942e8264ba/zope_interface-8.0.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35a1565d5244997f2e629c5c68715b3d9d9036e8df23c4068b08d9316dcb2822", size = 264696, upload-time = "2025-09-25T05:58:13.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/39/b947673ec9a258eeaa20208dd2f6127d9fbb3e5071272a674ebe02063a78/zope_interface-8.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:029ea1db7e855a475bf88d9910baab4e94d007a054810e9007ac037a91c67c6f", size = 264229, upload-time = "2025-09-25T06:26:26.226Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ee/eed6efd1fc3788d1bef7a814e0592d8173b7fe601c699b935009df035fc2/zope_interface-8.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0beb3e7f7dc153944076fcaf717a935f68d39efa9fce96ec97bafcc0c2ea6cab", size = 212270, upload-time = "2025-09-25T05:58:53.584Z" }, ] [[package]] name = "zstandard" -version = "0.24.0" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/1b/c20b2ef1d987627765dcd5bf1dadb8ef6564f00a87972635099bb76b7a05/zstandard-0.24.0.tar.gz", hash = "sha256:fe3198b81c00032326342d973e526803f183f97aa9e9a98e3f897ebafe21178f", size = 905681, upload-time = "2025-08-17T18:36:36.352Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/1f/5c72806f76043c0ef9191a2b65281dacdf3b65b0828eb13bb2c987c4fb90/zstandard-0.24.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:addfc23e3bd5f4b6787b9ca95b2d09a1a67ad5a3c318daaa783ff90b2d3a366e", size = 795228, upload-time = "2025-08-17T18:21:46.978Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ba/3059bd5cd834666a789251d14417621b5c61233bd46e7d9023ea8bc1043a/zstandard-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b005bcee4be9c3984b355336283afe77b2defa76ed6b89332eced7b6fa68b68", size = 640520, upload-time = "2025-08-17T18:21:48.162Z" }, - { url = "https://files.pythonhosted.org/packages/57/07/f0e632bf783f915c1fdd0bf68614c4764cae9dd46ba32cbae4dd659592c3/zstandard-0.24.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:3f96a9130171e01dbb6c3d4d9925d604e2131a97f540e223b88ba45daf56d6fb", size = 5347682, upload-time = "2025-08-17T18:21:50.266Z" }, - { url = "https://files.pythonhosted.org/packages/a6/4c/63523169fe84773a7462cd090b0989cb7c7a7f2a8b0a5fbf00009ba7d74d/zstandard-0.24.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd0d3d16e63873253bad22b413ec679cf6586e51b5772eb10733899832efec42", size = 5057650, upload-time = "2025-08-17T18:21:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/c6/16/49013f7ef80293f5cebf4c4229535a9f4c9416bbfd238560edc579815dbe/zstandard-0.24.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:b7a8c30d9bf4bd5e4dcfe26900bef0fcd9749acde45cdf0b3c89e2052fda9a13", size = 5404893, upload-time = "2025-08-17T18:21:54.54Z" }, - { url = "https://files.pythonhosted.org/packages/4d/38/78e8bcb5fc32a63b055f2b99e0be49b506f2351d0180173674f516cf8a7a/zstandard-0.24.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:52cd7d9fa0a115c9446abb79b06a47171b7d916c35c10e0c3aa6f01d57561382", size = 5452389, upload-time = "2025-08-17T18:21:56.822Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/8a/81671f05619edbacd49bd84ce6899a09fc8299be20c09ae92f6618ccb92d/zstandard-0.24.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0f6fc2ea6e07e20df48752e7700e02e1892c61f9a6bfbacaf2c5b24d5ad504b", size = 5558888, upload-time = "2025-08-17T18:21:58.68Z" }, - { url = "https://files.pythonhosted.org/packages/49/cc/e83feb2d7d22d1f88434defbaeb6e5e91f42a4f607b5d4d2d58912b69d67/zstandard-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e46eb6702691b24ddb3e31e88b4a499e31506991db3d3724a85bd1c5fc3cfe4e", size = 5048038, upload-time = "2025-08-17T18:22:00.642Z" }, - { url = "https://files.pythonhosted.org/packages/08/c3/7a5c57ff49ef8943877f85c23368c104c2aea510abb339a2dc31ad0a27c3/zstandard-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5e3b9310fd7f0d12edc75532cd9a56da6293840c84da90070d692e0bb15f186", size = 5573833, upload-time = "2025-08-17T18:22:02.402Z" }, - { url = "https://files.pythonhosted.org/packages/f9/00/64519983cd92535ba4bdd4ac26ac52db00040a52d6c4efb8d1764abcc343/zstandard-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76cdfe7f920738ea871f035568f82bad3328cbc8d98f1f6988264096b5264efd", size = 4961072, upload-time = "2025-08-17T18:22:04.384Z" }, - { url = "https://files.pythonhosted.org/packages/72/ab/3a08a43067387d22994fc87c3113636aa34ccd2914a4d2d188ce365c5d85/zstandard-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3f2fe35ec84908dddf0fbf66b35d7c2878dbe349552dd52e005c755d3493d61c", size = 5268462, upload-time = "2025-08-17T18:22:06.095Z" }, - { url = "https://files.pythonhosted.org/packages/49/cf/2abb3a1ad85aebe18c53e7eca73223f1546ddfa3bf4d2fb83fc5a064c5ca/zstandard-0.24.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:aa705beb74ab116563f4ce784fa94771f230c05d09ab5de9c397793e725bb1db", size = 5443319, upload-time = "2025-08-17T18:22:08.572Z" }, - { url = "https://files.pythonhosted.org/packages/40/42/0dd59fc2f68f1664cda11c3b26abdf987f4e57cb6b6b0f329520cd074552/zstandard-0.24.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:aadf32c389bb7f02b8ec5c243c38302b92c006da565e120dfcb7bf0378f4f848", size = 5822355, upload-time = "2025-08-17T18:22:10.537Z" }, - { url = "https://files.pythonhosted.org/packages/99/c0/ea4e640fd4f7d58d6f87a1e7aca11fb886ac24db277fbbb879336c912f63/zstandard-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e40cd0fc734aa1d4bd0e7ad102fd2a1aefa50ce9ef570005ffc2273c5442ddc3", size = 5365257, upload-time = "2025-08-17T18:22:13.159Z" }, - { url = "https://files.pythonhosted.org/packages/27/a9/92da42a5c4e7e4003271f2e1f0efd1f37cfd565d763ad3604e9597980a1c/zstandard-0.24.0-cp311-cp311-win32.whl", hash = "sha256:cda61c46343809ecda43dc620d1333dd7433a25d0a252f2dcc7667f6331c7b61", size = 435559, upload-time = "2025-08-17T18:22:17.29Z" }, - { url = "https://files.pythonhosted.org/packages/e2/8e/2c8e5c681ae4937c007938f954a060fa7c74f36273b289cabdb5ef0e9a7e/zstandard-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:3b95fc06489aa9388400d1aab01a83652bc040c9c087bd732eb214909d7fb0dd", size = 505070, upload-time = "2025-08-17T18:22:14.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/10/a2f27a66bec75e236b575c9f7b0d7d37004a03aa2dcde8e2decbe9ed7b4d/zstandard-0.24.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad9fd176ff6800a0cf52bcf59c71e5de4fa25bf3ba62b58800e0f84885344d34", size = 461507, upload-time = "2025-08-17T18:22:15.964Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/e9/0bd281d9154bba7fc421a291e263911e1d69d6951aa80955b992a48289f6/zstandard-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a2bda8f2790add22773ee7a4e43c90ea05598bffc94c21c40ae0a9000b0133c3", size = 795710, upload-time = "2025-08-17T18:22:19.189Z" }, - { url = "https://files.pythonhosted.org/packages/36/26/b250a2eef515caf492e2d86732e75240cdac9d92b04383722b9753590c36/zstandard-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cc76de75300f65b8eb574d855c12518dc25a075dadb41dd18f6322bda3fe15d5", size = 640336, upload-time = "2025-08-17T18:22:20.466Z" }, - { url = "https://files.pythonhosted.org/packages/79/bf/3ba6b522306d9bf097aac8547556b98a4f753dc807a170becaf30dcd6f01/zstandard-0.24.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d2b3b4bda1a025b10fe0269369475f420177f2cb06e0f9d32c95b4873c9f80b8", size = 5342533, upload-time = "2025-08-17T18:22:22.326Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ec/22bc75bf054e25accdf8e928bc68ab36b4466809729c554ff3a1c1c8bce6/zstandard-0.24.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b84c6c210684286e504022d11ec294d2b7922d66c823e87575d8b23eba7c81f", size = 5062837, upload-time = "2025-08-17T18:22:24.416Z" }, - { url = "https://files.pythonhosted.org/packages/48/cc/33edfc9d286e517fb5b51d9c3210e5bcfce578d02a675f994308ca587ae1/zstandard-0.24.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c59740682a686bf835a1a4d8d0ed1eefe31ac07f1c5a7ed5f2e72cf577692b00", size = 5393855, upload-time = "2025-08-17T18:22:26.786Z" }, - { url = "https://files.pythonhosted.org/packages/73/36/59254e9b29da6215fb3a717812bf87192d89f190f23817d88cb8868c47ac/zstandard-0.24.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6324fde5cf5120fbf6541d5ff3c86011ec056e8d0f915d8e7822926a5377193a", size = 5451058, upload-time = "2025-08-17T18:22:28.885Z" }, - { url = "https://files.pythonhosted.org/packages/9a/c7/31674cb2168b741bbbe71ce37dd397c9c671e73349d88ad3bca9e9fae25b/zstandard-0.24.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:51a86bd963de3f36688553926a84e550d45d7f9745bd1947d79472eca27fcc75", size = 5546619, upload-time = "2025-08-17T18:22:31.115Z" }, - { url = "https://files.pythonhosted.org/packages/e6/01/1a9f22239f08c00c156f2266db857545ece66a6fc0303d45c298564bc20b/zstandard-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d82ac87017b734f2fb70ff93818c66f0ad2c3810f61040f077ed38d924e19980", size = 5046676, upload-time = "2025-08-17T18:22:33.077Z" }, - { url = "https://files.pythonhosted.org/packages/a7/91/6c0cf8fa143a4988a0361380ac2ef0d7cb98a374704b389fbc38b5891712/zstandard-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92ea7855d5bcfb386c34557516c73753435fb2d4a014e2c9343b5f5ba148b5d8", size = 5576381, upload-time = "2025-08-17T18:22:35.391Z" }, - { url = "https://files.pythonhosted.org/packages/e2/77/1526080e22e78871e786ccf3c84bf5cec9ed25110a9585507d3c551da3d6/zstandard-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3adb4b5414febf074800d264ddf69ecade8c658837a83a19e8ab820e924c9933", size = 4953403, upload-time = "2025-08-17T18:22:37.266Z" }, - { url = "https://files.pythonhosted.org/packages/6e/d0/a3a833930bff01eab697eb8abeafb0ab068438771fa066558d96d7dafbf9/zstandard-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6374feaf347e6b83ec13cc5dcfa70076f06d8f7ecd46cc71d58fac798ff08b76", size = 
5267396, upload-time = "2025-08-17T18:22:39.757Z" }, - { url = "https://files.pythonhosted.org/packages/f3/5e/90a0db9a61cd4769c06374297ecfcbbf66654f74cec89392519deba64d76/zstandard-0.24.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:13fc548e214df08d896ee5f29e1f91ee35db14f733fef8eabea8dca6e451d1e2", size = 5433269, upload-time = "2025-08-17T18:22:42.131Z" }, - { url = "https://files.pythonhosted.org/packages/ce/58/fc6a71060dd67c26a9c5566e0d7c99248cbe5abfda6b3b65b8f1a28d59f7/zstandard-0.24.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0a416814608610abf5488889c74e43ffa0343ca6cf43957c6b6ec526212422da", size = 5814203, upload-time = "2025-08-17T18:22:44.017Z" }, - { url = "https://files.pythonhosted.org/packages/5c/6a/89573d4393e3ecbfa425d9a4e391027f58d7810dec5cdb13a26e4cdeef5c/zstandard-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0d66da2649bb0af4471699aeb7a83d6f59ae30236fb9f6b5d20fb618ef6c6777", size = 5359622, upload-time = "2025-08-17T18:22:45.802Z" }, - { url = "https://files.pythonhosted.org/packages/60/ff/2cbab815d6f02a53a9d8d8703bc727d8408a2e508143ca9af6c3cca2054b/zstandard-0.24.0-cp312-cp312-win32.whl", hash = "sha256:ff19efaa33e7f136fe95f9bbcc90ab7fb60648453b03f95d1de3ab6997de0f32", size = 435968, upload-time = "2025-08-17T18:22:49.493Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/8f96b8ddb7ad12344218fbd0fd2805702dafd126ae9f8a1fb91eef7b33da/zstandard-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc05f8a875eb651d1cc62e12a4a0e6afa5cd0cc231381adb830d2e9c196ea895", size = 505195, upload-time = "2025-08-17T18:22:47.193Z" }, - { url = "https://files.pythonhosted.org/packages/a3/4a/bfca20679da63bfc236634ef2e4b1b4254203098b0170e3511fee781351f/zstandard-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:b04c94718f7a8ed7cdd01b162b6caa1954b3c9d486f00ecbbd300f149d2b2606", size = 461605, upload-time = "2025-08-17T18:22:48.317Z" }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time 
= "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, 
upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, ] diff --git a/dev/basedpyright-check b/dev/basedpyright-check index ef58ed1f57..1c87b27d6f 100755 --- a/dev/basedpyright-check +++ b/dev/basedpyright-check @@ -10,7 +10,7 @@ PATH_TO_CHECK="$1" # run basedpyright checks if [ -n "$PATH_TO_CHECK" ]; then - uv run --directory api --dev basedpyright "$PATH_TO_CHECK" + uv run --directory api --dev -- basedpyright --threads $(nproc) "$PATH_TO_CHECK" else - uv run --directory api --dev basedpyright + uv run --directory api --dev -- basedpyright --threads $(nproc) fi diff --git a/dev/pytest/pytest_artifacts.sh b/dev/pytest/pytest_artifacts.sh index 3086ef5cc4..29cacdcc07 100755 --- a/dev/pytest/pytest_artifacts.sh +++ b/dev/pytest/pytest_artifacts.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/artifact_tests/ +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/artifact_tests/ diff --git a/dev/pytest/pytest_model_runtime.sh b/dev/pytest/pytest_model_runtime.sh index 2cbbbbfd81..fd68dbe697 100755 --- a/dev/pytest/pytest_model_runtime.sh +++ b/dev/pytest/pytest_model_runtime.sh @@ -4,7 +4,9 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." 
-pytest api/tests/integration_tests/model_runtime/anthropic \ +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-180}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/model_runtime/anthropic \ api/tests/integration_tests/model_runtime/azure_openai \ api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm \ api/tests/integration_tests/model_runtime/google api/tests/integration_tests/model_runtime/xinference \ diff --git a/dev/pytest/pytest_testcontainers.sh b/dev/pytest/pytest_testcontainers.sh index e55a436138..f92f8821bf 100755 --- a/dev/pytest/pytest_testcontainers.sh +++ b/dev/pytest/pytest_testcontainers.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/test_containers_integration_tests +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/test_containers_integration_tests diff --git a/dev/pytest/pytest_tools.sh b/dev/pytest/pytest_tools.sh index d10934626f..989784f078 100755 --- a/dev/pytest/pytest_tools.sh +++ b/dev/pytest/pytest_tools.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/integration_tests/tools +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/tools diff --git a/dev/pytest/pytest_unit_tests.sh b/dev/pytest/pytest_unit_tests.sh index 1a1819ca28..496cb40952 100755 --- a/dev/pytest/pytest_unit_tests.sh +++ b/dev/pytest/pytest_unit_tests.sh @@ -4,5 +4,7 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-20}" + # libs -pytest api/tests/unit_tests +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/unit_tests diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh index 7f617a9c05..3c11a079cc 100755 --- a/dev/pytest/pytest_vdb.sh +++ b/dev/pytest/pytest_vdb.sh @@ -4,7 +4,9 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/integration_tests/vdb/chroma \ +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-180}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/vdb/chroma \ api/tests/integration_tests/vdb/milvus \ api/tests/integration_tests/vdb/pgvecto_rs \ api/tests/integration_tests/vdb/pgvector \ diff --git a/dev/pytest/pytest_workflow.sh b/dev/pytest/pytest_workflow.sh index b63d49069f..941c8d3e7e 100755 --- a/dev/pytest/pytest_workflow.sh +++ b/dev/pytest/pytest_workflow.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/integration_tests/workflow +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/workflow diff --git a/dev/start-worker b/dev/start-worker index a2af04c01c..83d7bf0f3c 100755 --- a/dev/start-worker +++ b/dev/start-worker @@ -5,7 +5,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/.." 
-
 uv --directory api run \
-  celery -A app.celery worker \
-    -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
+  celery -A app.celery worker \
+    -P gevent -c 1 --loglevel INFO -Q dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline
diff --git a/docker/.env.example b/docker/.env.example
index c0f084796e..7af92248dc 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -45,7 +45,7 @@ APP_WEB_URL=
 # Recommendation: use a dedicated domain (e.g., https://upload.example.com).
 # Alternatively, use http://<your-ip>:5001 or http://api:5001,
 # ensuring port 5001 is externally accessible (see docker-compose.yaml).
-FILES_URL=http://api:5001
+FILES_URL=

 # INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
 # Set this to the internal Docker service URL for proper plugin file access.
@@ -201,6 +201,10 @@ ENABLE_WEBSITE_JINAREADER=true
 ENABLE_WEBSITE_FIRECRAWL=true
 ENABLE_WEBSITE_WATERCRAWL=true

+# Enable inline LaTeX rendering with single dollar signs ($...$) in the web frontend
+# Default is false for security reasons to prevent conflicts with regular text
+NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
+
 # ------------------------------
 # Database Configuration
 # The database uses PostgreSQL. Please use the public schema.
@@ -259,6 +263,20 @@ POSTGRES_MAINTENANCE_WORK_MEM=64MB
 # Reference: https://www.postgresql.org/docs/current/runtime-config-query.html#GUC-EFFECTIVE-CACHE-SIZE
 POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB

+# Sets the maximum allowed duration of any statement before termination.
+# Default is 0 (no timeout).
+#
+# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-STATEMENT-TIMEOUT
+# A value of 0 prevents the server from timing out statements.
+POSTGRES_STATEMENT_TIMEOUT=0
+
+# Sets the maximum allowed duration of any idle in-transaction session before termination.
+# Default is 0 (no timeout).
+#
+# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-IDLE-IN-TRANSACTION-SESSION-TIMEOUT
+# A value of 0 prevents the server from terminating idle sessions.
+POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0
+
 # ------------------------------
 # Redis Configuration
 # This Redis configuration is used for caching and for pub/sub during conversation.
@@ -302,7 +320,7 @@ REDIS_CLUSTERS_PASSWORD=
 # Celery Configuration
 # ------------------------------

-# Use standalone redis as the broker, and redis db 1 for celery broker. (redis_username is usually set by defualt as empty)
+# Use standalone redis as the broker, and redis db 1 for celery broker. (redis_username is usually set by default as empty)
 # Format as follows: `redis://<redis_username>:<redis_password>@<redis_host>:<redis_port>/<redis_database>`.
 # Example: redis://:difyai123456@redis:6379/1
 # If use Redis Sentinel, format as follows: `sentinel://<sentinel_username>:<sentinel_password>@<sentinel_host>:<sentinel_port>/<redis_database>`
@@ -330,6 +348,11 @@ WEB_API_CORS_ALLOW_ORIGINS=*
 # Specifies the allowed origins for cross-origin requests to the console API,
 # e.g. https://cloud.dify.ai or * for all origins.
 CONSOLE_CORS_ALLOW_ORIGINS=*
+# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
+# Provide the registrable domain (e.g. example.com); leading dots are optional.
+COOKIE_DOMAIN=
+# The frontend reads NEXT_PUBLIC_COOKIE_DOMAIN to align cookie handling with the API.
+NEXT_PUBLIC_COOKIE_DOMAIN=

 # ------------------------------
 # File Storage Configuration
@@ -449,7 +472,7 @@ SUPABASE_URL=your-server-url

 # ------------------------------
 # The type of vector store to use.
-# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`, `clickzetta`.
+# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`, `clickzetta`, `alibabacloud_mysql`.
 VECTOR_STORE=weaviate
 # Prefix used to create collection name in vector database
 VECTOR_INDEX_NAME_PREFIX=Vector_index
@@ -580,6 +603,15 @@ ORACLE_WALLET_LOCATION=/app/api/storage/wallet
 ORACLE_WALLET_PASSWORD=dify
 ORACLE_IS_AUTONOMOUS=false

+# AlibabaCloud MySQL configuration, only available when VECTOR_STORE is `alibabacloud_mysql`
+ALIBABACLOUD_MYSQL_HOST=127.0.0.1
+ALIBABACLOUD_MYSQL_PORT=3306
+ALIBABACLOUD_MYSQL_USER=root
+ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
+ALIBABACLOUD_MYSQL_DATABASE=dify
+ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
+ALIBABACLOUD_MYSQL_HNSW_M=6
+
 # relyt configurations, only available when VECTOR_STORE is `relyt`
 RELYT_HOST=db
 RELYT_PORT=5432
@@ -867,14 +899,14 @@ CODE_MAX_NUMBER=9223372036854775807
 CODE_MIN_NUMBER=-9223372036854775808
 CODE_MAX_DEPTH=5
 CODE_MAX_PRECISION=20
-CODE_MAX_STRING_LENGTH=80000
+CODE_MAX_STRING_LENGTH=400000
 CODE_MAX_STRING_ARRAY_LENGTH=30
 CODE_MAX_OBJECT_ARRAY_LENGTH=30
 CODE_MAX_NUMBER_ARRAY_LENGTH=1000
 CODE_EXECUTION_CONNECT_TIMEOUT=10
 CODE_EXECUTION_READ_TIMEOUT=60
 CODE_EXECUTION_WRITE_TIMEOUT=10
-TEMPLATE_TRANSFORM_MAX_LENGTH=80000
+TEMPLATE_TRANSFORM_MAX_LENGTH=400000

 # Workflow runtime configuration
 WORKFLOW_MAX_EXECUTION_STEPS=500
@@ -930,6 +962,16 @@ WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
 HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
 HTTP_REQUEST_NODE_SSL_VERIFY=True
+
+# HTTP request node timeout configuration
+# Maximum timeout values (in seconds) that users can set in HTTP request nodes
+# - Connect timeout: Time to wait for establishing a connection (default: 10s)
+# - Read timeout: Time to wait for receiving response data (default: 600s, 10 minutes)
+# - Write timeout: Time to wait for sending request data (default: 600s, 10 minutes)
+HTTP_REQUEST_MAX_CONNECT_TIMEOUT=10
+HTTP_REQUEST_MAX_READ_TIMEOUT=600
+HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
+
 # Base64 encoded CA certificate data for custom certificate verification (PEM format, optional)
 # HTTP_REQUEST_NODE_SSL_CERT_DATA=LS0tLS1CRUdJTi...
# Base64 encoded client certificate data for mutual TLS authentication (PEM format, optional) @@ -1213,6 +1255,7 @@ MARKETPLACE_ENABLED=true MARKETPLACE_API_URL=https://marketplace.dify.ai FORCE_VERIFYING_SIGNATURE=true +ENFORCE_LANGGENIUS_PLUGIN_SIGNATURES=true PLUGIN_STDIO_BUFFER_SIZE=1024 PLUGIN_STDIO_MAX_BUFFER_SIZE=5242880 diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 685fc325d0..fd63f5f00c 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.2 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.2 restart: always environment: # Use the shared environment variables. @@ -58,7 +58,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.2 restart: always environment: # Use the shared environment variables. @@ -76,11 +76,12 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.9.0 + image: langgenius/dify-web:1.9.2 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} APP_API_URL: ${APP_API_URL:-} + NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-} SENTRY_DSN: ${WEB_SENTRY_DSN:-} NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0} TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} @@ -115,6 +116,8 @@ services: -c 'work_mem=${POSTGRES_WORK_MEM:-4MB}' -c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}' -c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}' + -c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}' + -c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}' volumes: - ./volumes/db/data:/var/lib/postgresql/data healthcheck: @@ -177,7 +180,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.3.0-local + image: langgenius/dify-plugin-daemon:0.3.3-local restart: always environment: # Use the shared environment variables. @@ -329,9 +332,8 @@ services: # The Weaviate vector store. weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - - "" - weaviate restart: always volumes: diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index d350503f27..0497e9d1f6 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -15,6 +15,8 @@ services: -c 'work_mem=${POSTGRES_WORK_MEM:-4MB}' -c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}' -c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}' + -c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}' + -c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}' volumes: - ${PGDATA_HOST_VOLUME:-./volumes/db/data}:/var/lib/postgresql/data ports: @@ -85,7 +87,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.3.0-local + image: langgenius/dify-plugin-daemon:0.3.3-local restart: always env_file: - ./middleware.env @@ -181,7 +183,7 @@ services: # The Weaviate vector store. 
weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - "" - weaviate @@ -206,6 +208,7 @@ services: AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai} ports: - "${EXPOSE_WEAVIATE_PORT:-8080}:8080" + - "${EXPOSE_WEAVIATE_GRPC_PORT:-50051}:50051" networks: # create a network between sandbox, api and ssrf_proxy, and can not access outside. diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 2617f84e7d..34dfc19032 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -10,7 +10,7 @@ x-shared-env: &shared-api-worker-env SERVICE_API_URL: ${SERVICE_API_URL:-} APP_API_URL: ${APP_API_URL:-} APP_WEB_URL: ${APP_WEB_URL:-} - FILES_URL: ${FILES_URL:-http://api:5001} + FILES_URL: ${FILES_URL:-} INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-} LANG: ${LANG:-en_US.UTF-8} LC_ALL: ${LC_ALL:-en_US.UTF-8} @@ -51,6 +51,7 @@ x-shared-env: &shared-api-worker-env ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true} ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true} ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true} + NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: ${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false} DB_USERNAME: ${DB_USERNAME:-postgres} DB_PASSWORD: ${DB_PASSWORD:-difyai123456} DB_HOST: ${DB_HOST:-db} @@ -68,6 +69,8 @@ x-shared-env: &shared-api-worker-env POSTGRES_WORK_MEM: ${POSTGRES_WORK_MEM:-4MB} POSTGRES_MAINTENANCE_WORK_MEM: ${POSTGRES_MAINTENANCE_WORK_MEM:-64MB} POSTGRES_EFFECTIVE_CACHE_SIZE: ${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB} + POSTGRES_STATEMENT_TIMEOUT: ${POSTGRES_STATEMENT_TIMEOUT:-0} + POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: ${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0} REDIS_HOST: ${REDIS_HOST:-redis} REDIS_PORT: ${REDIS_PORT:-6379} REDIS_USERNAME: ${REDIS_USERNAME:-} @@ -96,6 +99,8 @@ x-shared-env: &shared-api-worker-env CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1} WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*} CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*} + COOKIE_DOMAIN: ${COOKIE_DOMAIN:-} + NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-} STORAGE_TYPE: ${STORAGE_TYPE:-opendal} OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs} OPENDAL_FS_ROOT: ${OPENDAL_FS_ROOT:-storage} @@ -244,6 +249,13 @@ x-shared-env: &shared-api-worker-env ORACLE_WALLET_LOCATION: ${ORACLE_WALLET_LOCATION:-/app/api/storage/wallet} ORACLE_WALLET_PASSWORD: ${ORACLE_WALLET_PASSWORD:-dify} ORACLE_IS_AUTONOMOUS: ${ORACLE_IS_AUTONOMOUS:-false} + ALIBABACLOUD_MYSQL_HOST: ${ALIBABACLOUD_MYSQL_HOST:-127.0.0.1} + ALIBABACLOUD_MYSQL_PORT: ${ALIBABACLOUD_MYSQL_PORT:-3306} + ALIBABACLOUD_MYSQL_USER: ${ALIBABACLOUD_MYSQL_USER:-root} + ALIBABACLOUD_MYSQL_PASSWORD: ${ALIBABACLOUD_MYSQL_PASSWORD:-difyai123456} + ALIBABACLOUD_MYSQL_DATABASE: ${ALIBABACLOUD_MYSQL_DATABASE:-dify} + ALIBABACLOUD_MYSQL_MAX_CONNECTION: ${ALIBABACLOUD_MYSQL_MAX_CONNECTION:-5} + ALIBABACLOUD_MYSQL_HNSW_M: ${ALIBABACLOUD_MYSQL_HNSW_M:-6} RELYT_HOST: ${RELYT_HOST:-db} RELYT_PORT: ${RELYT_PORT:-5432} RELYT_USER: ${RELYT_USER:-postgres} @@ -390,14 +402,14 @@ x-shared-env: &shared-api-worker-env CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808} CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5} CODE_MAX_PRECISION: ${CODE_MAX_PRECISION:-20} - CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-80000} + CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-400000} CODE_MAX_STRING_ARRAY_LENGTH: ${CODE_MAX_STRING_ARRAY_LENGTH:-30} CODE_MAX_OBJECT_ARRAY_LENGTH: 
${CODE_MAX_OBJECT_ARRAY_LENGTH:-30} CODE_MAX_NUMBER_ARRAY_LENGTH: ${CODE_MAX_NUMBER_ARRAY_LENGTH:-1000} CODE_EXECUTION_CONNECT_TIMEOUT: ${CODE_EXECUTION_CONNECT_TIMEOUT:-10} CODE_EXECUTION_READ_TIMEOUT: ${CODE_EXECUTION_READ_TIMEOUT:-60} CODE_EXECUTION_WRITE_TIMEOUT: ${CODE_EXECUTION_WRITE_TIMEOUT:-10} - TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-80000} + TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-400000} WORKFLOW_MAX_EXECUTION_STEPS: ${WORKFLOW_MAX_EXECUTION_STEPS:-500} WORKFLOW_MAX_EXECUTION_TIME: ${WORKFLOW_MAX_EXECUTION_TIME:-1200} WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_CALL_MAX_DEPTH:-5} @@ -418,6 +430,9 @@ x-shared-env: &shared-api-worker-env HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True} + HTTP_REQUEST_MAX_CONNECT_TIMEOUT: ${HTTP_REQUEST_MAX_CONNECT_TIMEOUT:-10} + HTTP_REQUEST_MAX_READ_TIMEOUT: ${HTTP_REQUEST_MAX_READ_TIMEOUT:-600} + HTTP_REQUEST_MAX_WRITE_TIMEOUT: ${HTTP_REQUEST_MAX_WRITE_TIMEOUT:-600} RESPECT_XFORWARD_HEADERS_ENABLED: ${RESPECT_XFORWARD_HEADERS_ENABLED:-false} SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128} SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128} @@ -532,6 +547,7 @@ x-shared-env: &shared-api-worker-env MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-true} MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai} FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true} + ENFORCE_LANGGENIUS_PLUGIN_SIGNATURES: ${ENFORCE_LANGGENIUS_PLUGIN_SIGNATURES:-true} PLUGIN_STDIO_BUFFER_SIZE: ${PLUGIN_STDIO_BUFFER_SIZE:-1024} PLUGIN_STDIO_MAX_BUFFER_SIZE: ${PLUGIN_STDIO_MAX_BUFFER_SIZE:-5242880} PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} @@ -599,7 +615,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.2 restart: always environment: # Use the shared environment variables. @@ -628,7 +644,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.2 restart: always environment: # Use the shared environment variables. @@ -655,7 +671,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.2 restart: always environment: # Use the shared environment variables. @@ -673,11 +689,12 @@ services: # Frontend web application. 
web: - image: langgenius/dify-web:1.9.0 + image: langgenius/dify-web:1.9.2 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} APP_API_URL: ${APP_API_URL:-} + NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-} SENTRY_DSN: ${WEB_SENTRY_DSN:-} NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0} TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} @@ -712,6 +729,8 @@ services: -c 'work_mem=${POSTGRES_WORK_MEM:-4MB}' -c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}' -c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}' + -c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}' + -c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}' volumes: - ./volumes/db/data:/var/lib/postgresql/data healthcheck: @@ -774,7 +793,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.3.0-local + image: langgenius/dify-plugin-daemon:0.3.3-local restart: always environment: # Use the shared environment variables. @@ -926,9 +945,8 @@ services: # The Weaviate vector store. weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - - "" - weaviate restart: always volumes: diff --git a/docker/middleware.env.example b/docker/middleware.env.example index 2eba62f594..24629c2d89 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -40,6 +40,20 @@ POSTGRES_MAINTENANCE_WORK_MEM=64MB # Reference: https://www.postgresql.org/docs/current/runtime-config-query.html#GUC-EFFECTIVE-CACHE-SIZE POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB +# Sets the maximum allowed duration of any statement before termination. +# Default is 0 (no timeout). +# +# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-STATEMENT-TIMEOUT +# A value of 0 prevents the server from timing out statements. +POSTGRES_STATEMENT_TIMEOUT=0 + +# Sets the maximum allowed duration of any idle in-transaction session before termination. +# Default is 0 (no timeout). +# +# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-IDLE-IN-TRANSACTION-SESSION-TIMEOUT +# A value of 0 prevents the server from terminating idle sessions. +POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0 + # ----------------------------- # Environment Variables for redis Service # ----------------------------- diff --git a/README/README_AR.md b/docs/ar-SA/README.md similarity index 79% rename from README/README_AR.md rename to docs/ar-SA/README.md index df29db73da..30920ed983 100644 --- a/README/README_AR.md +++ b/docs/ar-SA/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
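A note on the two new Postgres knobs documented above: `POSTGRES_STATEMENT_TIMEOUT` and `POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT` both default to `0`, which Postgres treats as "disabled", so existing deployments keep their current behavior unless they opt in. A minimal sketch of opting in from `docker/.env` — the `30s` and `5min` values are purely illustrative, and the `postgres` superuser is the compose default:

```bash
# Append illustrative overrides to docker/.env
cat >> .env <<'EOF'
POSTGRES_STATEMENT_TIMEOUT=30s
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=5min
EOF

# Recreate the db container so the new -c flags pick up the values
docker compose up -d db

# Confirm the settings inside the running server
docker compose exec db psql -U postgres \
  -c 'SHOW statement_timeout;' \
  -c 'SHOW idle_in_transaction_session_timeout;'
```

Both settings apply server-wide when passed as `-c` flags, so prefer conservative values: runaway-query protection is the usual motivation for the first, connection-pool hygiene for the second.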

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

@@ -97,7 +99,7 @@
-أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك: +أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](../../docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك: ```bash cd docker @@ -111,7 +113,15 @@ docker compose up -d ## الخطوات التالية -إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments). +إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](../../docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments). + +### مراقبة المقاييس باستخدام Grafana + +استيراد لوحة التحكم إلى Grafana، باستخدام قاعدة بيانات PostgreSQL الخاصة بـ Dify كمصدر للبيانات، لمراقبة المقاييس بدقة للتطبيقات والمستأجرين والرسائل وغير ذلك. + +- [لوحة تحكم Grafana بواسطة @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### النشر باستخدام Kubernetes يوجد مجتمع خاص بـ [Helm Charts](https://helm.sh/) وملفات YAML التي تسمح بتنفيذ Dify على Kubernetes للنظام من الإيجابيات العلوية. @@ -185,12 +195,4 @@ docker compose up -d ## الرخصة -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. - -## الكشف عن الأمان - -لحماية خصوصيتك، يرجى تجنب نشر مشكلات الأمان على GitHub. بدلاً من ذلك، أرسل أسئلتك إلى وسنقدم لك إجابة أكثر تفصيلاً. - -## الرخصة - -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. +هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. diff --git a/README/README_BN.md b/docs/bn-BD/README.md similarity index 82% rename from README/README_BN.md rename to docs/bn-BD/README.md index b0a64a6cfe..5430364ef9 100644 --- a/README/README_BN.md +++ b/docs/bn-BD/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
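The Grafana section newly added to each README (first visible in the Arabic file above) points the dashboard at Dify's PostgreSQL database as its data source. Rather than handing Grafana the `postgres` superuser, a read-only role is safer; a sketch, where `grafana_ro` and its password are illustrative and `dify` is assumed to be the default database name:

```bash
docker compose exec -T db psql -U postgres -d dify <<'SQL'
-- Illustrative read-only role for the Grafana data source
CREATE ROLE grafana_ro LOGIN PASSWORD 'change-me';
GRANT CONNECT ON DATABASE dify TO grafana_ro;
GRANT USAGE ON SCHEMA public TO grafana_ro;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO grafana_ro;
-- Also cover tables created by future migrations
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO grafana_ro;
SQL
```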

📌 ডিফাই ওয়ার্কফ্লো ফাইল আপলোড পরিচিতি: গুগল নোটবুক-এলএম পডকাস্ট পুনর্নির্মাণ @@ -39,18 +39,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in Deutsch - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

ডিফাই একটি ওপেন-সোর্স LLM অ্যাপ ডেভেলপমেন্ট প্ল্যাটফর্ম। এটি ইন্টুইটিভ ইন্টারফেস, এজেন্টিক AI ওয়ার্কফ্লো, RAG পাইপলাইন, এজেন্ট ক্যাপাবিলিটি, মডেল ম্যানেজমেন্ট, মনিটরিং সুবিধা এবং আরও অনেক কিছু একত্রিত করে, যা দ্রুত প্রোটোটাইপ থেকে প্রোডাকশন পর্যন্ত নিয়ে যেতে সহায়তা করে। @@ -64,7 +65,7 @@
-ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে : +ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](../../docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে : ```bash cd dify @@ -128,9 +129,17 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## Advanced Setup -যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। +যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](../../docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। যেকোনো পরিবর্তন করার পর, অনুগ্রহ করে `docker-compose up -d` পুনরায় চালান। ভেরিয়েবলের সম্পূর্ণ তালিকা [এখানে] (https://docs.dify.ai/getting-started/install-self-hosted/environments) খুঁজে পেতে পারেন। +### Grafana দিয়ে মেট্রিক্স মনিটরিং + +Dify-এর PostgreSQL ডাটাবেসকে ডেটা সোর্স হিসাবে ব্যবহার করে, অ্যাপ, টেন্যান্ট, মেসেজ ইত্যাদির গ্র্যানুলারিটিতে মেট্রিক্স মনিটর করার জন্য Grafana-তে ড্যাশবোর্ড ইম্পোর্ট করুন। + +- [@bowenliang123 কর্তৃক Grafana ড্যাশবোর্ড](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetes এর সাথে ডেপ্লয়মেন্ট + যদি আপনি একটি হাইলি এভেইলেবল সেটআপ কনফিগার করতে চান, তাহলে কমিউনিটি [Helm Charts](https://helm.sh/) এবং YAML ফাইল রয়েছে যা Dify কে Kubernetes-এ ডিপ্লয় করার প্রক্রিয়া বর্ণনা করে। - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) @@ -175,7 +184,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## Contributing -যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা] দেখুন (https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)। +যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন। একই সাথে, সোশ্যাল মিডিয়া এবং ইভেন্ট এবং কনফারেন্সে এটি শেয়ার করে Dify কে সমর্থন করুন। > আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন। @@ -203,4 +212,4 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## লাইসেন্স -এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](../LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। +এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](../../LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। diff --git a/CONTRIBUTING/CONTRIBUTING_DE.md b/docs/de-DE/CONTRIBUTING.md similarity index 96% rename from 
CONTRIBUTING/CONTRIBUTING_DE.md rename to docs/de-DE/CONTRIBUTING.md index c9e52c4fd7..db12006b30 100644 --- a/CONTRIBUTING/CONTRIBUTING_DE.md +++ b/docs/de-DE/CONTRIBUTING.md @@ -6,7 +6,7 @@ Wir müssen wendig sein und schnell liefern, aber wir möchten auch sicherstelle Dieser Leitfaden ist, wie Dify selbst, in ständiger Entwicklung. Wir sind dankbar für Ihr Verständnis, falls er manchmal hinter dem eigentlichen Projekt zurückbleibt, und begrüßen jedes Feedback zur Verbesserung. -Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](../LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](../../LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Bevor Sie loslegen diff --git a/README/README_DE.md b/docs/de-DE/README.md similarity index 78% rename from README/README_DE.md rename to docs/de-DE/README.md index d1a5837ab4..6c49fbdfc3 100644 --- a/README/README_DE.md +++ b/docs/de-DE/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
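Two Weaviate changes in this diff are easy to miss: the image jumps from 1.19.0 to 1.27.0 with its gRPC port (50051) now published via `EXPOSE_WEAVIATE_GRPC_PORT` in the middleware file, and the template/main compose files drop the default (`""`) profile, so Weaviate only starts when its profile is selected. A smoke test under those assumptions, with default port mappings:

```bash
# Weaviate is no longer in the default profile; select it explicitly
# (or set COMPOSE_PROFILES in .env) before bringing it up
docker compose --profile weaviate up -d weaviate

# Show the host ports compose actually bound
docker compose port weaviate 8080
docker compose port weaviate 50051

# Weaviate's REST readiness probe should respond once the node is up
curl -sf http://localhost:8080/v1/.well-known/ready && echo ready
```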

📌 Einführung in Dify Workflow File Upload: Google NotebookLM Podcast nachbilden @@ -39,18 +39,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in Deutsch - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre intuitive Benutzeroberfläche vereint agentenbasierte KI-Workflows, RAG-Pipelines, Agentenfunktionen, Modellverwaltung, Überwachungsfunktionen und mehr, sodass Sie schnell von einem Prototyp in die Produktion übergehen können. @@ -64,7 +65,7 @@ Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre in
-Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind: +Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](../../docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind: ```bash cd dify @@ -127,7 +128,15 @@ Star Dify auf GitHub und lassen Sie sich sofort über neue Releases benachrichti ## Erweiterte Einstellungen -Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](../../docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). + +### Metriküberwachung mit Grafana + +Importieren Sie das Dashboard in Grafana, wobei Sie die PostgreSQL-Datenbank von Dify als Datenquelle verwenden, um Metriken in der Granularität von Apps, Mandanten, Nachrichten und mehr zu überwachen. + +- [Grafana-Dashboard von @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Bereitstellung mit Kubernetes Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von der Community bereitgestellte [Helm Charts](https://helm.sh/) und YAML-Dateien, die es ermöglichen, Dify auf Kubernetes bereitzustellen. @@ -173,14 +182,14 @@ Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline ## Contributing -Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_DE.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. +Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](./CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. > Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. 
Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c). ## Gemeinschaft & Kontakt - [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen. -- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. - [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. @@ -200,4 +209,4 @@ Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme ## Lizenz -Dieses Repository steht unter der [Dify Open Source License](../LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. +Dieses Repository steht unter der [Dify Open Source License](../../LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. diff --git a/CONTRIBUTING/CONTRIBUTING_ES.md b/docs/es-ES/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_ES.md rename to docs/es-ES/CONTRIBUTING.md index 764c678fb2..6cd80651c4 100644 --- a/CONTRIBUTING/CONTRIBUTING_ES.md +++ b/docs/es-ES/CONTRIBUTING.md @@ -6,7 +6,7 @@ Necesitamos ser ágiles y enviar rápidamente dado donde estamos, pero también Esta guía, como Dify mismo, es un trabajo en constante progreso. Agradecemos mucho tu comprensión si a veces se queda atrás del proyecto real, y damos la bienvenida a cualquier comentario para que podamos mejorar. -En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](../LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](../../LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de empezar diff --git a/README/README_ES.md b/docs/es-ES/README.md similarity index 78% rename from README/README_ES.md rename to docs/es-ES/README.md index 60f0a06868..ae83d416e3 100644 --- a/README/README_ES.md +++ b/docs/es-ES/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
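Earlier in this diff the shared environment block also gains an `ALIBABACLOUD_MYSQL_*` family, which looks like a new vector-store backend (note the `HNSW_M` index parameter). The hunk does not show which `VECTOR_STORE` selector value activates it, so check the updated `.env.example`; the connection values below are placeholders only:

```bash
cat >> .env <<'EOF'
# Placeholder credentials for an Alibaba Cloud MySQL vector backend
ALIBABACLOUD_MYSQL_HOST=mysql.internal.example.com
ALIBABACLOUD_MYSQL_PORT=3306
ALIBABACLOUD_MYSQL_USER=dify
ALIBABACLOUD_MYSQL_PASSWORD=change-me
ALIBABACLOUD_MYSQL_DATABASE=dify
EOF
```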

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Dale estrella a Dify en GitHub y serás notificado instantáneamente de las nuev
-La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina: +La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina: ```bash cd docker @@ -122,10 +124,18 @@ Después de ejecutarlo, puedes acceder al panel de control de Dify en tu navegad ## Próximos pasos -Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](../../docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### Monitorización de Métricas con Grafana + +Importe el panel a Grafana, utilizando la base de datos PostgreSQL de Dify como fuente de datos, para monitorizar métricas en granularidad de aplicaciones, inquilinos, mensajes y más. + +- [Panel de Grafana por @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Implementación con Kubernetes + Si desea configurar una configuración de alta disponibilidad, la comunidad proporciona [Gráficos Helm](https://helm.sh/) y archivos YAML, a través de los cuales puede desplegar Dify en Kubernetes. - [Gráfico Helm por @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) @@ -170,7 +180,7 @@ Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @ ## Contribuir -Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_ES.md). +Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](./CONTRIBUTING.md). Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias.
> Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c). @@ -184,7 +194,7 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en ## Comunidad y Contacto - [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas. -- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. - [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. @@ -198,12 +208,4 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En ## Licencia -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. - -## Divulgación de Seguridad - -Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada. - -## Licencia - -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. +Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. diff --git a/CONTRIBUTING/CONTRIBUTING_FR.md b/docs/fr-FR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_FR.md rename to docs/fr-FR/CONTRIBUTING.md index 8df491a0a0..74e44ca734 100644 --- a/CONTRIBUTING/CONTRIBUTING_FR.md +++ b/docs/fr-FR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Nous devons être agiles et livrer rapidement compte tenu de notre position, mai Ce guide, comme Dify lui-même, est un travail en constante évolution. Nous apprécions grandement votre compréhension si parfois il est en retard par rapport au projet réel, et nous accueillons tout commentaire pour nous aider à nous améliorer. -En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](../LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](../../LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). 
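The "next steps" guidance repeated across these READMEs reduces to one loop: edit `.env`, re-run compose, check the result. A compact version, with `nano` standing in for any editor:

```bash
cd docker
cp .env.example .env        # first run only
nano .env                   # adjust values; .env.example documents each one

# Compose re-creates only containers whose configuration changed
docker compose up -d

# Sanity-check the restart
docker compose ps
docker compose logs --tail=50 api
```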
## Avant de vous lancer diff --git a/README/README_FR.md b/docs/fr-FR/README.md similarity index 78% rename from README/README_FR.md rename to docs/fr-FR/README.md index a782bd16f8..b7d006a927 100644 --- a/README/README_FR.md +++ b/docs/fr-FR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Mettez une étoile à Dify sur GitHub et soyez instantanément informé des nouv
-La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine: +La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](../../docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine: ```bash cd docker @@ -122,7 +124,15 @@ Après l'exécution, vous pouvez accéder au tableau de bord Dify dans votre nav ## Prochaines étapes -Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](../../docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments). + +### Surveillance des Métriques avec Grafana + +Importez le tableau de bord dans Grafana, en utilisant la base de données PostgreSQL de Dify comme source de données, pour surveiller les métriques avec une granularité d'applications, de locataires, de messages et plus. + +- [Tableau de bord Grafana par @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Déploiement avec Kubernetes Si vous souhaitez configurer une configuration haute disponibilité, la communauté fournit des [Helm Charts](https://helm.sh/) et des fichiers YAML, à travers lesquels vous pouvez déployer Dify sur Kubernetes. @@ -168,7 +178,7 @@ Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart ## Contribuer -Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_FR.md). +Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](./CONTRIBUTING.md). Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences. > Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. 
Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c). @@ -182,7 +192,7 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le ## Communauté & Contact - [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions. -- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté. - [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté. @@ -196,12 +206,4 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de ## Licence -Ce référentiel est disponible sous la [Licence open source Dify](../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. - -## Divulgation de sécurité - -Pour protéger votre vie privée, veuillez éviter de publier des problèmes de sécurité sur GitHub. Au lieu de cela, envoyez vos questions à security@dify.ai et nous vous fournirons une réponse plus détaillée. - -## Licence - -Ce référentiel est disponible sous la [Licence open source Dify](../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. +Ce référentiel est disponible sous la [Licence open source Dify](../../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. diff --git a/CONTRIBUTING/CONTRIBUTING_JA.md b/docs/ja-JP/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_JA.md rename to docs/ja-JP/CONTRIBUTING.md index dd3d6cbfc5..4ee7d8c963 100644 --- a/CONTRIBUTING/CONTRIBUTING_JA.md +++ b/docs/ja-JP/CONTRIBUTING.md @@ -6,7 +6,7 @@ Difyに貢献しようとお考えですか?素晴らしいですね。私た このガイドは、Dify自体と同様に、常に進化し続けています。実際のプロジェクトの進行状況と多少のずれが生じる場合もございますが、ご理解いただけますと幸いです。改善のためのフィードバックも歓迎いたします。 -ライセンスについては、[ライセンスと貢献者同意書](../LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 +ライセンスについては、[ライセンスと貢献者同意書](../../LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 ## 始める前に diff --git a/README/README_JA.md b/docs/ja-JP/README.md similarity index 77% rename from README/README_JA.md rename to docs/ja-JP/README.md index 23cd0e692b..f9e700d1df 100644 --- a/README/README_JA.md +++ b/docs/ja-JP/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
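Also buried in the compose changes: `CODE_MAX_STRING_LENGTH` and `TEMPLATE_TRANSFORM_MAX_LENGTH` now default to 400000 instead of 80000, and three `HTTP_REQUEST_MAX_*_TIMEOUT` variables appear, which by their names cap the timeouts an HTTP-request node may be configured with. If the larger string defaults are too permissive for a given deployment, they can be pinned back; the values below simply restore the previous defaults:

```bash
cat >> .env <<'EOF'
# Restore the pre-bump limits for code and template nodes
CODE_MAX_STRING_LENGTH=80000
TEMPLATE_TRANSFORM_MAX_LENGTH=80000
EOF
docker compose up -d api worker
```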

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -109,7 +111,7 @@ GitHub上でDifyにスターを付けることで、Difyに関する新しいニ
-Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。 +Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](../../docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。 ```bash cd docker @@ -123,7 +125,15 @@ docker compose up -d ## 次のステップ -設定をカスタマイズする必要がある場合は、[.env.example](docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。 +設定をカスタマイズする必要がある場合は、[.env.example](../../docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。 + +### Grafanaを使用したメトリクス監視 + +Grafanaにダッシュボードをインポートし、DifyのPostgreSQLデータベースをデータソースとして使用して、アプリ、テナント、メッセージなどの粒度でメトリクスを監視します。 + +- [@bowenliang123によるGrafanaダッシュボード](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetesでのデプロイ 高可用性設定を設定する必要がある場合、コミュニティは[Helm Charts](https://helm.sh/)とYAMLファイルにより、DifyをKubernetesにデプロイすることができます。 @@ -169,7 +179,7 @@ docker compose up -d ## 貢献 -コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_JA.md)を参照してください。 +コードに貢献したい方は、[Contribution Guide](./CONTRIBUTING.md)を参照してください。 同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。 > Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。 @@ -183,10 +193,10 @@ docker compose up -d ## コミュニティ & お問い合わせ - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 -- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](../CONTRIBUTING/CONTRIBUTING_JA.md)を参照してください +- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](./CONTRIBUTING.md)を参照してください - [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 - [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 ## ライセンス -このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](../LICENSE)の下で利用可能です。 +このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](../../LICENSE)の下で利用可能です。 diff --git a/CONTRIBUTING/CONTRIBUTING_KR.md b/docs/ko-KR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_KR.md rename to docs/ko-KR/CONTRIBUTING.md index f94d5bfbc9..9c171c3561 100644 --- a/CONTRIBUTING/CONTRIBUTING_KR.md +++ b/docs/ko-KR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Dify에 기여하려고 하시는군요 - 정말 멋집니다, 당신이 무엇 이 가이드는 Dify 자체와 마찬가지로 끊임없이 진행 중인 작업입니다. 때로는 실제 프로젝트보다 뒤처질 수 있다는 점을 이해해 주시면 감사하겠으며, 개선을 위한 피드백은 언제든지 환영합니다. -라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](../LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. +라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](../../LICENSE)를 읽어보는 시간을 가져주세요. 
커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. ## 시작하기 전에 diff --git a/README/README_KR.md b/docs/ko-KR/README.md similarity index 76% rename from README/README_KR.md rename to docs/ko-KR/README.md index e1a2a82677..4e4b82e920 100644 --- a/README/README_KR.md +++ b/docs/ko-KR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
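`ENFORCE_LANGGENIUS_PLUGIN_SIGNATURES=true` now ships next to the existing `FORCE_VERIFYING_SIGNATURE=true` in both the `.env` example and the shared compose environment; by its name it requires plugins to carry a langgenius signature rather than any valid one. For a development box that must sideload an unsigned local plugin, the long-standing escape hatch is the older flag — leave both at `true` in production, since they are supply-chain protections:

```bash
cat >> .env <<'EOF'
# Development only: accept plugins without a verified signature
FORCE_VERIFYING_SIGNATURE=false
EOF
docker compose up -d api plugin_daemon
```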

Dify 클라우드 · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify는 오픈 소스 LLM 앱 개발 플랫폼입니다. 직관적인 인터페이스를 통해 AI 워크플로우, RAG 파이프라인, 에이전트 기능, 모델 관리, 관찰 기능 등을 결합하여 프로토타입에서 프로덕션까지 빠르게 전환할 수 있습니다. 주요 기능 목록은 다음과 같습니다:

@@ -102,7 +104,7 @@ GitHub에서 Dify에 별표를 찍어 새로운 릴리스를 즉시 알림 받
-Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요. +Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](../../docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요. ```bash cd docker @@ -116,7 +118,15 @@ docker compose up -d ## 다음 단계 -구성을 사용자 정의해야 하는 경우 [.env.example](docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다. +구성을 사용자 정의해야 하는 경우 [.env.example](../../docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다. + +### Grafana를 사용한 메트릭 모니터링 + +Dify의 PostgreSQL 데이터베이스를 데이터 소스로 사용하여 앱, 테넌트, 메시지 등에 대한 세분화된 메트릭을 모니터링하기 위해 대시보드를 Grafana로 가져옵니다. + +- [@bowenliang123의 Grafana 대시보드](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetes를 통한 배포 Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했다는 커뮤니티가 제공하는 [Helm Charts](https://helm.sh/)와 YAML 파일이 존재합니다. @@ -162,7 +172,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 기여 -코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_KR.md)를 참조하세요. +코드에 기여하고 싶은 분들은 [기여 가이드](./CONTRIBUTING.md)를 참조하세요. 동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다. > 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. @@ -176,7 +186,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 커뮤니티 & 연락처 - [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다. -- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. +- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](./CONTRIBUTING.md)를 참조하세요. - [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. - [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. @@ -190,4 +200,4 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 라이선스 -이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](../LICENSE)에 따라 사용할 수 있습니다. +이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](../../LICENSE)에 따라 사용할 수 있습니다. diff --git a/CONTRIBUTING/CONTRIBUTING_PT.md b/docs/pt-BR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_PT.md rename to docs/pt-BR/CONTRIBUTING.md index 2aec1e2196..737b2ddce2 100644 --- a/CONTRIBUTING/CONTRIBUTING_PT.md +++ b/docs/pt-BR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Precisamos ser ágeis e entregar rapidamente considerando onde estamos, mas tamb Este guia, como o próprio Dify, é um trabalho em constante evolução. 
Agradecemos muito a sua compreensão se às vezes ele ficar atrasado em relação ao projeto real, e damos as boas-vindas a qualquer feedback para que possamos melhorar. -Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](../LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](../../LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de começar diff --git a/README/README_PT.md b/docs/pt-BR/README.md similarity index 76% rename from README/README_PT.md rename to docs/pt-BR/README.md index 91132aade4..f96b18eabb 100644 --- a/README/README_PT.md +++ b/docs/pt-BR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
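Every README above defers Kubernetes deployment to community Helm charts (@LeoQuote's and @BorisPolonsky's are linked). The install shape is the standard Helm flow; the repo URL, chart name, and values file below are placeholders to be taken from whichever chart's README you follow:

```bash
# Placeholder repo URL and chart name -- use the community chart's README
helm repo add dify-community https://example.invalid/charts
helm repo update

helm install dify dify-community/dify \
  --namespace dify --create-namespace \
  -f values.yaml

kubectl -n dify get pods
```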

📌 Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM @@ -39,18 +39,20 @@

- README em Inglês - 简体中文版自述文件 - 日本語のREADME - README em Espanhol - README em Francês - README tlhIngan Hol - README em Coreano - README em Árabe - README em Turco - README em Vietnamita - README em Português - BR - README in বাংলা + README em Inglês + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README em Espanhol + README em Francês + README tlhIngan Hol + README em Coreano + README em Árabe + README em Turco + README em Vietnamita + README em Português - BR + README in Deutsch + README in বাংলা

Dify é uma plataforma de desenvolvimento de aplicativos LLM de código aberto. Sua interface intuitiva combina workflow de IA, pipeline RAG, capacidades de agente, gerenciamento de modelos, recursos de observabilidade e muito mais, permitindo que você vá rapidamente do protótipo à produção. Aqui está uma lista das principais funcionalidades: @@ -108,7 +110,7 @@ Dê uma estrela no Dify no GitHub e seja notificado imediatamente sobre novos la
-A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina: +A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina: ```bash cd docker @@ -122,7 +124,15 @@ Após a execução, você pode acessar o painel do Dify no navegador em [http:// ## Próximos passos -Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](../../docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). + +### Monitoramento de Métricas com Grafana + +Importe o dashboard para o Grafana, usando o banco de dados PostgreSQL do Dify como fonte de dados, para monitorar métricas na granularidade de aplicativos, inquilinos, mensagens e muito mais. + +- [Dashboard do Grafana por @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Implantação com Kubernetes Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts](https://helm.sh/) e arquivos YAML contribuídos pela comunidade que permitem a implantação do Dify no Kubernetes. @@ -168,7 +178,7 @@ Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by ## Contribuindo -Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_PT.md). +Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](./CONTRIBUTING.md). Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências. > Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. 
Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). @@ -182,7 +192,7 @@ Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em ## Comunidade e contato - [Discussões no GitHub](https://github.com/langgenius/dify/discussions). Melhor para: compartilhar feedback e fazer perguntas. -- [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Melhor para: compartilhar suas aplicações e interagir com a comunidade. - [X(Twitter)](https://twitter.com/dify_ai). Melhor para: compartilhar suas aplicações e interagir com a comunidade. @@ -196,4 +206,4 @@ Para proteger sua privacidade, evite postar problemas de segurança no GitHub. E ## Licença -Este repositório está disponível sob a [Licença de Código Aberto Dify](../LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. +Este repositório está disponível sob a [Licença de Código Aberto Dify](../../LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. diff --git a/README/README_SI.md b/docs/sl-SI/README.md similarity index 80% rename from README/README_SI.md rename to docs/sl-SI/README.md index 8cd78c065c..04dc3b5dff 100644 --- a/README/README_SI.md +++ b/docs/sl-SI/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
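One more change threaded through this diff: the web service gains `NEXT_PUBLIC_COOKIE_DOMAIN` and the shared API environment gains `COOKIE_DOMAIN`, both empty by default — apparently for deployments that serve the console and apps from sibling subdomains and need a shared auth cookie. A hedged example, with `example.com` standing in for a real domain:

```bash
cat >> .env <<'EOF'
# Scope auth cookies to the parent domain so subdomains share them
COOKIE_DOMAIN=example.com
NEXT_PUBLIC_COOKIE_DOMAIN=example.com
EOF
docker compose up -d api web
```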

📌 Predstavljamo nalaganje datotek Dify Workflow: znova ustvarite Google NotebookLM Podcast @@ -36,18 +36,20 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README Slovenščina - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README Slovenščina + README in Deutsch + README in বাংলা

Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje. @@ -126,6 +128,14 @@ Star Dify on GitHub and be instantly notified of new releases. Če morate prilagoditi konfiguracijo, si oglejte komentarje v naši datoteki .env.example in posodobite ustrezne vrednosti v svoji .env datoteki. Poleg tega boste morda morali prilagoditi docker-compose.yamlsamo datoteko, na primer spremeniti različice slike, preslikave vrat ali namestitve nosilca, glede na vaše specifično okolje in zahteve za uvajanje. Po kakršnih koli spremembah ponovno zaženite docker-compose up -d. Celoten seznam razpoložljivih spremenljivk okolja najdete tukaj . +### Spremljanje metrik z Grafana + +Uvoz nadzorne plošče v Grafana, z uporabo Difyjeve PostgreSQL baze podatkov kot vir podatkov, za spremljanje metrike glede na podrobnost aplikacij, najemnikov, sporočil in drugega. + +- [Nadzorna plošča Grafana avtorja @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Namestitev s Kubernetes + Če želite konfigurirati visoko razpoložljivo nastavitev, so na voljo Helm Charts in datoteke YAML, ki jih prispeva skupnost, ki omogočajo uvedbo Difyja v Kubernetes. - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) @@ -169,7 +179,7 @@ Z enim klikom namestite Dify v AKS z uporabo [Azure Devops Pipeline Helm Chart b ## Prispevam -Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. +Za tiste, ki bi radi prispevali kodo, si oglejte naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. > Iščemo sodelavce za pomoč pri prevajanju Difyja v jezike, ki niso mandarinščina ali angleščina. Če želite pomagati, si oglejte i18n README za več informacij in nam pustite komentar v global-userskanalu našega strežnika skupnosti Discord . @@ -196,4 +206,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj ## Licenca -To skladišče je na voljo pod [odprtokodno licenco Dify](../LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. +To skladišče je na voljo pod [odprtokodno licenco Dify](../../LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. diff --git a/README/README_KL.md b/docs/tlh/README.md similarity index 79% rename from README/README_KL.md rename to docs/tlh/README.md index cae02f56fe..b1e3016efd 100644 --- a/README/README_KL.md +++ b/docs/tlh/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Star Dify on GitHub and be instantly notified of new releases.
-The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine: +The easiest way to start the Dify server is to run our [docker-compose.yml](../../docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine: ```bash cd docker @@ -122,7 +124,7 @@ After running, you can access the Dify dashboard in your browser at [http://loca ## Next steps -If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). +If you need to customize the configuration, please refer to the comments in our [.env.example](../../docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes. @@ -181,10 +183,7 @@ At the same time, please consider supporting Dify by sharing it on social media ## Community & Contact -- \[GitHub Discussion\](https://github.com/langgenius/dify/discussions - -). Best for: sharing feedback and asking questions. - +- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions. - [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. - [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. @@ -199,4 +198,4 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead ## License -This repository is available under the [Dify Open Source License](../LICENSE), which is essentially Apache 2.0 with a few additional restrictions. +This repository is available under the [Dify Open Source License](../../LICENSE), which is essentially Apache 2.0 with a few additional restrictions. 
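As a quick sanity check once `docker compose up -d` finishes, the web entry point can be probed from Python. A minimal sketch — the URL assumes the default Nginx port mapping (port 80), and `httpx` is assumed to be installed:

```python
# Minimal post-install reachability probe for a self-hosted Dify.
# Assumes the default Nginx port mapping; adjust the URL if you changed it.
import httpx

response = httpx.get("http://localhost/install", follow_redirects=True, timeout=10.0)
print(response.status_code)  # expect 200 once the containers are healthy
```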
diff --git a/CONTRIBUTING/CONTRIBUTING_TR.md b/docs/tr-TR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_TR.md rename to docs/tr-TR/CONTRIBUTING.md index 1932a3ab34..59227d31a9 100644 --- a/CONTRIBUTING/CONTRIBUTING_TR.md +++ b/docs/tr-TR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Bulunduğumuz noktada çevik olmamız ve hızlı hareket etmemiz gerekiyor, anca Bu rehber, Dify'ın kendisi gibi, sürekli gelişen bir çalışmadır. Bazen gerçek projenin gerisinde kalırsa anlayışınız için çok minnettarız ve gelişmemize yardımcı olacak her türlü geri bildirimi memnuniyetle karşılıyoruz. -Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](../LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. +Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](../../LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. ## Başlamadan Önce diff --git a/README/README_TR.md b/docs/tr-TR/README.md similarity index 77% rename from README/README_TR.md rename to docs/tr-TR/README.md index 9836c6be61..965a1704be 100644 --- a/README/README_TR.md +++ b/docs/tr-TR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Bulut · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify, açık kaynaklı bir LLM uygulama geliştirme platformudur. Sezgisel arayüzü, AI iş akışı, RAG pipeline'ı, ajan yetenekleri, model yönetimi, gözlemlenebilirlik özellikleri ve daha fazlasını birleştirerek, prototipten üretime hızlıca geçmenizi sağlar. İşte temel özelliklerin bir listesi: @@ -102,7 +104,7 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun. > - RAM >= 4GB
-Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun: +Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](../../docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun: ```bash cd docker @@ -116,7 +118,15 @@ docker compose up -d ## Sonraki adımlar -Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz. +Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](../../docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz. + +### Grafana ile Metrik İzleme + +Uygulamalar, kiracılar, mesajlar ve daha fazlasının granularitesinde metrikleri izlemek için Dify'nin PostgreSQL veritabanını veri kaynağı olarak kullanarak panoyu Grafana'ya aktarın. + +- [@bowenliang123 tarafından Grafana Panosu](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetes ile Dağıtım Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'ın Kubernetes üzerine dağıtılmasına olanak tanıyan topluluk katkılı [Helm Charts](https://helm.sh/) ve YAML dosyaları mevcuttur. @@ -161,7 +171,7 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter ## Katkıda Bulunma -Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_TR.md) bakabilirsiniz. +Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakabilirsiniz. Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün. > Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın.
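To make the monitoring idea concrete, here is a hedged sketch of the kind of query such a dashboard panel issues against Dify's PostgreSQL database. The DSN and the `messages` table/column names are assumptions about the default self-hosted setup, not taken from this patch:

```python
# Hypothetical sketch: daily message counts per app, read directly from
# Dify's PostgreSQL database (the Grafana dashboard's data source).
# The DSN and the messages/app_id/created_at names are assumptions.
import psycopg2

conn = psycopg2.connect("postgresql://postgres:difyai123456@localhost:5432/dify")
with conn.cursor() as cur:
    cur.execute(
        """
        SELECT app_id, date_trunc('day', created_at) AS day, count(*) AS total
        FROM messages
        GROUP BY app_id, day
        ORDER BY day DESC
        LIMIT 20
        """
    )
    for app_id, day, total in cur.fetchall():
        print(app_id, day, total)
conn.close()
```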
@@ -175,7 +185,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p ## Topluluk & iletişim - [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için. -- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın. +- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakın. - [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. - [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. @@ -189,4 +199,4 @@ Gizliliğinizi korumak için, lütfen güvenlik sorunlarını GitHub'da paylaşm ## Lisans -Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](../LICENSE) altında kullanıma sunulmuştur. +Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](../../LICENSE) altında kullanıma sunulmuştur. diff --git a/CONTRIBUTING/CONTRIBUTING_VI.md b/docs/vi-VN/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_VI.md rename to docs/vi-VN/CONTRIBUTING.md index b9844c4869..fa1d875f83 100644 --- a/CONTRIBUTING/CONTRIBUTING_VI.md +++ b/docs/vi-VN/CONTRIBUTING.md @@ -6,7 +6,7 @@ Chúng tôi cần phải nhanh nhẹn và triển khai nhanh chóng, nhưng cũn Hướng dẫn này, giống như Dify, đang được phát triển liên tục. Chúng tôi rất cảm kích sự thông cảm của bạn nếu đôi khi nó chưa theo kịp dự án thực tế, và hoan nghênh mọi phản hồi để cải thiện. -Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](../LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](../../LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Trước khi bắt đầu diff --git a/README/README_VI.md b/docs/vi-VN/README.md similarity index 78% rename from README/README_VI.md rename to docs/vi-VN/README.md index 22d74eb31d..51f7c5d994 100644 --- a/README/README_VI.md +++ b/docs/vi-VN/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify là một nền tảng phát triển ứng dụng LLM mã nguồn mở. Giao diện trực quan kết hợp quy trình làm việc AI, mô hình RAG, khả năng tác nhân, quản lý mô hình, tính năng quan sát và hơn thế nữa, cho phép bạn nhanh chóng chuyển từ nguyên mẫu sang sản phẩm. Đây là danh sách các tính năng cốt lõi: @@ -103,7 +105,7 @@ Yêu thích Dify trên GitHub và được thông báo ngay lập tức về cá
-Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn: +Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](../../docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn: ```bash cd docker @@ -117,7 +119,15 @@ Sau khi chạy, bạn có thể truy cập bảng điều khiển Dify trong tr ## Các bước tiếp theo -Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](../../docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments). + +### Giám sát Số liệu với Grafana + +Nhập bảng điều khiển vào Grafana, sử dụng cơ sở dữ liệu PostgreSQL của Dify làm nguồn dữ liệu, để giám sát số liệu theo mức độ chi tiết của ứng dụng, người thuê, tin nhắn và hơn thế nữa. + +- [Bảng điều khiển Grafana của @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Triển khai với Kubernetes Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có các [Helm Charts](https://helm.sh/) và tệp YAML do cộng đồng đóng góp cho phép Dify được triển khai trên Kubernetes. @@ -162,7 +172,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Đóng góp -Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_VI.md) của chúng tôi. +Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](./CONTRIBUTING.md) của chúng tôi. Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị. > Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi. 
@@ -176,7 +186,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Cộng đồng & liên hệ - [Thảo luận GitHub](https://github.com/langgenius/dify/discussions). Tốt nhất cho: chia sẻ phản hồi và đặt câu hỏi. -- [Vấn đề GitHub](https://github.com/langgenius/dify/issues). Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi. +- [Vấn đề GitHub](https://github.com/langgenius/dify/issues). Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](./CONTRIBUTING.md) của chúng tôi. - [Discord](https://discord.gg/FngNHpbcY7). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng. - [X(Twitter)](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng. @@ -190,4 +200,4 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Giấy phép -Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](../LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. +Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](../../LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. diff --git a/docs/weaviate/WEAVIATE_MIGRATION_GUIDE/README.md b/docs/weaviate/WEAVIATE_MIGRATION_GUIDE/README.md new file mode 100644 index 0000000000..b2599e8c2e --- /dev/null +++ b/docs/weaviate/WEAVIATE_MIGRATION_GUIDE/README.md @@ -0,0 +1,187 @@ +# Weaviate Migration Guide: v1.19 → v1.27 + +## Overview + +Dify has upgraded from Weaviate v1.19 to v1.27 with the Python client updated from v3.24 to v4.17. + +## What Changed + +### Breaking Changes + +1. **Weaviate Server**: `1.19.0` → `1.27.0` +1. **Python Client**: `weaviate-client~=3.24.0` → `weaviate-client==4.17.0` +1. **gRPC Required**: Weaviate v1.27 requires gRPC port `50051` (in addition to HTTP port `8080`) +1. **Docker Compose**: Added temporary entrypoint overrides for client installation + +### Key Improvements + +- Faster vector operations via gRPC +- Improved batch processing +- Better error handling + +## Migration Steps + +### For Docker Users + +#### Step 1: Backup Your Data + +```bash +cd docker +docker compose down +sudo cp -r ./volumes/weaviate ./volumes/weaviate_backup_$(date +%Y%m%d) +``` + +#### Step 2: Update Dify + +```bash +git pull origin main +docker compose pull +``` + +#### Step 3: Start Services + +```bash +docker compose up -d +sleep 30 +curl http://localhost:8080/v1/meta +``` + +#### Step 4: Verify Migration + +```bash +# Check both ports are accessible +curl http://localhost:8080/v1/meta +netstat -tulpn | grep 50051 + +# Test in Dify UI: +# 1. Go to Knowledge Base +# 2. Test search functionality +# 3. 
Upload a test document +``` + +### For Source Installation + +#### Step 1: Update Dependencies + +```bash +cd api +uv sync --dev +uv run python -c "import weaviate; print(weaviate.__version__)" +# Should show: 4.17.0 +``` + +#### Step 2: Update Weaviate Server + +```bash +cd docker +docker compose -f docker-compose.middleware.yaml --profile weaviate up -d weaviate +curl http://localhost:8080/v1/meta +netstat -tulpn | grep 50051 +``` + +## Troubleshooting + +### Error: "No module named 'weaviate.classes'" + +**Solution**: + +```bash +cd api +uv sync --reinstall-package weaviate-client +uv run python -c "import weaviate; print(weaviate.__version__)" +# Should show: 4.17.0 +``` + +### Error: "gRPC health check failed" + +**Solution**: + +```bash +# Check Weaviate ports +docker ps | grep weaviate +# Should show: 0.0.0.0:8080->8080/tcp, 0.0.0.0:50051->50051/tcp + +# If missing gRPC port, add to docker-compose: +# ports: +# - "8080:8080" +# - "50051:50051" +``` + +### Error: "Weaviate version 1.19.0 is not supported" + +**Solution**: + +```bash +# Update Weaviate image in docker-compose +# Change: semitechnologies/weaviate:1.19.0 +# To: semitechnologies/weaviate:1.27.0 +docker compose down +docker compose up -d +``` + +### Data Migration Failed + +**Solution**: + +```bash +cd docker +docker compose down +sudo rm -rf ./volumes/weaviate +sudo cp -r ./volumes/weaviate_backup_YYYYMMDD ./volumes/weaviate +docker compose up -d +``` + +## Rollback Instructions + +```bash +# 1. Stop services +docker compose down + +# 2. Restore data backup +sudo rm -rf ./volumes/weaviate +sudo cp -r ./volumes/weaviate_backup_YYYYMMDD ./volumes/weaviate + +# 3. Check out the previous version +git checkout <previous-version> + +# 4. Restart services +docker compose up -d +``` + +## Compatibility + +| Component | Old Version | New Version | Compatible | +|-----------|-------------|-------------|------------| +| Weaviate Server | 1.19.0 | 1.27.0 | ✅ Yes | +| weaviate-client | ~3.24.0 | ==4.17.0 | ✅ Yes | +| Existing Data | v1.19 format | v1.27 format | ✅ Yes | + +## Testing Checklist + +Before deploying to production: + +- [ ] Backup all Weaviate data +- [ ] Test in staging environment +- [ ] Verify existing collections are accessible +- [ ] Test vector search functionality +- [ ] Test document upload and retrieval +- [ ] Monitor gRPC connection stability +- [ ] Check performance metrics + +## Support + +If you encounter issues: + +1. Check GitHub Issues: https://github.com/langgenius/dify/issues +1. 
Create a bug report with: + - Error messages + - Docker logs: `docker compose logs weaviate` + - Dify version + - Migration steps attempted + +## Important Notes + +- **Data Safety**: Existing vector data remains fully compatible +- **No Re-indexing**: No need to rebuild vector indexes +- **Temporary Workaround**: The entrypoint overrides are temporary until next Dify release +- **Performance**: May see improved performance due to gRPC usage diff --git a/CONTRIBUTING/CONTRIBUTING_CN.md b/docs/zh-CN/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_CN.md rename to docs/zh-CN/CONTRIBUTING.md index 8c52d8939c..5b71467804 100644 --- a/CONTRIBUTING/CONTRIBUTING_CN.md +++ b/docs/zh-CN/CONTRIBUTING.md @@ -6,7 +6,7 @@ 本指南和 Dify 一样在不断完善中。如果有任何滞后于项目实际情况的地方,恳请谅解,我们也欢迎任何改进建议。 -关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](../LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](../../LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 开始之前 diff --git a/README/README_CN.md b/docs/zh-CN/README.md similarity index 77% rename from README/README_CN.md rename to docs/zh-CN/README.md index 9501992bd2..888a0d7f12 100644 --- a/README/README_CN.md +++ b/docs/zh-CN/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
Dify 云服务 · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা
# @@ -111,7 +113,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI ### 快速启动 -启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): +启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](../../docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): ```bash cd docker @@ -123,7 +125,13 @@ docker compose up -d ### 自定义配置 -如果您需要自定义配置,请参考 [.env.example](docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。 +如果您需要自定义配置,请参考 [.env.example](../../docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。 + +### 使用 Grafana 进行指标监控 + +将仪表板导入 Grafana,使用 Dify 的 PostgreSQL 数据库作为数据源,以监控应用、租户、消息等粒度的指标。 + +- [由 @bowenliang123 提供的 Grafana 仪表板](https://github.com/bowenliang123/dify-grafana-dashboard) #### 使用 Helm Chart 或 Kubernetes 资源清单(YAML)部署 @@ -180,7 +188,7 @@ docker compose up -d ## Contributing -对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_CN.md)。 +对于那些想要贡献代码的人,请参阅我们的[贡献指南](./CONTRIBUTING.md)。 同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。 > 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。 @@ -196,7 +204,7 @@ docker compose up -d 我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括:提交代码、问题、新想法,或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。 - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 
👉:分享您的应用程序并与社区交流。 -- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](../CONTRIBUTING.md)。 +- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](./CONTRIBUTING.md)。 - [电子邮件支持](mailto:hello@dify.ai?subject=%5BGitHub%5DQuestions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。 - [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。 @@ -208,4 +216,4 @@ docker compose up -d ## License -本仓库遵循 [Dify Open Source License](../LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 +本仓库遵循 [Dify Open Source License](../../LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 diff --git a/CONTRIBUTING/CONTRIBUTING_TW.md b/docs/zh-TW/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_TW.md rename to docs/zh-TW/CONTRIBUTING.md index 7fba220a22..1d5f02efa1 100644 --- a/CONTRIBUTING/CONTRIBUTING_TW.md +++ b/docs/zh-TW/CONTRIBUTING.md @@ -6,7 +6,7 @@ 這份指南與 Dify 一樣,都在持續完善中。如果指南內容有落後於實際專案的情況,還請見諒,也歡迎提供改進建議。 -關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](../LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](../../LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 開始之前 diff --git a/README/README_TW.md b/docs/zh-TW/README.md similarity index 78% rename from README/README_TW.md rename to docs/zh-TW/README.md index b9c0b81246..d8c484a6d4 100644 --- a/README/README_TW.md +++ b/docs/zh-TW/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 介紹 Dify 工作流程檔案上傳功能:重現 Google NotebookLM Podcast @@ -39,18 +39,18 @@

- README in English - 繁體中文文件 - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in Deutsch + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch

Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合了智能代理工作流程、RAG 管道、代理功能、模型管理、可觀察性功能等,讓您能夠快速從原型進展到生產環境。 @@ -64,7 +64,7 @@ Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合
-啟動 Dify 伺服器最簡單的方式是透過 [docker compose](docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): +啟動 Dify 伺服器最簡單的方式是透過 [docker compose](../../docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): ```bash cd dify @@ -128,7 +128,15 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 進階設定 -如果您需要自定義配置,請參考我們的 [.env.example](docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。 +如果您需要自定義配置,請參考我們的 [.env.example](../../docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。 + +### 使用 Grafana 進行指標監控 + +將儀表板匯入 Grafana,使用 Dify 的 PostgreSQL 資料庫作為資料來源,以監控應用程式、租戶、訊息等顆粒度的指標。 + +- [由 @bowenliang123 提供的 Grafana 儀表板](https://github.com/bowenliang123/dify-grafana-dashboard) + +### 使用 Kubernetes 部署 如果您想配置高可用性設置,社區貢獻的 [Helm Charts](https://helm.sh/) 和 Kubernetes 資源清單(YAML)允許在 Kubernetes 上部署 Dify。 @@ -173,7 +181,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 貢獻 -對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_TW.md)。 +對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](./CONTRIBUTING.md)。 同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。 > 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。 @@ -181,7 +189,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 社群與聯絡方式 - [GitHub Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。 -- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 +- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](./CONTRIBUTING.md)。 - [Discord](https://discord.gg/FngNHpbcY7):最適合分享您的應用程式並與社群互動。 - [X(Twitter)](https://twitter.com/dify_ai):最適合分享您的應用程式並與社群互動。 @@ -201,4 +209,4 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 授權條款 -本代碼庫採用 [Dify 開源授權](../LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 +本代碼庫採用 [Dify 開源授權](../../LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 diff --git a/scripts/stress-test/setup/import_workflow_app.py b/scripts/stress-test/setup/import_workflow_app.py index 86d0239e35..41a76bd29b 100755 --- a/scripts/stress-test/setup/import_workflow_app.py +++ b/scripts/stress-test/setup/import_workflow_app.py @@ -8,7 +8,7 @@ sys.path.append(str(Path(__file__).parent.parent)) import json import httpx -from common import Logger, config_helper +from common import Logger, config_helper # type: ignore[import] def import_workflow_app() -> None: diff --git a/sdks/python-client/MANIFEST.in b/sdks/python-client/MANIFEST.in index 12f44237a2..34b7e8711c 100644 --- a/sdks/python-client/MANIFEST.in +++ b/sdks/python-client/MANIFEST.in @@ -1 +1,3 @@ recursive-include dify_client *.py +include README.md +include LICENSE diff --git a/sdks/python-client/README.md b/sdks/python-client/README.md index 34b14b3a94..ebfb5f5397 100644 --- a/sdks/python-client/README.md +++ 
b/sdks/python-client/README.md @@ -10,6 +10,8 @@ First, install `dify-client` python sdk package: pip install dify-client ``` +### Synchronous Usage + Write your code with sdk: - completion generate with `blocking` response_mode @@ -221,3 +223,187 @@ answer = result.get("data").get("outputs") print(answer["answer"]) ``` + +- Dataset Management + +```python +from dify_client import KnowledgeBaseClient + +api_key = "your_api_key" +dataset_id = "your_dataset_id" + +# Use context manager to ensure proper resource cleanup +with KnowledgeBaseClient(api_key, dataset_id) as kb_client: + # Get dataset information + dataset_info = kb_client.get_dataset() + dataset_info.raise_for_status() + print(dataset_info.json()) + + # Update dataset configuration + update_response = kb_client.update_dataset( + name="Updated Dataset Name", + description="Updated description", + indexing_technique="high_quality" + ) + update_response.raise_for_status() + print(update_response.json()) + + # Batch update document status + batch_response = kb_client.batch_update_document_status( + action="enable", + document_ids=["doc_id_1", "doc_id_2", "doc_id_3"] + ) + batch_response.raise_for_status() + print(batch_response.json()) +``` + +- Conversation Variables Management + +```python +from dify_client import ChatClient + +api_key = "your_api_key" + +# Use context manager to ensure proper resource cleanup +with ChatClient(api_key) as chat_client: + # Get all conversation variables + variables = chat_client.get_conversation_variables( + conversation_id="conversation_id", + user="user_id" + ) + variables.raise_for_status() + print(variables.json()) + + # Update a specific conversation variable + update_var = chat_client.update_conversation_variable( + conversation_id="conversation_id", + variable_id="variable_id", + value="new_value", + user="user_id" + ) + update_var.raise_for_status() + print(update_var.json()) +``` + +### Asynchronous Usage + +The SDK provides full async/await support for all API operations using `httpx.AsyncClient`. All async clients mirror their synchronous counterparts but require `await` for method calls. 
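Because each method returns an awaitable, independent requests can also be fanned out concurrently. A minimal sketch using `asyncio.gather` with the `AsyncChatClient` API from the examples below (the query strings are illustrative):

```python
import asyncio
from dify_client import AsyncChatClient

api_key = "your_api_key"

async def ask(client: AsyncChatClient, question: str) -> str:
    # Each coroutine awaits its own HTTP round-trip without blocking the others.
    response = await client.create_chat_message(
        inputs={},
        query=question,
        user="user_id",
        response_mode="blocking",
    )
    response.raise_for_status()
    return response.json().get("answer", "")

async def main():
    # One client (and one connection pool) shared across concurrent requests.
    async with AsyncChatClient(api_key) as client:
        answers = await asyncio.gather(
            ask(client, "What is RAG?"),
            ask(client, "What is an agent?"),
        )
        for answer in answers:
            print(answer)

asyncio.run(main())
```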
+ +- async chat with `blocking` response_mode + +```python +import asyncio +from dify_client import AsyncChatClient + +api_key = "your_api_key" + +async def main(): + # Use async context manager for proper resource cleanup + async with AsyncChatClient(api_key) as client: + response = await client.create_chat_message( + inputs={}, + query="Hello, how are you?", + user="user_id", + response_mode="blocking" + ) + response.raise_for_status() + result = response.json() + print(result.get('answer')) + +# Run the async function +asyncio.run(main()) +``` + +- async completion with `streaming` response_mode + +```python +import asyncio +import json +from dify_client import AsyncCompletionClient + +api_key = "your_api_key" + +async def main(): + async with AsyncCompletionClient(api_key) as client: + response = await client.create_completion_message( + inputs={"query": "What's the weather?"}, + response_mode="streaming", + user="user_id" + ) + response.raise_for_status() + + # Stream the response + async for line in response.aiter_lines(): + if line.startswith('data:'): + data = line[5:].strip() + if data: + chunk = json.loads(data) + print(chunk.get('answer', ''), end='', flush=True) + +asyncio.run(main()) +``` + +- async workflow execution + +```python +import asyncio +from dify_client import AsyncWorkflowClient + +api_key = "your_api_key" + +async def main(): + async with AsyncWorkflowClient(api_key) as client: + response = await client.run( + inputs={"query": "What is machine learning?"}, + response_mode="blocking", + user="user_id" + ) + response.raise_for_status() + result = response.json() + print(result.get("data").get("outputs")) + +asyncio.run(main()) +``` + +- async dataset management + +```python +import asyncio +from dify_client import AsyncKnowledgeBaseClient + +api_key = "your_api_key" +dataset_id = "your_dataset_id" + +async def main(): + async with AsyncKnowledgeBaseClient(api_key, dataset_id) as kb_client: + # Get dataset information + dataset_info = await kb_client.get_dataset() + dataset_info.raise_for_status() + print(dataset_info.json()) + + # List documents + docs = await kb_client.list_documents(page=1, page_size=10) + docs.raise_for_status() + print(docs.json()) + +asyncio.run(main()) +``` + +**Benefits of Async Usage:** + +- **Better Performance**: Handle multiple concurrent API requests efficiently +- **Non-blocking I/O**: Don't block the event loop during network operations +- **Scalability**: Ideal for applications handling many simultaneous requests +- **Modern Python**: Leverages Python's native async/await syntax + +**Available Async Clients:** + +- `AsyncDifyClient` - Base async client +- `AsyncChatClient` - Async chat operations +- `AsyncCompletionClient` - Async completion operations +- `AsyncWorkflowClient` - Async workflow operations +- `AsyncKnowledgeBaseClient` - Async dataset/knowledge base operations +- `AsyncWorkspaceClient` - Async workspace operations + +``` +``` diff --git a/sdks/python-client/dify_client/__init__.py b/sdks/python-client/dify_client/__init__.py index e866472f45..ced093b20a 100644 --- a/sdks/python-client/dify_client/__init__.py +++ b/sdks/python-client/dify_client/__init__.py @@ -4,12 +4,31 @@ from dify_client.client import ( DifyClient, KnowledgeBaseClient, WorkflowClient, + WorkspaceClient, +) + +from dify_client.async_client import ( + AsyncChatClient, + AsyncCompletionClient, + AsyncDifyClient, + AsyncKnowledgeBaseClient, + AsyncWorkflowClient, + AsyncWorkspaceClient, ) __all__ = [ + # Synchronous clients "ChatClient", 
"CompletionClient", "DifyClient", "KnowledgeBaseClient", "WorkflowClient", + "WorkspaceClient", + # Asynchronous clients + "AsyncChatClient", + "AsyncCompletionClient", + "AsyncDifyClient", + "AsyncKnowledgeBaseClient", + "AsyncWorkflowClient", + "AsyncWorkspaceClient", ] diff --git a/sdks/python-client/dify_client/async_client.py b/sdks/python-client/dify_client/async_client.py new file mode 100644 index 0000000000..984f668d0c --- /dev/null +++ b/sdks/python-client/dify_client/async_client.py @@ -0,0 +1,808 @@ +"""Asynchronous Dify API client. + +This module provides async/await support for all Dify API operations using httpx.AsyncClient. +All client classes mirror their synchronous counterparts but require `await` for method calls. + +Example: + import asyncio + from dify_client import AsyncChatClient + + async def main(): + async with AsyncChatClient(api_key="your-key") as client: + response = await client.create_chat_message( + inputs={}, + query="Hello", + user="user-123" + ) + print(response.json()) + + asyncio.run(main()) +""" + +import json +import os +from typing import Literal, Dict, List, Any, IO + +import aiofiles +import httpx + + +class AsyncDifyClient: + """Asynchronous Dify API client. + + This client uses httpx.AsyncClient for efficient async connection pooling. + It's recommended to use this client as a context manager: + + Example: + async with AsyncDifyClient(api_key="your-key") as client: + response = await client.get_app_info() + """ + + def __init__( + self, + api_key: str, + base_url: str = "https://api.dify.ai/v1", + timeout: float = 60.0, + ): + """Initialize the async Dify client. + + Args: + api_key: Your Dify API key + base_url: Base URL for the Dify API + timeout: Request timeout in seconds (default: 60.0) + """ + self.api_key = api_key + self.base_url = base_url + self._client = httpx.AsyncClient( + base_url=base_url, + timeout=httpx.Timeout(timeout, connect=5.0), + ) + + async def __aenter__(self): + """Support async context manager protocol.""" + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Clean up resources when exiting async context.""" + await self.aclose() + + async def aclose(self): + """Close the async HTTP client and release resources.""" + if hasattr(self, "_client"): + await self._client.aclose() + + async def _send_request( + self, + method: str, + endpoint: str, + json: dict | None = None, + params: dict | None = None, + stream: bool = False, + **kwargs, + ): + """Send an async HTTP request to the Dify API. + + Args: + method: HTTP method (GET, POST, PUT, PATCH, DELETE) + endpoint: API endpoint path + json: JSON request body + params: Query parameters + stream: Whether to stream the response + **kwargs: Additional arguments to pass to httpx.request + + Returns: + httpx.Response object + """ + headers = { + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + } + + response = await self._client.request( + method, + endpoint, + json=json, + params=params, + headers=headers, + **kwargs, + ) + + return response + + async def _send_request_with_files(self, method: str, endpoint: str, data: dict, files: dict): + """Send an async HTTP request with file uploads. + + Args: + method: HTTP method (POST, PUT, etc.) 
+ endpoint: API endpoint path + data: Form data + files: Files to upload + + Returns: + httpx.Response object + """ + headers = {"Authorization": f"Bearer {self.api_key}"} + + response = await self._client.request( + method, + endpoint, + data=data, + headers=headers, + files=files, + ) + + return response + + async def message_feedback(self, message_id: str, rating: Literal["like", "dislike"], user: str): + """Send feedback for a message.""" + data = {"rating": rating, "user": user} + return await self._send_request("POST", f"/messages/{message_id}/feedbacks", data) + + async def get_application_parameters(self, user: str): + """Get application parameters.""" + params = {"user": user} + return await self._send_request("GET", "/parameters", params=params) + + async def file_upload(self, user: str, files: dict): + """Upload a file.""" + data = {"user": user} + return await self._send_request_with_files("POST", "/files/upload", data=data, files=files) + + async def text_to_audio(self, text: str, user: str, streaming: bool = False): + """Convert text to audio.""" + data = {"text": text, "user": user, "streaming": streaming} + return await self._send_request("POST", "/text-to-audio", json=data) + + async def get_meta(self, user: str): + """Get metadata.""" + params = {"user": user} + return await self._send_request("GET", "/meta", params=params) + + async def get_app_info(self): + """Get basic application information including name, description, tags, and mode.""" + return await self._send_request("GET", "/info") + + async def get_app_site_info(self): + """Get application site information.""" + return await self._send_request("GET", "/site") + + async def get_file_preview(self, file_id: str): + """Get file preview by file ID.""" + return await self._send_request("GET", f"/files/{file_id}/preview") + + +class AsyncCompletionClient(AsyncDifyClient): + """Async client for Completion API operations.""" + + async def create_completion_message( + self, + inputs: dict, + response_mode: Literal["blocking", "streaming"], + user: str, + files: dict | None = None, + ): + """Create a completion message. + + Args: + inputs: Input variables for the completion + response_mode: Response mode ('blocking' or 'streaming') + user: User identifier + files: Optional files to include + + Returns: + httpx.Response object + """ + data = { + "inputs": inputs, + "response_mode": response_mode, + "user": user, + "files": files, + } + return await self._send_request( + "POST", + "/completion-messages", + data, + stream=(response_mode == "streaming"), + ) + + +class AsyncChatClient(AsyncDifyClient): + """Async client for Chat API operations.""" + + async def create_chat_message( + self, + inputs: dict, + query: str, + user: str, + response_mode: Literal["blocking", "streaming"] = "blocking", + conversation_id: str | None = None, + files: dict | None = None, + ): + """Create a chat message. 
+ + Args: + inputs: Input variables for the chat + query: User query/message + user: User identifier + response_mode: Response mode ('blocking' or 'streaming') + conversation_id: Optional conversation ID for context + files: Optional files to include + + Returns: + httpx.Response object + """ + data = { + "inputs": inputs, + "query": query, + "user": user, + "response_mode": response_mode, + "files": files, + } + if conversation_id: + data["conversation_id"] = conversation_id + + return await self._send_request( + "POST", + "/chat-messages", + data, + stream=(response_mode == "streaming"), + ) + + async def get_suggested(self, message_id: str, user: str): + """Get suggested questions for a message.""" + params = {"user": user} + return await self._send_request("GET", f"/messages/{message_id}/suggested", params=params) + + async def stop_message(self, task_id: str, user: str): + """Stop a running message generation.""" + data = {"user": user} + return await self._send_request("POST", f"/chat-messages/{task_id}/stop", data) + + async def get_conversations( + self, + user: str, + last_id: str | None = None, + limit: int | None = None, + pinned: bool | None = None, + ): + """Get list of conversations.""" + params = {"user": user, "last_id": last_id, "limit": limit, "pinned": pinned} + return await self._send_request("GET", "/conversations", params=params) + + async def get_conversation_messages( + self, + user: str, + conversation_id: str | None = None, + first_id: str | None = None, + limit: int | None = None, + ): + """Get messages from a conversation.""" + params = { + "user": user, + "conversation_id": conversation_id, + "first_id": first_id, + "limit": limit, + } + return await self._send_request("GET", "/messages", params=params) + + async def rename_conversation(self, conversation_id: str, name: str, auto_generate: bool, user: str): + """Rename a conversation.""" + data = {"name": name, "auto_generate": auto_generate, "user": user} + return await self._send_request("POST", f"/conversations/{conversation_id}/name", data) + + async def delete_conversation(self, conversation_id: str, user: str): + """Delete a conversation.""" + data = {"user": user} + return await self._send_request("DELETE", f"/conversations/{conversation_id}", data) + + async def audio_to_text(self, audio_file: IO[bytes] | tuple, user: str): + """Convert audio to text.""" + data = {"user": user} + files = {"file": audio_file} + return await self._send_request_with_files("POST", "/audio-to-text", data, files) + + # Annotation APIs + async def annotation_reply_action( + self, + action: Literal["enable", "disable"], + score_threshold: float, + embedding_provider_name: str, + embedding_model_name: str, + ): + """Enable or disable annotation reply feature.""" + data = { + "score_threshold": score_threshold, + "embedding_provider_name": embedding_provider_name, + "embedding_model_name": embedding_model_name, + } + return await self._send_request("POST", f"/apps/annotation-reply/{action}", json=data) + + async def get_annotation_reply_status(self, action: Literal["enable", "disable"], job_id: str): + """Get the status of an annotation reply action job.""" + return await self._send_request("GET", f"/apps/annotation-reply/{action}/status/{job_id}") + + async def list_annotations(self, page: int = 1, limit: int = 20, keyword: str | None = None): + """List annotations for the application.""" + params = {"page": page, "limit": limit, "keyword": keyword} + return await self._send_request("GET", "/apps/annotations", params=params) + + 
async def create_annotation(self, question: str, answer: str): + """Create a new annotation.""" + data = {"question": question, "answer": answer} + return await self._send_request("POST", "/apps/annotations", json=data) + + async def update_annotation(self, annotation_id: str, question: str, answer: str): + """Update an existing annotation.""" + data = {"question": question, "answer": answer} + return await self._send_request("PUT", f"/apps/annotations/{annotation_id}", json=data) + + async def delete_annotation(self, annotation_id: str): + """Delete an annotation.""" + return await self._send_request("DELETE", f"/apps/annotations/{annotation_id}") + + # Conversation Variables APIs + async def get_conversation_variables(self, conversation_id: str, user: str): + """Get all variables for a specific conversation. + + Args: + conversation_id: The conversation ID to query variables for + user: User identifier + + Returns: + Response from the API containing: + - variables: List of conversation variables with their values + - conversation_id: The conversation ID + """ + params = {"user": user} + url = f"/conversations/{conversation_id}/variables" + return await self._send_request("GET", url, params=params) + + async def update_conversation_variable(self, conversation_id: str, variable_id: str, value: Any, user: str): + """Update a specific conversation variable. + + Args: + conversation_id: The conversation ID + variable_id: The variable ID to update + value: New value for the variable + user: User identifier + + Returns: + Response from the API with updated variable information + """ + data = {"value": value, "user": user} + url = f"/conversations/{conversation_id}/variables/{variable_id}" + return await self._send_request("PATCH", url, json=data) + + +class AsyncWorkflowClient(AsyncDifyClient): + """Async client for Workflow API operations.""" + + async def run( + self, + inputs: dict, + response_mode: Literal["blocking", "streaming"] = "streaming", + user: str = "abc-123", + ): + """Run a workflow.""" + data = {"inputs": inputs, "response_mode": response_mode, "user": user} + return await self._send_request("POST", "/workflows/run", data) + + async def stop(self, task_id: str, user: str): + """Stop a running workflow task.""" + data = {"user": user} + return await self._send_request("POST", f"/workflows/tasks/{task_id}/stop", data) + + async def get_result(self, workflow_run_id: str): + """Get workflow run result.""" + return await self._send_request("GET", f"/workflows/run/{workflow_run_id}") + + async def get_workflow_logs( + self, + keyword: str = None, + status: Literal["succeeded", "failed", "stopped"] | None = None, + page: int = 1, + limit: int = 20, + created_at__before: str = None, + created_at__after: str = None, + created_by_end_user_session_id: str = None, + created_by_account: str = None, + ): + """Get workflow execution logs with optional filtering.""" + params = { + "page": page, + "limit": limit, + "keyword": keyword, + "status": status, + "created_at__before": created_at__before, + "created_at__after": created_at__after, + "created_by_end_user_session_id": created_by_end_user_session_id, + "created_by_account": created_by_account, + } + return await self._send_request("GET", "/workflows/logs", params=params) + + async def run_specific_workflow( + self, + workflow_id: str, + inputs: dict, + response_mode: Literal["blocking", "streaming"] = "streaming", + user: str = "abc-123", + ): + """Run a specific workflow by workflow ID.""" + data = {"inputs": inputs, "response_mode": 
response_mode, "user": user} + return await self._send_request( + "POST", + f"/workflows/{workflow_id}/run", + data, + stream=(response_mode == "streaming"), + ) + + +class AsyncWorkspaceClient(AsyncDifyClient): + """Async client for workspace-related operations.""" + + async def get_available_models(self, model_type: str): + """Get available models by model type.""" + url = f"/workspaces/current/models/model-types/{model_type}" + return await self._send_request("GET", url) + + +class AsyncKnowledgeBaseClient(AsyncDifyClient): + """Async client for Knowledge Base API operations.""" + + def __init__( + self, + api_key: str, + base_url: str = "https://api.dify.ai/v1", + dataset_id: str | None = None, + timeout: float = 60.0, + ): + """Construct an AsyncKnowledgeBaseClient object. + + Args: + api_key: API key of Dify + base_url: Base URL of Dify API + dataset_id: ID of the dataset + timeout: Request timeout in seconds + """ + super().__init__(api_key=api_key, base_url=base_url, timeout=timeout) + self.dataset_id = dataset_id + + def _get_dataset_id(self): + """Get the dataset ID, raise error if not set.""" + if self.dataset_id is None: + raise ValueError("dataset_id is not set") + return self.dataset_id + + async def create_dataset(self, name: str, **kwargs): + """Create a new dataset.""" + return await self._send_request("POST", "/datasets", {"name": name}, **kwargs) + + async def list_datasets(self, page: int = 1, page_size: int = 20, **kwargs): + """List all datasets.""" + return await self._send_request("GET", "/datasets", params={"page": page, "limit": page_size}, **kwargs) + + async def create_document_by_text(self, name: str, text: str, extra_params: dict | None = None, **kwargs): + """Create a document by text. + + Args: + name: Name of the document + text: Text content of the document + extra_params: Extra parameters for the API + + Returns: + Response from the API + """ + data = { + "indexing_technique": "high_quality", + "process_rule": {"mode": "automatic"}, + "name": name, + "text": text, + } + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + url = f"/datasets/{self._get_dataset_id()}/document/create_by_text" + return await self._send_request("POST", url, json=data, **kwargs) + + async def update_document_by_text( + self, + document_id: str, + name: str, + text: str, + extra_params: dict | None = None, + **kwargs, + ): + """Update a document by text.""" + data = {"name": name, "text": text} + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_text" + return await self._send_request("POST", url, json=data, **kwargs) + + async def create_document_by_file( + self, + file_path: str, + original_document_id: str | None = None, + extra_params: dict | None = None, + ): + """Create a document by file.""" + async with aiofiles.open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + data = { + "process_rule": {"mode": "automatic"}, + "indexing_technique": "high_quality", + } + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + if original_document_id is not None: + data["original_document_id"] = original_document_id + url = f"/datasets/{self._get_dataset_id()}/document/create_by_file" + return await self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) + + async def update_document_by_file(self, document_id: str, file_path: str, 
extra_params: dict | None = None): + """Update a document by file.""" + async with aiofiles.open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + data = {} + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file" + return await self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) + + async def batch_indexing_status(self, batch_id: str, **kwargs): + """Get the status of the batch indexing.""" + url = f"/datasets/{self._get_dataset_id()}/documents/{batch_id}/indexing-status" + return await self._send_request("GET", url, **kwargs) + + async def delete_dataset(self): + """Delete this dataset.""" + url = f"/datasets/{self._get_dataset_id()}" + return await self._send_request("DELETE", url) + + async def delete_document(self, document_id: str): + """Delete a document.""" + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}" + return await self._send_request("DELETE", url) + + async def list_documents( + self, + page: int | None = None, + page_size: int | None = None, + keyword: str | None = None, + **kwargs, + ): + """Get a list of documents in this dataset.""" + params = { + "page": page, + "limit": page_size, + "keyword": keyword, + } + url = f"/datasets/{self._get_dataset_id()}/documents" + return await self._send_request("GET", url, params=params, **kwargs) + + async def add_segments(self, document_id: str, segments: list[dict], **kwargs): + """Add segments to a document.""" + data = {"segments": segments} + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments" + return await self._send_request("POST", url, json=data, **kwargs) + + async def query_segments( + self, + document_id: str, + keyword: str | None = None, + status: str | None = None, + **kwargs, + ): + """Query segments in this document. + + Args: + document_id: ID of the document + keyword: Query keyword (optional) + status: Status of the segment (optional, e.g., 'completed') + **kwargs: Additional parameters to pass to the API. + Can include a 'params' dict for extra query parameters. 
+
+        Returns:
+            Response from the API
+        """
+        url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments"
+        params = {
+            "keyword": keyword,
+            "status": status,
+        }
+        if "params" in kwargs:
+            params.update(kwargs.pop("params"))
+        return await self._send_request("GET", url, params=params, **kwargs)
+
+    async def delete_document_segment(self, document_id: str, segment_id: str):
+        """Delete a segment from a document."""
+        url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}"
+        return await self._send_request("DELETE", url)
+
+    async def update_document_segment(self, document_id: str, segment_id: str, segment_data: dict, **kwargs):
+        """Update a segment in a document."""
+        data = {"segment": segment_data}
+        url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}"
+        return await self._send_request("POST", url, json=data, **kwargs)
+
+    # Advanced Knowledge Base APIs
+    async def hit_testing(
+        self,
+        query: str,
+        retrieval_model: Dict[str, Any] | None = None,
+        external_retrieval_model: Dict[str, Any] | None = None,
+    ):
+        """Perform hit testing on the dataset."""
+        data = {"query": query}
+        if retrieval_model:
+            data["retrieval_model"] = retrieval_model
+        if external_retrieval_model:
+            data["external_retrieval_model"] = external_retrieval_model
+        url = f"/datasets/{self._get_dataset_id()}/hit-testing"
+        return await self._send_request("POST", url, json=data)
+
+    async def get_dataset_metadata(self):
+        """Get dataset metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata"
+        return await self._send_request("GET", url)
+
+    async def create_dataset_metadata(self, metadata_data: Dict[str, Any]):
+        """Create dataset metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata"
+        return await self._send_request("POST", url, json=metadata_data)
+
+    async def update_dataset_metadata(self, metadata_id: str, metadata_data: Dict[str, Any]):
+        """Update dataset metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata/{metadata_id}"
+        return await self._send_request("PATCH", url, json=metadata_data)
+
+    async def get_built_in_metadata(self):
+        """Get built-in metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata/built-in"
+        return await self._send_request("GET", url)
+
+    async def manage_built_in_metadata(self, action: str, metadata_data: Dict[str, Any] | None = None):
+        """Manage built-in metadata with specified action."""
+        data = metadata_data or {}
+        url = f"/datasets/{self._get_dataset_id()}/metadata/built-in/{action}"
+        return await self._send_request("POST", url, json=data)
+
+    async def update_documents_metadata(self, operation_data: List[Dict[str, Any]]):
+        """Update metadata for multiple documents."""
+        url = f"/datasets/{self._get_dataset_id()}/documents/metadata"
+        data = {"operation_data": operation_data}
+        return await self._send_request("POST", url, json=data)
+
+    # Dataset Tags APIs
+    async def list_dataset_tags(self):
+        """List all dataset tags."""
+        return await self._send_request("GET", "/datasets/tags")
+
+    async def bind_dataset_tags(self, tag_ids: List[str]):
+        """Bind tags to dataset."""
+        data = {"tag_ids": tag_ids, "target_id": self._get_dataset_id()}
+        return await self._send_request("POST", "/datasets/tags/binding", json=data)
+
+    async def unbind_dataset_tag(self, tag_id: str):
+        """Unbind a single tag from dataset."""
+        data = {"tag_id": tag_id, "target_id": self._get_dataset_id()}
+        return await self._send_request("POST", "/datasets/tags/unbinding", json=data)
+
+    async def get_dataset_tags(self):
+        """Get tags for current dataset."""
+        url = f"/datasets/{self._get_dataset_id()}/tags"
+        return await self._send_request("GET", url)
+
+    # RAG Pipeline APIs
+    async def get_datasource_plugins(self, is_published: bool = True):
+        """Get datasource plugins for RAG pipeline."""
+        params = {"is_published": is_published}
+        url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource-plugins"
+        return await self._send_request("GET", url, params=params)
+
+    async def run_datasource_node(
+        self,
+        node_id: str,
+        inputs: Dict[str, Any],
+        datasource_type: str,
+        is_published: bool = True,
+        credential_id: str | None = None,
+    ):
+        """Run a datasource node in RAG pipeline."""
+        data = {
+            "inputs": inputs,
+            "datasource_type": datasource_type,
+            "is_published": is_published,
+        }
+        if credential_id:
+            data["credential_id"] = credential_id
+        url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource/nodes/{node_id}/run"
+        return await self._send_request("POST", url, json=data, stream=True)
+
+    async def run_rag_pipeline(
+        self,
+        inputs: Dict[str, Any],
+        datasource_type: str,
+        datasource_info_list: List[Dict[str, Any]],
+        start_node_id: str,
+        is_published: bool = True,
+        response_mode: Literal["streaming", "blocking"] = "blocking",
+    ):
+        """Run RAG pipeline."""
+        data = {
+            "inputs": inputs,
+            "datasource_type": datasource_type,
+            "datasource_info_list": datasource_info_list,
+            "start_node_id": start_node_id,
+            "is_published": is_published,
+            "response_mode": response_mode,
+        }
+        url = f"/datasets/{self._get_dataset_id()}/pipeline/run"
+        return await self._send_request("POST", url, json=data, stream=response_mode == "streaming")
+
+    async def upload_pipeline_file(self, file_path: str):
+        """Upload file for RAG pipeline."""
+        async with aiofiles.open(file_path, "rb") as f:
+            # httpx expects bytes or a synchronous file-like object in `files`,
+            # so read the contents up front instead of passing the aiofiles handle.
+            content = await f.read()
+        files = {"file": (os.path.basename(file_path), content)}
+        return await self._send_request_with_files("POST", "/datasets/pipeline/file-upload", {}, files)
+
+    # Dataset Management APIs
+    async def get_dataset(self, dataset_id: str | None = None):
+        """Get detailed information about a specific dataset."""
+        ds_id = dataset_id or self._get_dataset_id()
+        url = f"/datasets/{ds_id}"
+        return await self._send_request("GET", url)
+
+    async def update_dataset(
+        self,
+        dataset_id: str | None = None,
+        name: str | None = None,
+        description: str | None = None,
+        indexing_technique: str | None = None,
+        embedding_model: str | None = None,
+        embedding_model_provider: str | None = None,
+        retrieval_model: Dict[str, Any] | None = None,
+        **kwargs,
+    ):
+        """Update dataset configuration.
+
+        Args:
+            dataset_id: Dataset ID (optional, uses current dataset_id if not provided)
+            name: New dataset name
+            description: New dataset description
+            indexing_technique: Indexing technique ('high_quality' or 'economy')
+            embedding_model: Embedding model name
+            embedding_model_provider: Embedding model provider
+            retrieval_model: Retrieval model configuration dict
+            **kwargs: Additional parameters to pass to the API
+
+        Returns:
+            Response from the API with updated dataset information
+        """
+        ds_id = dataset_id or self._get_dataset_id()
+        url = f"/datasets/{ds_id}"
+
+        payload = {
+            "name": name,
+            "description": description,
+            "indexing_technique": indexing_technique,
+            "embedding_model": embedding_model,
+            "embedding_model_provider": embedding_model_provider,
+            "retrieval_model": retrieval_model,
+        }
+
+        data = {k: v for k, v in payload.items() if v is not None}
+        data.update(kwargs)
+
+        return await self._send_request("PATCH", url, json=data)
+
+    async def batch_update_document_status(
+        self,
+        action: Literal["enable", "disable", "archive", "un_archive"],
+        document_ids: List[str],
+        dataset_id: str | None = None,
+    ):
+        """Batch update document status."""
+        ds_id = dataset_id or self._get_dataset_id()
+        url = f"/datasets/{ds_id}/documents/status/{action}"
+        data = {"document_ids": document_ids}
+        return await self._send_request("PATCH", url, json=data)
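The async surface above mirrors the sync client one for one (modulo `aclose()` versus `close()`), so knowledge-base workflows read the same with `await`. A minimal usage sketch, assuming the async constructor accepts `dataset_id` the way the sync `KnowledgeBaseClient` does and that the key and IDs are placeholders:

```python
import asyncio

from dify_client.async_client import AsyncKnowledgeBaseClient


async def main():
    # The async context manager guarantees aclose(), mirroring the sync close().
    async with AsyncKnowledgeBaseClient("your-api-key", dataset_id="your-dataset-id") as kb:
        response = await kb.hit_testing(query="What is Dify?")
        # httpx.Response.json() is synchronous, even on responses from AsyncClient.
        print(response.json())


asyncio.run(main())
```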
diff --git a/sdks/python-client/dify_client/client.py b/sdks/python-client/dify_client/client.py
index 201391eae9..41c5abe16d 100644
--- a/sdks/python-client/dify_client/client.py
+++ b/sdks/python-client/dify_client/client.py
@@ -1,31 +1,114 @@
 import json
-from typing import Literal
-import requests
+import os
+from typing import Literal, Dict, List, Any, IO
+
+import httpx
 
 
 class DifyClient:
-    def __init__(self, api_key, base_url: str = "https://api.dify.ai/v1"):
+    """Synchronous Dify API client.
+
+    This client uses httpx.Client for efficient connection pooling and resource management.
+    It's recommended to use this client as a context manager:
+
+    Example:
+        with DifyClient(api_key="your-key") as client:
+            response = client.get_app_info()
+    """
+
+    def __init__(
+        self,
+        api_key: str,
+        base_url: str = "https://api.dify.ai/v1",
+        timeout: float = 60.0,
+    ):
+        """Initialize the Dify client.
+
+        Args:
+            api_key: Your Dify API key
+            base_url: Base URL for the Dify API
+            timeout: Request timeout in seconds (default: 60.0)
+        """
         self.api_key = api_key
         self.base_url = base_url
+        self._client = httpx.Client(
+            base_url=base_url,
+            timeout=httpx.Timeout(timeout, connect=5.0),
+        )
+
+    def __enter__(self):
+        """Support context manager protocol."""
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Clean up resources when exiting context."""
+        self.close()
+
+    def close(self):
+        """Close the HTTP client and release resources."""
+        if hasattr(self, "_client"):
+            self._client.close()
 
     def _send_request(
-        self, method: str, endpoint: str, json: dict | None = None, params: dict | None = None, stream: bool = False
+        self,
+        method: str,
+        endpoint: str,
+        json: dict | None = None,
+        params: dict | None = None,
+        stream: bool = False,
+        **kwargs,
    ):
+        """Send an HTTP request to the Dify API.
+
+        Args:
+            method: HTTP method (GET, POST, PUT, PATCH, DELETE)
+            endpoint: API endpoint path
+            json: JSON request body
+            params: Query parameters
+            stream: Whether to stream the response (accepted for signature
+                compatibility; not forwarded to httpx.Client.request)
+            **kwargs: Additional arguments to pass to httpx.request
+
+        Returns:
+            httpx.Response object
+        """
         headers = {
             "Authorization": f"Bearer {self.api_key}",
             "Content-Type": "application/json",
         }
 
-        url = f"{self.base_url}{endpoint}"
-        response = requests.request(method, url, json=json, params=params, headers=headers, stream=stream)
+        # httpx.Client automatically prepends base_url
+        response = self._client.request(
+            method,
+            endpoint,
+            json=json,
+            params=params,
+            headers=headers,
+            **kwargs,
+        )
         return response
 
-    def _send_request_with_files(self, method, endpoint, data, files):
+    def _send_request_with_files(self, method: str, endpoint: str, data: dict, files: dict):
+        """Send an HTTP request with file uploads.
+
+        Args:
+            method: HTTP method (POST, PUT, etc.)
+            endpoint: API endpoint path
+            data: Form data
+            files: Files to upload
+
+        Returns:
+            httpx.Response object
+        """
         headers = {"Authorization": f"Bearer {self.api_key}"}
 
-        url = f"{self.base_url}{endpoint}"
-        response = requests.request(method, url, data=data, headers=headers, files=files)
+        response = self._client.request(
+            method,
+            endpoint,
+            data=data,
+            headers=headers,
+            files=files,
+        )
         return response
 
@@ -49,10 +132,26 @@ class DifyClient:
         params = {"user": user}
         return self._send_request("GET", "/meta", params=params)
 
+    def get_app_info(self):
+        """Get basic application information including name, description, tags, and mode."""
+        return self._send_request("GET", "/info")
+
+    def get_app_site_info(self):
+        """Get application site information."""
+        return self._send_request("GET", "/site")
+
+    def get_file_preview(self, file_id: str):
+        """Get file preview by file ID."""
+        return self._send_request("GET", f"/files/{file_id}/preview")
+
 
 class CompletionClient(DifyClient):
     def create_completion_message(
-        self, inputs: dict, response_mode: Literal["blocking", "streaming"], user: str, files: dict | None = None
+        self,
+        inputs: dict,
+        response_mode: Literal["blocking", "streaming"],
+        user: str,
+        files: dict | None = None,
     ):
         data = {
             "inputs": inputs,
@@ -64,7 +163,7 @@ class CompletionClient(DifyClient):
             "POST",
             "/completion-messages",
             data,
-            stream=True if response_mode == "streaming" else False,
+            stream=(response_mode == "streaming"),
         )
 
@@ -92,7 +191,7 @@ class ChatClient(DifyClient):
             "POST",
             "/chat-messages",
             data,
-            stream=True if response_mode == "streaming" else False,
+            stream=(response_mode == "streaming"),
         )
 
     def get_suggested(self, message_id: str, user: str):
@@ -144,9 +243,86 @@ class ChatClient(DifyClient):
         files = {"file": audio_file}
         return self._send_request_with_files("POST", "/audio-to-text", data, files)
 
+    # Annotation APIs
+    def annotation_reply_action(
+        self,
+        action: Literal["enable", "disable"],
+        score_threshold: float,
+        embedding_provider_name: str,
+        embedding_model_name: str,
+    ):
+        """Enable or disable annotation reply feature."""
+        data = {
+            "score_threshold": score_threshold,
+            "embedding_provider_name": embedding_provider_name,
+            "embedding_model_name": embedding_model_name,
+        }
+        return self._send_request("POST", f"/apps/annotation-reply/{action}", json=data)
+
+    def get_annotation_reply_status(self, action: Literal["enable", "disable"], job_id: str):
+        """Get the status of an annotation reply action job."""
+        return self._send_request("GET", f"/apps/annotation-reply/{action}/status/{job_id}")
+
+    def list_annotations(self, page: int = 1, limit: int = 20, keyword: str | None = None):
+        """List annotations for the application."""
+        params = {"page": page, "limit": limit, "keyword": keyword}
+        return self._send_request("GET", "/apps/annotations", params=params)
+
+    def create_annotation(self, question: str, answer: str):
+        """Create a new annotation."""
+        data = {"question": question, "answer": answer}
+        return self._send_request("POST", "/apps/annotations", json=data)
+
+    def update_annotation(self, annotation_id: str, question: str, answer: str):
+        """Update an existing annotation."""
+        data = {"question": question, "answer": answer}
+        return self._send_request("PUT", f"/apps/annotations/{annotation_id}", json=data)
+
+    def delete_annotation(self, annotation_id: str):
+        """Delete an annotation."""
+        return self._send_request("DELETE", f"/apps/annotations/{annotation_id}")
+
+    # Conversation Variables APIs
+    def get_conversation_variables(self, conversation_id: str, user: str):
+        """Get all variables for a specific conversation.
+
+        Args:
+            conversation_id: The conversation ID to query variables for
+            user: User identifier
+
+        Returns:
+            Response from the API containing:
+            - variables: List of conversation variables with their values
+            - conversation_id: The conversation ID
+        """
+        params = {"user": user}
+        url = f"/conversations/{conversation_id}/variables"
+        return self._send_request("GET", url, params=params)
+
+    def update_conversation_variable(self, conversation_id: str, variable_id: str, value: Any, user: str):
+        """Update a specific conversation variable.
+
+        Args:
+            conversation_id: The conversation ID
+            variable_id: The variable ID to update
+            value: New value for the variable
+            user: User identifier
+
+        Returns:
+            Response from the API with updated variable information
+        """
+        data = {"value": value, "user": user}
+        url = f"/conversations/{conversation_id}/variables/{variable_id}"
+        return self._send_request("PATCH", url, json=data)
+
 
 class WorkflowClient(DifyClient):
-    def run(self, inputs: dict, response_mode: Literal["blocking", "streaming"] = "streaming", user: str = "abc-123"):
+    def run(
+        self,
+        inputs: dict,
+        response_mode: Literal["blocking", "streaming"] = "streaming",
+        user: str = "abc-123",
+    ):
         data = {"inputs": inputs, "response_mode": response_mode, "user": user}
         return self._send_request("POST", "/workflows/run", data)
 
@@ -157,6 +333,58 @@ class WorkflowClient(DifyClient):
     def get_result(self, workflow_run_id):
         return self._send_request("GET", f"/workflows/run/{workflow_run_id}")
 
+    def get_workflow_logs(
+        self,
+        keyword: str | None = None,
+        status: Literal["succeeded", "failed", "stopped"] | None = None,
+        page: int = 1,
+        limit: int = 20,
+        created_at__before: str | None = None,
+        created_at__after: str | None = None,
+        created_by_end_user_session_id: str | None = None,
+        created_by_account: str | None = None,
+    ):
+        """Get workflow execution logs with optional filtering."""
+        params = {"page": page, "limit": limit}
+        if keyword:
+            params["keyword"] = keyword
+        if status:
+            params["status"] = status
+        if created_at__before:
+            params["created_at__before"] = created_at__before
+        if created_at__after:
+            params["created_at__after"] = created_at__after
+        if created_by_end_user_session_id:
+            params["created_by_end_user_session_id"] = created_by_end_user_session_id
+        if created_by_account:
+            params["created_by_account"] = created_by_account
+        return self._send_request("GET", "/workflows/logs", params=params)
+
+    def run_specific_workflow(
+        self,
+        workflow_id: str,
+        inputs: dict,
+        response_mode: Literal["blocking", "streaming"] = "streaming",
+        user: str = "abc-123",
+    ):
+        """Run a specific workflow by workflow ID."""
+        data = {"inputs": inputs, "response_mode": response_mode, "user": user}
+        return self._send_request(
+            "POST",
+            f"/workflows/{workflow_id}/run",
+            data,
+            stream=(response_mode == "streaming"),
+        )
+
+
+class WorkspaceClient(DifyClient):
+    """Client for workspace-related operations."""
+
+    def get_available_models(self, model_type: str):
+        """Get available models by model type."""
+        url = f"/workspaces/current/models/model-types/{model_type}"
+        return self._send_request("GET", url)
+
 
 class KnowledgeBaseClient(DifyClient):
     def __init__(
@@ -186,7 +414,7 @@ class KnowledgeBaseClient(DifyClient):
         return self._send_request("POST", "/datasets", {"name": name}, **kwargs)
 
     def list_datasets(self, page: int = 1, page_size: int = 20, **kwargs):
-        return self._send_request("GET", f"/datasets?page={page}&limit={page_size}", **kwargs)
+        return self._send_request("GET", "/datasets", params={"page": page, "limit": page_size}, **kwargs)
 
     def create_document_by_text(self, name, text, extra_params: dict | None = None, **kwargs):
         """
@@ -226,7 +454,12 @@ class KnowledgeBaseClient(DifyClient):
         return self._send_request("POST", url, json=data, **kwargs)
 
     def update_document_by_text(
-        self, document_id: str, name: str, text: str, extra_params: dict | None = None, **kwargs
+        self,
+        document_id: str,
+        name: str,
+        text: str,
+        extra_params: dict | None = None,
+        **kwargs,
     ):
         """
         Update a document by text.
@@ -261,7 +494,10 @@ class KnowledgeBaseClient(DifyClient):
         return self._send_request("POST", url, json=data, **kwargs)
 
     def create_document_by_file(
-        self, file_path: str, original_document_id: str | None = None, extra_params: dict | None = None
+        self,
+        file_path: str,
+        original_document_id: str | None = None,
+        extra_params: dict | None = None,
     ):
         """
         Create a document by file.
@@ -288,17 +524,18 @@ class KnowledgeBaseClient(DifyClient):
         }
         :return: Response from the API
         """
-        files = {"file": open(file_path, "rb")}
-        data = {
-            "process_rule": {"mode": "automatic"},
-            "indexing_technique": "high_quality",
-        }
-        if extra_params is not None and isinstance(extra_params, dict):
-            data.update(extra_params)
-        if original_document_id is not None:
-            data["original_document_id"] = original_document_id
-        url = f"/datasets/{self._get_dataset_id()}/document/create_by_file"
-        return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files)
+        with open(file_path, "rb") as f:
+            files = {"file": (os.path.basename(file_path), f)}
+            data = {
+                "process_rule": {"mode": "automatic"},
+                "indexing_technique": "high_quality",
+            }
+            if extra_params is not None and isinstance(extra_params, dict):
+                data.update(extra_params)
+            if original_document_id is not None:
+                data["original_document_id"] = original_document_id
+            url = f"/datasets/{self._get_dataset_id()}/document/create_by_file"
+            return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files)
 
     def update_document_by_file(self, document_id: str, file_path: str, extra_params: dict | None = None):
         """
@@ -326,12 +563,13 @@ class KnowledgeBaseClient(DifyClient):
         }
         :return:
         """
-        files = {"file": open(file_path, "rb")}
-        data = {}
-        if extra_params is not None and isinstance(extra_params, dict):
-            data.update(extra_params)
-        url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file"
-        return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files)
+        with open(file_path, "rb") as f:
+            files = {"file": (os.path.basename(file_path), f)}
+            data = {}
+            if extra_params is not None and isinstance(extra_params, dict):
+                data.update(extra_params)
+            url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file"
+            return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files)
 
     def batch_indexing_status(self, batch_id: str, **kwargs):
         """
@@ -409,6 +647,8 @@ class KnowledgeBaseClient(DifyClient):
         :param document_id: ID of the document
         :param keyword: query keyword, optional
         :param status: status of the segment, optional, e.g. completed
+        :param kwargs: Additional parameters to pass to the API.
+            Can include a 'params' dict for extra query parameters.
         """
         url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments"
         params = {}
@@ -417,7 +657,7 @@
         if status is not None:
             params["status"] = status
         if "params" in kwargs:
-            params.update(kwargs["params"])
+            params.update(kwargs.pop("params"))
         return self._send_request("GET", url, params=params, **kwargs)
 
     def delete_document_segment(self, document_id: str, segment_id: str):
@@ -443,3 +683,213 @@ class KnowledgeBaseClient(DifyClient):
         data = {"segment": segment_data}
         url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}"
         return self._send_request("POST", url, json=data, **kwargs)
+
+    # Advanced Knowledge Base APIs
+    def hit_testing(
+        self,
+        query: str,
+        retrieval_model: Dict[str, Any] | None = None,
+        external_retrieval_model: Dict[str, Any] | None = None,
+    ):
+        """Perform hit testing on the dataset."""
+        data = {"query": query}
+        if retrieval_model:
+            data["retrieval_model"] = retrieval_model
+        if external_retrieval_model:
+            data["external_retrieval_model"] = external_retrieval_model
+        url = f"/datasets/{self._get_dataset_id()}/hit-testing"
+        return self._send_request("POST", url, json=data)
+
+    def get_dataset_metadata(self):
+        """Get dataset metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata"
+        return self._send_request("GET", url)
+
+    def create_dataset_metadata(self, metadata_data: Dict[str, Any]):
+        """Create dataset metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata"
+        return self._send_request("POST", url, json=metadata_data)
+
+    def update_dataset_metadata(self, metadata_id: str, metadata_data: Dict[str, Any]):
+        """Update dataset metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata/{metadata_id}"
+        return self._send_request("PATCH", url, json=metadata_data)
+
+    def get_built_in_metadata(self):
+        """Get built-in metadata."""
+        url = f"/datasets/{self._get_dataset_id()}/metadata/built-in"
+        return self._send_request("GET", url)
+
+    def manage_built_in_metadata(self, action: str, metadata_data: Dict[str, Any] | None = None):
+        """Manage built-in metadata with specified action."""
+        data = metadata_data or {}
+        url = f"/datasets/{self._get_dataset_id()}/metadata/built-in/{action}"
+        return self._send_request("POST", url, json=data)
+
+    def update_documents_metadata(self, operation_data: List[Dict[str, Any]]):
+        """Update metadata for multiple documents."""
+        url = f"/datasets/{self._get_dataset_id()}/documents/metadata"
+        data = {"operation_data": operation_data}
+        return self._send_request("POST", url, json=data)
+
+    # Dataset Tags APIs
+    def list_dataset_tags(self):
+        """List all dataset tags."""
+        return self._send_request("GET", "/datasets/tags")
+
+    def bind_dataset_tags(self, tag_ids: List[str]):
+        """Bind tags to dataset."""
+        data = {"tag_ids": tag_ids, "target_id": self._get_dataset_id()}
+        return self._send_request("POST", "/datasets/tags/binding", json=data)
+
+    def unbind_dataset_tag(self, tag_id: str):
+        """Unbind a single tag from dataset."""
+        data = {"tag_id": tag_id, "target_id": self._get_dataset_id()}
+        return self._send_request("POST", "/datasets/tags/unbinding", json=data)
+
+    def get_dataset_tags(self):
+        """Get tags for current dataset."""
+        url = f"/datasets/{self._get_dataset_id()}/tags"
+        return self._send_request("GET", url)
+
+    # RAG Pipeline APIs
+    def get_datasource_plugins(self, is_published: bool = True):
+        """Get datasource plugins for RAG pipeline."""
+        params = {"is_published": is_published}
+        url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource-plugins"
+        return self._send_request("GET", url, params=params)
+
+    def run_datasource_node(
+        self,
+        node_id: str,
+        inputs: Dict[str, Any],
+        datasource_type: str,
+        is_published: bool = True,
+        credential_id: str | None = None,
+    ):
+        """Run a datasource node in RAG pipeline."""
+        data = {
+            "inputs": inputs,
+            "datasource_type": datasource_type,
+            "is_published": is_published,
+        }
+        if credential_id:
+            data["credential_id"] = credential_id
+        url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource/nodes/{node_id}/run"
+        return self._send_request("POST", url, json=data, stream=True)
+
+    def run_rag_pipeline(
+        self,
+        inputs: Dict[str, Any],
+        datasource_type: str,
+        datasource_info_list: List[Dict[str, Any]],
+        start_node_id: str,
+        is_published: bool = True,
+        response_mode: Literal["streaming", "blocking"] = "blocking",
+    ):
+        """Run RAG pipeline."""
+        data = {
+            "inputs": inputs,
+            "datasource_type": datasource_type,
+            "datasource_info_list": datasource_info_list,
+            "start_node_id": start_node_id,
+            "is_published": is_published,
+            "response_mode": response_mode,
+        }
+        url = f"/datasets/{self._get_dataset_id()}/pipeline/run"
+        return self._send_request("POST", url, json=data, stream=response_mode == "streaming")
+
+    def upload_pipeline_file(self, file_path: str):
+        """Upload file for RAG pipeline."""
+        with open(file_path, "rb") as f:
+            files = {"file": (os.path.basename(file_path), f)}
+            return self._send_request_with_files("POST", "/datasets/pipeline/file-upload", {}, files)
+
+    # Dataset Management APIs
+    def get_dataset(self, dataset_id: str | None = None):
+        """Get detailed information about a specific dataset.
+
+        Args:
+            dataset_id: Dataset ID (optional, uses current dataset_id if not provided)
+
+        Returns:
+            Response from the API containing dataset details including:
+            - name, description, permission
+            - indexing_technique, embedding_model, embedding_model_provider
+            - retrieval_model configuration
+            - document_count, word_count, app_count
+            - created_at, updated_at
+        """
+        ds_id = dataset_id or self._get_dataset_id()
+        url = f"/datasets/{ds_id}"
+        return self._send_request("GET", url)
+
+    def update_dataset(
+        self,
+        dataset_id: str | None = None,
+        name: str | None = None,
+        description: str | None = None,
+        indexing_technique: str | None = None,
+        embedding_model: str | None = None,
+        embedding_model_provider: str | None = None,
+        retrieval_model: Dict[str, Any] | None = None,
+        **kwargs,
+    ):
+        """Update dataset configuration.
+
+        Args:
+            dataset_id: Dataset ID (optional, uses current dataset_id if not provided)
+            name: New dataset name
+            description: New dataset description
+            indexing_technique: Indexing technique ('high_quality' or 'economy')
+            embedding_model: Embedding model name
+            embedding_model_provider: Embedding model provider
+            retrieval_model: Retrieval model configuration dict
+            **kwargs: Additional parameters to pass to the API
+
+        Returns:
+            Response from the API with updated dataset information
+        """
+        ds_id = dataset_id or self._get_dataset_id()
+        url = f"/datasets/{ds_id}"
+
+        # Build data dictionary with all possible parameters
+        payload = {
+            "name": name,
+            "description": description,
+            "indexing_technique": indexing_technique,
+            "embedding_model": embedding_model,
+            "embedding_model_provider": embedding_model_provider,
+            "retrieval_model": retrieval_model,
+        }
+
+        # Filter out None values and merge with additional kwargs
+        data = {k: v for k, v in payload.items() if v is not None}
+        data.update(kwargs)
+
+        return self._send_request("PATCH", url, json=data)
+
+    def batch_update_document_status(
+        self,
+        action: Literal["enable", "disable", "archive", "un_archive"],
+        document_ids: List[str],
+        dataset_id: str | None = None,
+    ):
+        """Batch update document status (enable/disable/archive/unarchive).
+
+        Args:
+            action: Action to perform on documents
+                - 'enable': Enable documents for retrieval
+                - 'disable': Disable documents from retrieval
+                - 'archive': Archive documents
+                - 'un_archive': Unarchive documents
+            document_ids: List of document IDs to update
+            dataset_id: Dataset ID (optional, uses current dataset_id if not provided)
+
+        Returns:
+            Response from the API with operation result
+        """
+        ds_id = dataset_id or self._get_dataset_id()
+        url = f"/datasets/{ds_id}/documents/status/{action}"
+        data = {"document_ids": document_ids}
+        return self._send_request("PATCH", url, json=data)
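Since every client now owns an `httpx.Client`, the context-manager form is the intended way to use the sync API as well. A minimal sketch of the new dataset-management calls, assuming the `KnowledgeBaseClient` constructor takes `dataset_id` as before and with placeholder key and IDs:

```python
from dify_client import KnowledgeBaseClient

with KnowledgeBaseClient("your-api-key", dataset_id="your-dataset-id") as kb:
    # PATCH /datasets/{id}: only the fields that are not None are sent.
    kb.update_dataset(name="Product Docs", indexing_technique="high_quality")

    # PATCH /datasets/{id}/documents/status/{action}
    kb.batch_update_document_status("disable", document_ids=["doc-1", "doc-2"])
```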
diff --git a/sdks/python-client/pyproject.toml b/sdks/python-client/pyproject.toml
new file mode 100644
index 0000000000..db02cbd6e3
--- /dev/null
+++ b/sdks/python-client/pyproject.toml
@@ -0,0 +1,43 @@
+[project]
+name = "dify-client"
+version = "0.1.12"
+description = "A package for interacting with the Dify Service-API"
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = [
+    "httpx>=0.27.0",
+    "aiofiles>=23.0.0",
+]
+authors = [
+    {name = "Dify", email = "hello@dify.ai"}
+]
+license = {text = "MIT"}
+keywords = ["dify", "nlp", "ai", "language-processing"]
+classifiers = [
+    "Programming Language :: Python :: 3",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+]
+
+[project.urls]
+Homepage = "https://github.com/langgenius/dify"
+
+[project.optional-dependencies]
+dev = [
+    "pytest>=7.0.0",
+    "pytest-asyncio>=0.21.0",
+]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["dify_client"]
+
+[tool.pytest.ini_options]
+testpaths = ["tests"]
+python_files = ["test_*.py"]
+python_classes = ["Test*"]
+python_functions = ["test_*"]
+asyncio_mode = "auto"
diff --git a/sdks/python-client/setup.py b/sdks/python-client/setup.py
deleted file mode 100644
index a05f6410fb..0000000000
--- a/sdks/python-client/setup.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from setuptools import setup
-
-with open("README.md", encoding="utf-8") as fh:
-    long_description = fh.read()
-
-setup(
-    name="dify-client",
-    version="0.1.12",
-    author="Dify",
-    author_email="hello@dify.ai",
-    description="A package for interacting with the Dify Service-API",
-    long_description=long_description,
-    long_description_content_type="text/markdown",
-    url="https://github.com/langgenius/dify",
-    license="MIT",
-    packages=["dify_client"],
-    classifiers=[
-        "Programming Language :: Python :: 3",
-        "License :: OSI Approved :: MIT License",
-        "Operating System :: OS Independent",
-    ],
-    python_requires=">=3.6",
-    install_requires=["requests"],
-    keywords="dify nlp ai language-processing",
-    include_package_data=True,
-)
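With `setup.py` gone, packaging and test configuration live entirely in `pyproject.toml`. One practical effect of `asyncio_mode = "auto"` under `[tool.pytest.ini_options]` is that pytest-asyncio collects bare `async def` tests without an explicit marker; a small sketch (hypothetical file name, for illustration only) of what that enables:

```python
# tests/test_packaging_smoke.py (hypothetical file, not part of this PR)
import importlib.metadata


def test_version_matches_pyproject():
    # The version declared in [project] above is what importlib reports once installed.
    assert importlib.metadata.version("dify-client") == "0.1.12"


async def test_bare_async_tests_are_collected():
    # No @pytest.mark.asyncio marker needed: asyncio_mode = "auto" handles collection.
    assert True
```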
diff --git a/sdks/python-client/tests/test_async_client.py b/sdks/python-client/tests/test_async_client.py
new file mode 100644
index 0000000000..4f5001866f
--- /dev/null
+++ b/sdks/python-client/tests/test_async_client.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python3
+"""
+Test suite for async client implementation in the Python SDK.
+
+This test validates the async/await functionality using httpx.AsyncClient
+and ensures API parity with sync clients.
+"""
+
+import unittest
+from unittest.mock import Mock, patch, AsyncMock
+
+from dify_client.async_client import (
+    AsyncDifyClient,
+    AsyncChatClient,
+    AsyncCompletionClient,
+    AsyncWorkflowClient,
+    AsyncWorkspaceClient,
+    AsyncKnowledgeBaseClient,
+)
+
+
+class TestAsyncAPIParity(unittest.TestCase):
+    """Test that async clients have API parity with sync clients."""
+
+    def test_dify_client_api_parity(self):
+        """Test AsyncDifyClient has same methods as DifyClient."""
+        from dify_client import DifyClient
+
+        sync_methods = {name for name in dir(DifyClient) if not name.startswith("_")}
+        async_methods = {name for name in dir(AsyncDifyClient) if not name.startswith("_")}
+
+        # aclose is async-specific, close is sync-specific
+        sync_methods.discard("close")
+        async_methods.discard("aclose")
+
+        # Verify parity
+        self.assertEqual(sync_methods, async_methods, "API parity mismatch for DifyClient")
+
+    def test_chat_client_api_parity(self):
+        """Test AsyncChatClient has same methods as ChatClient."""
+        from dify_client import ChatClient
+
+        sync_methods = {name for name in dir(ChatClient) if not name.startswith("_")}
+        async_methods = {name for name in dir(AsyncChatClient) if not name.startswith("_")}
+
+        sync_methods.discard("close")
+        async_methods.discard("aclose")
+
+        self.assertEqual(sync_methods, async_methods, "API parity mismatch for ChatClient")
+
+    def test_completion_client_api_parity(self):
+        """Test AsyncCompletionClient has same methods as CompletionClient."""
+        from dify_client import CompletionClient
+
+        sync_methods = {name for name in dir(CompletionClient) if not name.startswith("_")}
+        async_methods = {name for name in dir(AsyncCompletionClient) if not name.startswith("_")}
+
+        sync_methods.discard("close")
+        async_methods.discard("aclose")
+
+        self.assertEqual(sync_methods, async_methods, "API parity mismatch for CompletionClient")
+
+    def test_workflow_client_api_parity(self):
+        """Test AsyncWorkflowClient has same methods as WorkflowClient."""
+        from dify_client import WorkflowClient
+
+        sync_methods = {name for name in dir(WorkflowClient) if not name.startswith("_")}
+        async_methods = {name for name in dir(AsyncWorkflowClient) if not name.startswith("_")}
+
+        sync_methods.discard("close")
+        async_methods.discard("aclose")
+
+        self.assertEqual(sync_methods, async_methods, "API parity mismatch for WorkflowClient")
+
+    def test_workspace_client_api_parity(self):
+        """Test AsyncWorkspaceClient has same methods as WorkspaceClient."""
+        from dify_client import WorkspaceClient
+
+        sync_methods = {name for name in dir(WorkspaceClient) if not name.startswith("_")}
+        async_methods = {name for name in dir(AsyncWorkspaceClient) if not name.startswith("_")}
+
+        sync_methods.discard("close")
+        async_methods.discard("aclose")
+
+        self.assertEqual(sync_methods, async_methods, "API parity mismatch for WorkspaceClient")
+
+    def test_knowledge_base_client_api_parity(self):
+        """Test AsyncKnowledgeBaseClient has same methods as KnowledgeBaseClient."""
+        from dify_client import KnowledgeBaseClient
+
+        sync_methods = {name for name in dir(KnowledgeBaseClient) if not name.startswith("_")}
+        async_methods = {name for name in dir(AsyncKnowledgeBaseClient) if not name.startswith("_")}
+
+        sync_methods.discard("close")
+        async_methods.discard("aclose")
+
+        self.assertEqual(sync_methods, async_methods, "API parity mismatch for KnowledgeBaseClient")
+
+
+class TestAsyncClientMocked(unittest.IsolatedAsyncioTestCase):
+    """Test async client with mocked httpx.AsyncClient."""
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_client_initialization(self, mock_httpx_async_client):
+        """Test async client initializes with httpx.AsyncClient."""
+        mock_client_instance = AsyncMock()
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        client = AsyncDifyClient("test-key", "https://api.dify.ai/v1")
+
+        # Verify httpx.AsyncClient was called
+        mock_httpx_async_client.assert_called_once()
+        self.assertEqual(client.api_key, "test-key")
+
+        await client.aclose()
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_context_manager(self, mock_httpx_async_client):
+        """Test async context manager works."""
+        mock_client_instance = AsyncMock()
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        async with AsyncDifyClient("test-key") as client:
+            self.assertEqual(client.api_key, "test-key")
+
+        # Verify aclose was called
+        mock_client_instance.aclose.assert_called_once()
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_send_request(self, mock_httpx_async_client):
+        """Test async _send_request method."""
+        mock_response = AsyncMock()
+        mock_response.json = AsyncMock(return_value={"result": "success"})
+        mock_response.status_code = 200
+
+        mock_client_instance = AsyncMock()
+        mock_client_instance.request = AsyncMock(return_value=mock_response)
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        async with AsyncDifyClient("test-key") as client:
+            response = await client._send_request("GET", "/test")
+
+            # Verify request was called
+            mock_client_instance.request.assert_called_once()
+            call_args = mock_client_instance.request.call_args
+
+            # Verify parameters
+            self.assertEqual(call_args[0][0], "GET")
+            self.assertEqual(call_args[0][1], "/test")
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_chat_client(self, mock_httpx_async_client):
+        """Test AsyncChatClient functionality."""
+        mock_response = AsyncMock()
+        mock_response.text = '{"answer": "Hello!"}'
+        mock_response.json = AsyncMock(return_value={"answer": "Hello!"})
+
+        mock_client_instance = AsyncMock()
+        mock_client_instance.request = AsyncMock(return_value=mock_response)
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        async with AsyncChatClient("test-key") as client:
+            response = await client.create_chat_message({}, "Hi", "user123")
+            self.assertIn("answer", response.text)
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_completion_client(self, mock_httpx_async_client):
+        """Test AsyncCompletionClient functionality."""
+        mock_response = AsyncMock()
+        mock_response.text = '{"answer": "Response"}'
+        mock_response.json = AsyncMock(return_value={"answer": "Response"})
+
+        mock_client_instance = AsyncMock()
+        mock_client_instance.request = AsyncMock(return_value=mock_response)
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        async with AsyncCompletionClient("test-key") as client:
+            response = await client.create_completion_message({"query": "test"}, "blocking", "user123")
+            self.assertIn("answer", response.text)
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_workflow_client(self, mock_httpx_async_client):
+        """Test AsyncWorkflowClient functionality."""
+        mock_response = AsyncMock()
+        mock_response.json = AsyncMock(return_value={"result": "success"})
+
+        mock_client_instance = AsyncMock()
+        mock_client_instance.request = AsyncMock(return_value=mock_response)
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        async with AsyncWorkflowClient("test-key") as client:
+            response = await client.run({"input": "test"}, "blocking", "user123")
+            data = await response.json()
+            self.assertEqual(data["result"], "success")
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_workspace_client(self, mock_httpx_async_client):
+        """Test AsyncWorkspaceClient functionality."""
+        mock_response = AsyncMock()
+        mock_response.json = AsyncMock(return_value={"data": []})
+
+        mock_client_instance = AsyncMock()
+        mock_client_instance.request = AsyncMock(return_value=mock_response)
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        async with AsyncWorkspaceClient("test-key") as client:
+            response = await client.get_available_models("llm")
+            data = await response.json()
+            self.assertIn("data", data)
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_async_knowledge_base_client(self, mock_httpx_async_client):
+        """Test AsyncKnowledgeBaseClient functionality."""
+        mock_response = AsyncMock()
+        mock_response.json = AsyncMock(return_value={"data": [], "total": 0})
+
+        mock_client_instance = AsyncMock()
+        mock_client_instance.request = AsyncMock(return_value=mock_response)
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        async with AsyncKnowledgeBaseClient("test-key") as client:
+            response = await client.list_datasets()
+            data = await response.json()
+            self.assertIn("data", data)
+
+    @patch("dify_client.async_client.httpx.AsyncClient")
+    async def test_all_async_client_classes(self, mock_httpx_async_client):
+        """Test all async client classes work with httpx.AsyncClient."""
+        mock_client_instance = AsyncMock()
+        mock_httpx_async_client.return_value = mock_client_instance
+
+        clients = [
+            AsyncDifyClient("key"),
+            AsyncChatClient("key"),
+            AsyncCompletionClient("key"),
+            AsyncWorkflowClient("key"),
+            AsyncWorkspaceClient("key"),
+            AsyncKnowledgeBaseClient("key"),
+        ]
+
+        # Verify httpx.AsyncClient was called for each
+        self.assertEqual(mock_httpx_async_client.call_count, 6)
+
+        # Clean up
+        for client in clients:
+            await client.aclose()
+
+
+if __name__ == "__main__":
+    unittest.main()
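The parity checks above reduce to a set comparison, which is also handy as an ad-hoc check when a method gets added to one client but not its counterpart. The same idiom outside the suite, as a sketch:

```python
# Quick REPL-style parity check, mirroring the idiom used by TestAsyncAPIParity.
from dify_client import ChatClient
from dify_client.async_client import AsyncChatClient

sync_api = {n for n in dir(ChatClient) if not n.startswith("_")} - {"close"}
async_api = {n for n in dir(AsyncChatClient) if not n.startswith("_")} - {"aclose"}
assert sync_api == async_api, f"parity mismatch: {sorted(sync_api ^ async_api)}"
```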
+
+This test validates that the migration from requests to httpx maintains
+backward compatibility and proper resource management.
+"""
+
+import unittest
+from unittest.mock import Mock, patch
+
+from dify_client import (
+    DifyClient,
+    ChatClient,
+    CompletionClient,
+    WorkflowClient,
+    WorkspaceClient,
+    KnowledgeBaseClient,
+)
+
+
+class TestHttpxMigrationMocked(unittest.TestCase):
+    """Test cases for httpx migration with mocked requests."""
+
+    def setUp(self):
+        """Set up test fixtures."""
+        self.api_key = "test-api-key"
+        self.base_url = "https://api.dify.ai/v1"
+
+    @patch("dify_client.client.httpx.Client")
+    def test_client_initialization(self, mock_httpx_client):
+        """Test that client initializes with httpx.Client."""
+        mock_client_instance = Mock()
+        mock_httpx_client.return_value = mock_client_instance
+
+        client = DifyClient(self.api_key, self.base_url)
+
+        # Verify httpx.Client was called with correct parameters
+        mock_httpx_client.assert_called_once()
+        call_kwargs = mock_httpx_client.call_args[1]
+        self.assertEqual(call_kwargs["base_url"], self.base_url)
+
+        # Verify client properties
+        self.assertEqual(client.api_key, self.api_key)
+        self.assertEqual(client.base_url, self.base_url)
+
+        client.close()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_context_manager_support(self, mock_httpx_client):
+        """Test that client works as context manager."""
+        mock_client_instance = Mock()
+        mock_httpx_client.return_value = mock_client_instance
+
+        with DifyClient(self.api_key, self.base_url) as client:
+            self.assertEqual(client.api_key, self.api_key)
+
+        # Verify close was called
+        mock_client_instance.close.assert_called_once()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_manual_close(self, mock_httpx_client):
+        """Test manual close() method."""
+        mock_client_instance = Mock()
+        mock_httpx_client.return_value = mock_client_instance
+
+        client = DifyClient(self.api_key, self.base_url)
+        client.close()
+
+        # Verify close was called
+        mock_client_instance.close.assert_called_once()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_send_request_httpx_compatibility(self, mock_httpx_client):
+        """Test _send_request uses httpx.Client.request properly."""
+        mock_response = Mock()
+        mock_response.json.return_value = {"result": "success"}
+        mock_response.status_code = 200
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        client = DifyClient(self.api_key, self.base_url)
+        response = client._send_request("GET", "/test-endpoint")
+
+        # Verify httpx.Client.request was called correctly
+        mock_client_instance.request.assert_called_once()
+        call_args = mock_client_instance.request.call_args
+
+        # Verify method and endpoint
+        self.assertEqual(call_args[0][0], "GET")
+        self.assertEqual(call_args[0][1], "/test-endpoint")
+
+        # Verify headers contain authorization
+        headers = call_args[1]["headers"]
+        self.assertEqual(headers["Authorization"], f"Bearer {self.api_key}")
+        self.assertEqual(headers["Content-Type"], "application/json")
+
+        client.close()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_response_compatibility(self, mock_httpx_client):
+        """Test httpx.Response is compatible with requests.Response API."""
+        mock_response = Mock()
+        mock_response.json.return_value = {"key": "value"}
+        mock_response.text = '{"key": "value"}'
+        mock_response.content = b'{"key": "value"}'
+        mock_response.status_code = 200
+        mock_response.headers = {"Content-Type": "application/json"}
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        client = DifyClient(self.api_key, self.base_url)
+        response = client._send_request("GET", "/test")
+
+        # Verify all common response methods work
+        self.assertEqual(response.json(), {"key": "value"})
+        self.assertEqual(response.text, '{"key": "value"}')
+        self.assertEqual(response.content, b'{"key": "value"}')
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(response.headers["Content-Type"], "application/json")
+
+        client.close()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_all_client_classes_use_httpx(self, mock_httpx_client):
+        """Test that all client classes properly use httpx."""
+        mock_client_instance = Mock()
+        mock_httpx_client.return_value = mock_client_instance
+
+        clients = [
+            DifyClient(self.api_key, self.base_url),
+            ChatClient(self.api_key, self.base_url),
+            CompletionClient(self.api_key, self.base_url),
+            WorkflowClient(self.api_key, self.base_url),
+            WorkspaceClient(self.api_key, self.base_url),
+            KnowledgeBaseClient(self.api_key, self.base_url),
+        ]
+
+        # Verify httpx.Client was called for each client
+        self.assertEqual(mock_httpx_client.call_count, 6)
+
+        # Clean up
+        for client in clients:
+            client.close()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_json_parameter_handling(self, mock_httpx_client):
+        """Test that json parameter is passed correctly."""
+        mock_response = Mock()
+        mock_response.json.return_value = {"result": "success"}
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        client = DifyClient(self.api_key, self.base_url)
+        test_data = {"key": "value", "number": 123}
+
+        client._send_request("POST", "/test", json=test_data)
+
+        # Verify json parameter was passed
+        call_args = mock_client_instance.request.call_args
+        self.assertEqual(call_args[1]["json"], test_data)
+
+        client.close()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_params_parameter_handling(self, mock_httpx_client):
+        """Test that params parameter is passed correctly."""
+        mock_response = Mock()
+        mock_response.json.return_value = {"result": "success"}
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        client = DifyClient(self.api_key, self.base_url)
+        test_params = {"page": 1, "limit": 20}
+
+        client._send_request("GET", "/test", params=test_params)
+
+        # Verify params parameter was passed
+        call_args = mock_client_instance.request.call_args
+        self.assertEqual(call_args[1]["params"], test_params)
+
+        client.close()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_inheritance_chain(self, mock_httpx_client):
+        """Test that inheritance chain is maintained."""
+        mock_client_instance = Mock()
+        mock_httpx_client.return_value = mock_client_instance
+
+        # ChatClient inherits from DifyClient
+        chat_client = ChatClient(self.api_key, self.base_url)
+        self.assertIsInstance(chat_client, DifyClient)
+
+        # CompletionClient inherits from DifyClient
+        completion_client = CompletionClient(self.api_key, self.base_url)
+        self.assertIsInstance(completion_client, DifyClient)
+
+        # WorkflowClient inherits from DifyClient
+        workflow_client = WorkflowClient(self.api_key, self.base_url)
+        self.assertIsInstance(workflow_client, DifyClient)
+
+        # Clean up
+        chat_client.close()
+        completion_client.close()
+        workflow_client.close()
+
+    @patch("dify_client.client.httpx.Client")
+    def test_nested_context_managers(self, mock_httpx_client):
+        """Test nested context managers work correctly."""
+        mock_client_instance = Mock()
+        mock_httpx_client.return_value = mock_client_instance
+
+        with DifyClient(self.api_key, self.base_url) as client1:
+            with ChatClient(self.api_key, self.base_url) as client2:
+                self.assertEqual(client1.api_key, self.api_key)
+                self.assertEqual(client2.api_key, self.api_key)
+
+        # Both close methods should have been called
+        self.assertEqual(mock_client_instance.close.call_count, 2)
+
+
+class TestChatClientHttpx(unittest.TestCase):
+    """Test ChatClient specific httpx integration."""
+
+    @patch("dify_client.client.httpx.Client")
+    def test_create_chat_message_httpx(self, mock_httpx_client):
+        """Test create_chat_message works with httpx."""
+        mock_response = Mock()
+        mock_response.text = '{"answer": "Hello!"}'
+        mock_response.json.return_value = {"answer": "Hello!"}
+        mock_response.status_code = 200
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        with ChatClient("test-key") as client:
+            response = client.create_chat_message({}, "Hi", "user123")
+            self.assertIn("answer", response.text)
+            self.assertEqual(response.json()["answer"], "Hello!")
+
+
+class TestCompletionClientHttpx(unittest.TestCase):
+    """Test CompletionClient specific httpx integration."""
+
+    @patch("dify_client.client.httpx.Client")
+    def test_create_completion_message_httpx(self, mock_httpx_client):
+        """Test create_completion_message works with httpx."""
+        mock_response = Mock()
+        mock_response.text = '{"answer": "Response"}'
+        mock_response.json.return_value = {"answer": "Response"}
+        mock_response.status_code = 200
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        with CompletionClient("test-key") as client:
+            response = client.create_completion_message({"query": "test"}, "blocking", "user123")
+            self.assertIn("answer", response.text)
+
+
+class TestKnowledgeBaseClientHttpx(unittest.TestCase):
+    """Test KnowledgeBaseClient specific httpx integration."""
+
+    @patch("dify_client.client.httpx.Client")
+    def test_list_datasets_httpx(self, mock_httpx_client):
+        """Test list_datasets works with httpx."""
+        mock_response = Mock()
+        mock_response.json.return_value = {"data": [], "total": 0}
+        mock_response.status_code = 200
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        with KnowledgeBaseClient("test-key") as client:
+            response = client.list_datasets()
+            data = response.json()
+            self.assertIn("data", data)
+            self.assertIn("total", data)
+
+
+class TestWorkflowClientHttpx(unittest.TestCase):
+    """Test WorkflowClient specific httpx integration."""
+
+    @patch("dify_client.client.httpx.Client")
+    def test_run_workflow_httpx(self, mock_httpx_client):
+        """Test run workflow works with httpx."""
+        mock_response = Mock()
+        mock_response.json.return_value = {"result": "success"}
+        mock_response.status_code = 200
+
+        mock_client_instance = Mock()
+        mock_client_instance.request.return_value = mock_response
+        mock_httpx_client.return_value = mock_client_instance
+
+        with WorkflowClient("test-key") as client:
+            response = client.run({"input": "test"}, "blocking", "user123")
self.assertEqual(response.json()["result"], "success") + + +class TestWorkspaceClientHttpx(unittest.TestCase): + """Test WorkspaceClient specific httpx integration.""" + + @patch("dify_client.client.httpx.Client") + def test_get_available_models_httpx(self, mock_httpx_client): + """Test get_available_models works with httpx.""" + mock_response = Mock() + mock_response.json.return_value = {"data": []} + mock_response.status_code = 200 + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + with WorkspaceClient("test-key") as client: + response = client.get_available_models("llm") + self.assertIn("data", response.json()) + + +if __name__ == "__main__": + unittest.main() diff --git a/sdks/python-client/uv.lock b/sdks/python-client/uv.lock new file mode 100644 index 0000000000..19f348289b --- /dev/null +++ b/sdks/python-client/uv.lock @@ -0,0 +1,271 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" + +[[package]] +name = "aiofiles" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "dify-client" +version = "0.1.12" +source = { editable = "." } +dependencies = [ + { name = "aiofiles" }, + { name = "httpx" }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiofiles", specifier = ">=23.0.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" }, +] +provides-extras = ["dev"] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = 
"sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, 
upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, 
upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] diff --git a/web/.env.example b/web/.env.example index 23b72b3414..5bfcc9dac0 100644 --- a/web/.env.example +++ b/web/.env.example @@ -34,6 +34,9 @@ NEXT_PUBLIC_CSP_WHITELIST= # Default is not allow to embed into iframe to prevent Clickjacking: https://owasp.org/www-community/attacks/Clickjacking NEXT_PUBLIC_ALLOW_EMBED= +# Shared cookie domain when console UI and API use different subdomains (e.g. example.com) +NEXT_PUBLIC_COOKIE_DOMAIN= + # Allow rendering unsafe URLs which have "data:" scheme. NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME=false @@ -61,5 +64,9 @@ NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER=true NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL=true NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL=true +# Enable inline LaTeX rendering with single dollar signs ($...$) +# Default is false for security reasons to prevent conflicts with regular text +NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false + # The maximum number of tree node depth for workflow NEXT_PUBLIC_MAX_TREE_DEPTH=50 diff --git a/web/.storybook/__mocks__/context-block.tsx b/web/.storybook/__mocks__/context-block.tsx new file mode 100644 index 0000000000..8a9d8625cc --- /dev/null +++ b/web/.storybook/__mocks__/context-block.tsx @@ -0,0 +1,4 @@ +// Mock for context-block plugin to avoid circular dependency in Storybook +export const ContextBlockNode = null +export const ContextBlockReplacementBlock = null +export default null diff --git a/web/.storybook/__mocks__/history-block.tsx b/web/.storybook/__mocks__/history-block.tsx new file mode 100644 index 0000000000..e3c3965d13 --- /dev/null +++ b/web/.storybook/__mocks__/history-block.tsx @@ -0,0 +1,4 @@ +// Mock for history-block plugin to avoid circular dependency in Storybook +export const HistoryBlockNode = null +export const HistoryBlockReplacementBlock = null +export default null diff --git a/web/.storybook/__mocks__/query-block.tsx b/web/.storybook/__mocks__/query-block.tsx new file mode 100644 index 0000000000..d82f51363a --- /dev/null +++ b/web/.storybook/__mocks__/query-block.tsx @@ -0,0 +1,4 @@ +// Mock for query-block plugin to avoid circular dependency in Storybook +export const QueryBlockNode = null +export const QueryBlockReplacementBlock = null +export default null diff --git a/web/.storybook/main.ts b/web/.storybook/main.ts index fecf774e98..ca56261431 100644 --- a/web/.storybook/main.ts +++ b/web/.storybook/main.ts @@ -1,19 +1,45 @@ import type { StorybookConfig } from '@storybook/nextjs' +import path from 'node:path' +import { fileURLToPath } from 'node:url' + +const storybookDir = path.dirname(fileURLToPath(import.meta.url)) const config: StorybookConfig = { - // stories: ['../stories/**/*.mdx', '../stories/**/*.stories.@(js|jsx|mjs|ts|tsx)'], stories: ['../app/components/**/*.stories.@(js|jsx|mjs|ts|tsx)'], addons: [ '@storybook/addon-onboarding', '@storybook/addon-links', - '@storybook/addon-essentials', + '@storybook/addon-docs', '@chromatic-com/storybook', - '@storybook/addon-interactions', ], framework: { name: '@storybook/nextjs', - options: {}, + options: { + builder: { + useSWC: true, + lazyCompilation: false, + }, + nextConfigPath: undefined, + }, }, staticDirs: ['../public'], + core: { + disableWhatsNewNotifications: true, + 
}, + docs: { + defaultName: 'Documentation', + }, + webpackFinal: async (config) => { + // Add alias to mock problematic modules with circular dependencies + config.resolve = config.resolve || {} + config.resolve.alias = { + ...config.resolve.alias, + // Mock the plugin index files to avoid circular dependencies + [path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/context-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/context-block.tsx'), + [path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/history-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/history-block.tsx'), + [path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/query-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/query-block.tsx'), + } + return config + }, } export default config diff --git a/web/.storybook/preview.tsx b/web/.storybook/preview.tsx index 55328602f9..1f5726de34 100644 --- a/web/.storybook/preview.tsx +++ b/web/.storybook/preview.tsx @@ -1,12 +1,21 @@ -import React from 'react' import type { Preview } from '@storybook/react' import { withThemeByDataAttribute } from '@storybook/addon-themes' -import I18nServer from '../app/components/i18n-server' +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import I18N from '../app/components/i18n' +import { ToastProvider } from '../app/components/base/toast' import '../app/styles/globals.css' import '../app/styles/markdown.scss' import './storybook.css' +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + refetchOnWindowFocus: false, + }, + }, +}) + export const decorators = [ withThemeByDataAttribute({ themes: { @@ -17,9 +26,15 @@ export const decorators = [ attributeName: 'data-theme', }), (Story) => { - return - - + return ( + + + + + + + + ) }, ] @@ -31,7 +46,11 @@ const preview: Preview = { date: /Date$/i, }, }, + docs: { + toc: true, + }, }, + tags: ['autodocs'], } export default preview diff --git a/web/.storybook/utils/audio-player-manager.mock.ts b/web/.storybook/utils/audio-player-manager.mock.ts new file mode 100644 index 0000000000..aca8b56b76 --- /dev/null +++ b/web/.storybook/utils/audio-player-manager.mock.ts @@ -0,0 +1,64 @@ +import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager' + +type PlayerCallback = ((event: string) => void) | null + +class MockAudioPlayer { + private callback: PlayerCallback = null + private finishTimer?: ReturnType + + public setCallback(callback: PlayerCallback) { + this.callback = callback + } + + public playAudio() { + this.clearTimer() + this.callback?.('play') + this.finishTimer = setTimeout(() => { + this.callback?.('ended') + }, 2000) + } + + public pauseAudio() { + this.clearTimer() + this.callback?.('paused') + } + + private clearTimer() { + if (this.finishTimer) + clearTimeout(this.finishTimer) + } +} + +class MockAudioPlayerManager { + private readonly player = new MockAudioPlayer() + + public getAudioPlayer( + _url: string, + _isPublic: boolean, + _id: string | undefined, + _msgContent: string | null | undefined, + _voice: string | undefined, + callback: PlayerCallback, + ) { + this.player.setCallback(callback) + return this.player + } + + public resetMsgId() { + // No-op for the mock + } +} + +export const ensureMockAudioManager = () => { + const managerAny = AudioPlayerManager as unknown as { + getInstance: () => AudioPlayerManager + __isStorybookMockInstalled?: boolean + } + + if (managerAny.__isStorybookMockInstalled) + return + + const mock = new 
MockAudioPlayerManager() + managerAny.getInstance = () => mock as unknown as AudioPlayerManager + managerAny.__isStorybookMockInstalled = true +} diff --git a/web/__tests__/embedded-user-id-auth.test.tsx b/web/__tests__/embedded-user-id-auth.test.tsx new file mode 100644 index 0000000000..5c3c3c943f --- /dev/null +++ b/web/__tests__/embedded-user-id-auth.test.tsx @@ -0,0 +1,132 @@ +import React from 'react' +import { fireEvent, render, screen, waitFor } from '@testing-library/react' + +import MailAndPasswordAuth from '@/app/(shareLayout)/webapp-signin/components/mail-and-password-auth' +import CheckCode from '@/app/(shareLayout)/webapp-signin/check-code/page' + +jest.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string) => key, + }), +})) + +const replaceMock = jest.fn() +const backMock = jest.fn() + +jest.mock('next/navigation', () => ({ + usePathname: jest.fn(() => '/chatbot/test-app'), + useRouter: jest.fn(() => ({ + replace: replaceMock, + back: backMock, + })), + useSearchParams: jest.fn(), +})) + +const mockStoreState = { + embeddedUserId: 'embedded-user-99', + shareCode: 'test-app', +} + +const useWebAppStoreMock = jest.fn((selector?: (state: typeof mockStoreState) => any) => { + return selector ? selector(mockStoreState) : mockStoreState +}) + +jest.mock('@/context/web-app-context', () => ({ + useWebAppStore: (selector?: (state: typeof mockStoreState) => any) => useWebAppStoreMock(selector), +})) + +const webAppLoginMock = jest.fn() +const webAppEmailLoginWithCodeMock = jest.fn() +const sendWebAppEMailLoginCodeMock = jest.fn() + +jest.mock('@/service/common', () => ({ + webAppLogin: (...args: any[]) => webAppLoginMock(...args), + webAppEmailLoginWithCode: (...args: any[]) => webAppEmailLoginWithCodeMock(...args), + sendWebAppEMailLoginCode: (...args: any[]) => sendWebAppEMailLoginCodeMock(...args), +})) + +const fetchAccessTokenMock = jest.fn() + +jest.mock('@/service/share', () => ({ + fetchAccessToken: (...args: any[]) => fetchAccessTokenMock(...args), +})) + +const setWebAppAccessTokenMock = jest.fn() +const setWebAppPassportMock = jest.fn() + +jest.mock('@/service/webapp-auth', () => ({ + setWebAppAccessToken: (...args: any[]) => setWebAppAccessTokenMock(...args), + setWebAppPassport: (...args: any[]) => setWebAppPassportMock(...args), + webAppLogout: jest.fn(), +})) + +jest.mock('@/app/components/signin/countdown', () => () =>
<span />) + +jest.mock('@remixicon/react', () => ({ + RiMailSendFill: () => <span />
, + RiArrowLeftLine: () => <span />
, +})) + +const { useSearchParams } = jest.requireMock('next/navigation') as { + useSearchParams: jest.Mock +} + +beforeEach(() => { + jest.clearAllMocks() +}) + +describe('embedded user id propagation in authentication flows', () => { + it('passes embedded user id when logging in with email and password', async () => { + const params = new URLSearchParams() + params.set('redirect_url', encodeURIComponent('/chatbot/test-app')) + useSearchParams.mockReturnValue(params) + + webAppLoginMock.mockResolvedValue({ result: 'success', data: { access_token: 'login-token' } }) + fetchAccessTokenMock.mockResolvedValue({ access_token: 'passport-token' }) + + render(<MailAndPasswordAuth />) + + fireEvent.change(screen.getByLabelText('login.email'), { target: { value: 'user@example.com' } }) + fireEvent.change(screen.getByLabelText(/login\.password/), { target: { value: 'strong-password' } }) + fireEvent.click(screen.getByRole('button', { name: 'login.signBtn' })) + + await waitFor(() => { + expect(fetchAccessTokenMock).toHaveBeenCalledWith({ + appCode: 'test-app', + userId: 'embedded-user-99', + }) + }) + expect(setWebAppAccessTokenMock).toHaveBeenCalledWith('login-token') + expect(setWebAppPassportMock).toHaveBeenCalledWith('test-app', 'passport-token') + expect(replaceMock).toHaveBeenCalledWith('/chatbot/test-app') + }) + + it('passes embedded user id when verifying email code', async () => { + const params = new URLSearchParams() + params.set('redirect_url', encodeURIComponent('/chatbot/test-app')) + params.set('email', encodeURIComponent('user@example.com')) + params.set('token', encodeURIComponent('token-abc')) + useSearchParams.mockReturnValue(params) + + webAppEmailLoginWithCodeMock.mockResolvedValue({ result: 'success', data: { access_token: 'code-token' } }) + fetchAccessTokenMock.mockResolvedValue({ access_token: 'passport-token' }) + + render(<CheckCode />) + + fireEvent.change( + screen.getByPlaceholderText('login.checkCode.verificationCodePlaceholder'), + { target: { value: '123456' } }, + ) + fireEvent.click(screen.getByRole('button', { name: 'login.checkCode.verify' })) + + await waitFor(() => { + expect(fetchAccessTokenMock).toHaveBeenCalledWith({ + appCode: 'test-app', + userId: 'embedded-user-99', + }) + }) + expect(setWebAppAccessTokenMock).toHaveBeenCalledWith('code-token') + expect(setWebAppPassportMock).toHaveBeenCalledWith('test-app', 'passport-token') + expect(replaceMock).toHaveBeenCalledWith('/chatbot/test-app') + }) +}) diff --git a/web/__tests__/embedded-user-id-store.test.tsx b/web/__tests__/embedded-user-id-store.test.tsx new file mode 100644 index 0000000000..24a815222e --- /dev/null +++ b/web/__tests__/embedded-user-id-store.test.tsx @@ -0,0 +1,155 @@ +import React from 'react' +import { render, screen, waitFor } from '@testing-library/react' + +import WebAppStoreProvider, { useWebAppStore } from '@/context/web-app-context' + +jest.mock('next/navigation', () => ({ + usePathname: jest.fn(() => '/chatbot/sample-app'), + useSearchParams: jest.fn(() => { + const params = new URLSearchParams() + return params + }), +})) + +jest.mock('@/service/use-share', () => { + const { AccessMode } = jest.requireActual('@/models/access-control') + return { + useGetWebAppAccessModeByCode: jest.fn(() => ({ + isLoading: false, + data: { accessMode: AccessMode.PUBLIC }, + })), + } +}) + +jest.mock('@/app/components/base/chat/utils', () => ({ + getProcessedSystemVariablesFromUrlParams: jest.fn(), +})) + +const { getProcessedSystemVariablesFromUrlParams: mockGetProcessedSystemVariablesFromUrlParams } + = 
jest.requireMock('@/app/components/base/chat/utils') as { + getProcessedSystemVariablesFromUrlParams: jest.Mock + } + +jest.mock('@/context/global-public-context', () => { + const mockGlobalStoreState = { + isGlobalPending: false, + setIsGlobalPending: jest.fn(), + systemFeatures: {}, + setSystemFeatures: jest.fn(), + } + const useGlobalPublicStore = Object.assign( + (selector?: (state: typeof mockGlobalStoreState) => any) => + selector ? selector(mockGlobalStoreState) : mockGlobalStoreState, + { + setState: (updater: any) => { + if (typeof updater === 'function') + Object.assign(mockGlobalStoreState, updater(mockGlobalStoreState) ?? {}) + + else + Object.assign(mockGlobalStoreState, updater) + }, + __mockState: mockGlobalStoreState, + }, + ) + return { + useGlobalPublicStore, + } +}) + +const { + useGlobalPublicStore: useGlobalPublicStoreMock, +} = jest.requireMock('@/context/global-public-context') as { + useGlobalPublicStore: ((selector?: (state: any) => any) => any) & { + setState: (updater: any) => void + __mockState: { + isGlobalPending: boolean + setIsGlobalPending: jest.Mock + systemFeatures: Record + setSystemFeatures: jest.Mock + } + } +} +const mockGlobalStoreState = useGlobalPublicStoreMock.__mockState + +const TestConsumer = () => { + const embeddedUserId = useWebAppStore(state => state.embeddedUserId) + const embeddedConversationId = useWebAppStore(state => state.embeddedConversationId) + return ( + <> +
<div data-testid="embedded-user-id">{embeddedUserId ?? 'null'}</div> + <div data-testid="embedded-conversation-id">{embeddedConversationId ?? 'null'}</div>
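+      {/* render the literal string 'null' so assertions can distinguish a cleared id from a missing node */}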
+ </> + ) +} + +const initialWebAppStore = (() => { + const snapshot = useWebAppStore.getState() + return { + shareCode: null as string | null, + appInfo: null, + appParams: null, + webAppAccessMode: snapshot.webAppAccessMode, + appMeta: null, + userCanAccessApp: false, + embeddedUserId: null, + embeddedConversationId: null, + updateShareCode: snapshot.updateShareCode, + updateAppInfo: snapshot.updateAppInfo, + updateAppParams: snapshot.updateAppParams, + updateWebAppAccessMode: snapshot.updateWebAppAccessMode, + updateWebAppMeta: snapshot.updateWebAppMeta, + updateUserCanAccessApp: snapshot.updateUserCanAccessApp, + updateEmbeddedUserId: snapshot.updateEmbeddedUserId, + updateEmbeddedConversationId: snapshot.updateEmbeddedConversationId, + } +})() + +beforeEach(() => { + mockGlobalStoreState.isGlobalPending = false + mockGetProcessedSystemVariablesFromUrlParams.mockReset() + useWebAppStore.setState(initialWebAppStore, true) +}) + +describe('WebAppStoreProvider embedded user id handling', () => { + it('hydrates embedded user and conversation ids from system variables', async () => { + mockGetProcessedSystemVariablesFromUrlParams.mockResolvedValue({ + user_id: 'iframe-user-123', + conversation_id: 'conversation-456', + }) + + render( + <WebAppStoreProvider> + <TestConsumer /> + </WebAppStoreProvider>, + ) + + await waitFor(() => { + expect(screen.getByTestId('embedded-user-id')).toHaveTextContent('iframe-user-123') + expect(screen.getByTestId('embedded-conversation-id')).toHaveTextContent('conversation-456') + }) + expect(useWebAppStore.getState().embeddedUserId).toBe('iframe-user-123') + expect(useWebAppStore.getState().embeddedConversationId).toBe('conversation-456') + }) + + it('clears embedded user id when system variable is absent', async () => { + useWebAppStore.setState(state => ({ + ...state, + embeddedUserId: 'previous-user', + embeddedConversationId: 'existing-conversation', + })) + mockGetProcessedSystemVariablesFromUrlParams.mockResolvedValue({}) + + render( + <WebAppStoreProvider> + <TestConsumer /> + </WebAppStoreProvider>, + ) + + await waitFor(() => { + expect(screen.getByTestId('embedded-user-id')).toHaveTextContent('null') + expect(screen.getByTestId('embedded-conversation-id')).toHaveTextContent('null') + }) + expect(useWebAppStore.getState().embeddedUserId).toBeNull() + expect(useWebAppStore.getState().embeddedConversationId).toBeNull() + }) +}) diff --git a/web/__tests__/goto-anything/command-selector.test.tsx b/web/__tests__/goto-anything/command-selector.test.tsx index 1db4be31fb..6d4e045d49 100644 --- a/web/__tests__/goto-anything/command-selector.test.tsx +++ b/web/__tests__/goto-anything/command-selector.test.tsx @@ -16,7 +16,7 @@ jest.mock('cmdk', () => ({ Item: ({ children, onSelect, value, className }: any) => ( <div className={className} - onClick={() =>
onSelect && onSelect()} + onClick={() => onSelect?.()} data-value={value} data-testid={`command-item-${value}`} > diff --git a/web/__tests__/navigation-utils.test.ts b/web/__tests__/navigation-utils.test.ts index 9a388505d6..fa4986e63d 100644 --- a/web/__tests__/navigation-utils.test.ts +++ b/web/__tests__/navigation-utils.test.ts @@ -160,8 +160,7 @@ describe('Navigation Utilities', () => { page: 1, limit: '', keyword: 'test', - empty: null, - undefined, + filter: '', }) expect(path).toBe('/datasets/123/documents?page=1&keyword=test') diff --git a/web/__tests__/real-browser-flicker.test.tsx b/web/__tests__/real-browser-flicker.test.tsx index 52bdf4777f..0a0ea0c062 100644 --- a/web/__tests__/real-browser-flicker.test.tsx +++ b/web/__tests__/real-browser-flicker.test.tsx @@ -13,39 +13,185 @@ import { ThemeProvider } from 'next-themes' import useTheme from '@/hooks/use-theme' import { useEffect, useState } from 'react' +const DARK_MODE_MEDIA_QUERY = /prefers-color-scheme:\s*dark/i + // Setup browser environment for testing const setupMockEnvironment = (storedTheme: string | null, systemPrefersDark = false) => { - // Mock localStorage - const mockStorage = { - getItem: jest.fn((key: string) => { - if (key === 'theme') return storedTheme - return null - }), - setItem: jest.fn(), - removeItem: jest.fn(), + if (typeof window === 'undefined') + return + + try { + window.localStorage.clear() + } + catch { + // ignore if localStorage has been replaced by a throwing stub } - // Mock system theme preference - const mockMatchMedia = jest.fn((query: string) => ({ - matches: query.includes('dark') && systemPrefersDark, - media: query, - addListener: jest.fn(), - removeListener: jest.fn(), - })) + if (storedTheme === null) + window.localStorage.removeItem('theme') + else + window.localStorage.setItem('theme', storedTheme) - if (typeof window !== 'undefined') { - Object.defineProperty(window, 'localStorage', { - value: mockStorage, - configurable: true, - }) + document.documentElement.removeAttribute('data-theme') - Object.defineProperty(window, 'matchMedia', { - value: mockMatchMedia, - configurable: true, + const mockMatchMedia: typeof window.matchMedia = (query: string) => { + const listeners = new Set<(event: MediaQueryListEvent) => void>() + const isDarkQuery = DARK_MODE_MEDIA_QUERY.test(query) + const matches = isDarkQuery ? 
systemPrefersDark : false + + const handleAddListener = (listener: (event: MediaQueryListEvent) => void) => { + listeners.add(listener) + } + + const handleRemoveListener = (listener: (event: MediaQueryListEvent) => void) => { + listeners.delete(listener) + } + + const handleAddEventListener = (_event: string, listener: EventListener) => { + if (typeof listener === 'function') + listeners.add(listener as (event: MediaQueryListEvent) => void) + } + + const handleRemoveEventListener = (_event: string, listener: EventListener) => { + if (typeof listener === 'function') + listeners.delete(listener as (event: MediaQueryListEvent) => void) + } + + const handleDispatchEvent = (event: Event) => { + listeners.forEach(listener => listener(event as MediaQueryListEvent)) + return true + } + + const mediaQueryList: MediaQueryList = { + matches, + media: query, + onchange: null, + addListener: handleAddListener, + removeListener: handleRemoveListener, + addEventListener: handleAddEventListener, + removeEventListener: handleRemoveEventListener, + dispatchEvent: handleDispatchEvent, + } + + return mediaQueryList + } + + jest.spyOn(window, 'matchMedia').mockImplementation(mockMatchMedia) +} + +// Helper function to create timing page component +const createTimingPageComponent = ( + timingData: Array<{ phase: string; timestamp: number; styles: { backgroundColor: string; color: string } }>, +) => { + const recordTiming = (phase: string, styles: { backgroundColor: string; color: string }) => { + timingData.push({ + phase, + timestamp: performance.now(), + styles, }) } - return { mockStorage, mockMatchMedia } + const TimingPageComponent = () => { + const [mounted, setMounted] = useState(false) + const { theme } = useTheme() + const isDark = mounted ? theme === 'dark' : false + + const currentStyles = { + backgroundColor: isDark ? '#1f2937' : '#ffffff', + color: isDark ? '#ffffff' : '#000000', + } + + recordTiming(mounted ? 'CSR' : 'Initial', currentStyles) + + useEffect(() => { + setMounted(true) + }, []) + + return ( +
<div style={currentStyles}> + Phase: {mounted ? 'CSR' : 'Initial'} | Theme: {theme} | Visual: {isDark ? 'dark' : 'light'}
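+        {/* each render pushes its phase and the computed inline styles into timingData for later comparison */}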
+      </div>
+ ) + } + + return TimingPageComponent +} + +// Helper function to create CSS test component +const createCSSTestComponent = ( + cssStates: Array<{ className: string; timestamp: number }>, +) => { + const recordCSSState = (className: string) => { + cssStates.push({ + className, + timestamp: performance.now(), + }) + } + + const CSSTestComponent = () => { + const [mounted, setMounted] = useState(false) + const { theme } = useTheme() + const isDark = mounted ? theme === 'dark' : false + + const className = `min-h-screen ${isDark ? 'bg-gray-900 text-white' : 'bg-white text-black'}` + + recordCSSState(className) + + useEffect(() => { + setMounted(true) + }, []) + + return ( +
<div className={className}> + Classes: {className}
+      </div>
+ ) + } + + return CSSTestComponent +} + +// Helper function to create performance test component +const createPerformanceTestComponent = ( + performanceMarks: Array<{ event: string; timestamp: number }>, +) => { + const recordPerformanceMark = (event: string) => { + performanceMarks.push({ event, timestamp: performance.now() }) + } + + const PerformanceTestComponent = () => { + const [mounted, setMounted] = useState(false) + const { theme } = useTheme() + + recordPerformanceMark('component-render') + + useEffect(() => { + recordPerformanceMark('mount-start') + setMounted(true) + recordPerformanceMark('mount-complete') + }, []) + + useEffect(() => { + if (theme) + recordPerformanceMark('theme-available') + }, [theme]) + + return ( +
<div>
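+        {/* theme stays undefined until next-themes hydrates on mount, hence the 'loading' fallback */}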
+ ) + } + + return PerformanceTestComponent } // Simulate real page component based on Dify's actual theme usage @@ -94,7 +240,17 @@ const TestThemeProvider = ({ children }: { children: React.ReactNode }) => ( describe('Real Browser Environment Dark Mode Flicker Test', () => { beforeEach(() => { + jest.restoreAllMocks() jest.clearAllMocks() + if (typeof window !== 'undefined') { + try { + window.localStorage.clear() + } + catch { + // ignore when localStorage is replaced with an error-throwing stub + } + document.documentElement.removeAttribute('data-theme') + } }) describe('Page Refresh Scenario Simulation', () => { @@ -196,39 +352,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => { setupMockEnvironment('dark') const timingData: Array<{ phase: string; timestamp: number; styles: any }> = [] - - const TimingPageComponent = () => { - const [mounted, setMounted] = useState(false) - const { theme } = useTheme() - const isDark = mounted ? theme === 'dark' : false - - // Record timing and styles for each render phase - const currentStyles = { - backgroundColor: isDark ? '#1f2937' : '#ffffff', - color: isDark ? '#ffffff' : '#000000', - } - - timingData.push({ - phase: mounted ? 'CSR' : 'Initial', - timestamp: performance.now(), - styles: currentStyles, - }) - - useEffect(() => { - setMounted(true) - }, []) - - return ( -
<div style={currentStyles}> - Phase: {mounted ? 'CSR' : 'Initial'} | Theme: {theme} | Visual: {isDark ? 'dark' : 'light'} - </div>
- ) - } + const TimingPageComponent = createTimingPageComponent(timingData) render( @@ -264,33 +388,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => { setupMockEnvironment('dark') const cssStates: Array<{ className: string; timestamp: number }> = [] - - const CSSTestComponent = () => { - const [mounted, setMounted] = useState(false) - const { theme } = useTheme() - const isDark = mounted ? theme === 'dark' : false - - // Simulate Tailwind CSS class application - const className = `min-h-screen ${isDark ? 'bg-gray-900 text-white' : 'bg-white text-black'}` - - cssStates.push({ - className, - timestamp: performance.now(), - }) - - useEffect(() => { - setMounted(true) - }, []) - - return ( -
<div className={className}> - Classes: {className} - </div>
- ) - } + const CSSTestComponent = createCSSTestComponent(cssStates) render( @@ -323,35 +421,40 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => { describe('Edge Cases and Error Handling', () => { test('handles localStorage access errors gracefully', async () => { - // Mock localStorage to throw an error + setupMockEnvironment(null) + const mockStorage = { getItem: jest.fn(() => { throw new Error('LocalStorage access denied') }), setItem: jest.fn(), removeItem: jest.fn(), + clear: jest.fn(), } - if (typeof window !== 'undefined') { - Object.defineProperty(window, 'localStorage', { - value: mockStorage, - configurable: true, - }) - } - - render( - - - , - ) - - // Should fallback gracefully without crashing - await waitFor(() => { - expect(screen.getByTestId('theme-indicator')).toBeInTheDocument() + Object.defineProperty(window, 'localStorage', { + value: mockStorage, + configurable: true, }) - // Should default to light theme when localStorage fails - expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light') + try { + render( + + + , + ) + + // Should fallback gracefully without crashing + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toBeInTheDocument() + }) + + // Should default to light theme when localStorage fails + expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light') + } + finally { + Reflect.deleteProperty(window, 'localStorage') + } }) test('handles invalid theme values in localStorage', async () => { @@ -377,32 +480,12 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => { test('verifies ThemeProvider position fix reduces initialization delay', async () => { const performanceMarks: Array<{ event: string; timestamp: number }> = [] - const PerformanceTestComponent = () => { - const [mounted, setMounted] = useState(false) - const { theme } = useTheme() - - performanceMarks.push({ event: 'component-render', timestamp: performance.now() }) - - useEffect(() => { - performanceMarks.push({ event: 'mount-start', timestamp: performance.now() }) - setMounted(true) - performanceMarks.push({ event: 'mount-complete', timestamp: performance.now() }) - }, []) - - useEffect(() => { - if (theme) - performanceMarks.push({ event: 'theme-available', timestamp: performance.now() }) - }, [theme]) - - return ( -
<div> - Mounted: {mounted.toString()} | Theme: {theme || 'loading'} - </div>
- ) - } - setupMockEnvironment('dark') + expect(window.localStorage.getItem('theme')).toBe('dark') + + const PerformanceTestComponent = createPerformanceTestComponent(performanceMarks) + render( diff --git a/web/__tests__/unified-tags-logic.test.ts b/web/__tests__/unified-tags-logic.test.ts index c920e28e0a..ec73a6a268 100644 --- a/web/__tests__/unified-tags-logic.test.ts +++ b/web/__tests__/unified-tags-logic.test.ts @@ -70,14 +70,18 @@ describe('Unified Tags Editing - Pure Logic Tests', () => { }) describe('Fallback Logic (from layout-main.tsx)', () => { + type Tag = { id: string; name: string } + type AppDetail = { tags: Tag[] } + type FallbackResult = { tags?: Tag[] } | null + // no-op it('should trigger fallback when tags are missing or empty', () => { - const appDetailWithoutTags = { tags: [] } - const appDetailWithTags = { tags: [{ id: 'tag1' }] } - const appDetailWithUndefinedTags = { tags: undefined as any } + const appDetailWithoutTags: AppDetail = { tags: [] } + const appDetailWithTags: AppDetail = { tags: [{ id: 'tag1', name: 't' }] } + const appDetailWithUndefinedTags: { tags: Tag[] | undefined } = { tags: undefined } // This simulates the condition in layout-main.tsx - const shouldFallback1 = !appDetailWithoutTags.tags || appDetailWithoutTags.tags.length === 0 - const shouldFallback2 = !appDetailWithTags.tags || appDetailWithTags.tags.length === 0 + const shouldFallback1 = appDetailWithoutTags.tags.length === 0 + const shouldFallback2 = appDetailWithTags.tags.length === 0 const shouldFallback3 = !appDetailWithUndefinedTags.tags || appDetailWithUndefinedTags.tags.length === 0 expect(shouldFallback1).toBe(true) // Empty array should trigger fallback @@ -86,24 +90,26 @@ describe('Unified Tags Editing - Pure Logic Tests', () => { }) it('should preserve tags when fallback succeeds', () => { - const originalAppDetail = { tags: [] as any[] } - const fallbackResult = { tags: [{ id: 'tag1', name: 'fallback-tag' }] } + const originalAppDetail: AppDetail = { tags: [] } + const fallbackResult: { tags?: Tag[] } = { tags: [{ id: 'tag1', name: 'fallback-tag' }] } // This simulates the successful fallback in layout-main.tsx - if (fallbackResult?.tags) - originalAppDetail.tags = fallbackResult.tags + const tags = fallbackResult.tags + if (tags) + originalAppDetail.tags = tags expect(originalAppDetail.tags).toEqual(fallbackResult.tags) expect(originalAppDetail.tags.length).toBe(1) }) it('should continue with empty tags when fallback fails', () => { - const originalAppDetail: { tags: any[] } = { tags: [] } - const fallbackResult: { tags?: any[] } | null = null + const originalAppDetail: AppDetail = { tags: [] } + const fallbackResult = null as FallbackResult // This simulates fallback failure in layout-main.tsx - if (fallbackResult?.tags) - originalAppDetail.tags = fallbackResult.tags + const tags: Tag[] | undefined = fallbackResult && 'tags' in fallbackResult ? 
fallbackResult.tags : undefined + if (tags) + originalAppDetail.tags = tags expect(originalAppDetail.tags).toEqual([]) }) diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx index 1ab40e31bf..246a1eb6a3 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx @@ -4,6 +4,7 @@ import React, { useCallback, useRef, useState } from 'react' import type { PopupProps } from './config-popup' import ConfigPopup from './config-popup' +import cn from '@/utils/classnames' import { PortalToFollowElem, PortalToFollowElemContent, @@ -45,7 +46,7 @@ const ConfigBtn: FC = ({ offset={12} > -
+
{children}
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx index 907c270017..0ad02ad7f3 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import TracingIcon from './tracing-icon' import ProviderPanel from './provider-panel' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import ProviderConfigModal from './provider-config-modal' import Indicator from '@/app/components/header/indicator' @@ -30,7 +30,8 @@ export type PopupProps = { opikConfig: OpikConfig | null weaveConfig: WeaveConfig | null aliyunConfig: AliyunConfig | null - onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void + tencentConfig: TencentConfig | null + onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void onConfigRemoved: (provider: TracingProvider) => void } @@ -48,6 +49,7 @@ const ConfigPopup: FC = ({ opikConfig, weaveConfig, aliyunConfig, + tencentConfig, onConfigUpdated, onConfigRemoved, }) => { @@ -71,7 +73,7 @@ const ConfigPopup: FC = ({ } }, [onChooseProvider]) - const handleConfigUpdated = useCallback((payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => { + const handleConfigUpdated = useCallback((payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => { onConfigUpdated(currentProvider!, payload) hideConfigModal() }, [currentProvider, hideConfigModal, onConfigUpdated]) @@ -81,8 +83,8 @@ const ConfigPopup: FC = ({ hideConfigModal() }, [currentProvider, hideConfigModal, onConfigRemoved]) - const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig - const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig + const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig && tencentConfig + const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig && !tencentConfig const switchContent = ( = ({ key="aliyun-provider-panel" /> ) + + const tencentPanel = ( + + ) const configuredProviderPanel = () => { const configuredPanels: JSX.Element[] = [] @@ -206,6 +221,9 @@ const ConfigPopup: FC = ({ if (aliyunConfig) configuredPanels.push(aliyunPanel) + if (tencentConfig) + configuredPanels.push(tencentPanel) + return configuredPanels } @@ -233,6 +251,9 @@ const ConfigPopup: FC = ({ if (!aliyunConfig) notConfiguredPanels.push(aliyunPanel) + if (!tencentConfig) + 
notConfiguredPanels.push(tencentPanel) + return notConfiguredPanels } @@ -249,6 +270,8 @@ const ConfigPopup: FC = ({ return opikConfig if (currentProvider === TracingProvider.aliyun) return aliyunConfig + if (currentProvider === TracingProvider.tencent) + return tencentConfig return weaveConfig } @@ -297,6 +320,7 @@ const ConfigPopup: FC = ({ {arizePanel} {phoenixPanel} {aliyunPanel} + {tencentPanel}
 )
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts
index 4c81b63ea2..00f6224e9e 100644
--- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts
+++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts
@@ -8,4 +8,5 @@ export const docURL = {
   [TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions',
   [TracingProvider.weave]: 'https://weave-docs.wandb.ai/',
   [TracingProvider.aliyun]: 'https://help.aliyun.com/zh/arms/tracing-analysis/untitled-document-1750672984680',
+  [TracingProvider.tencent]: 'https://cloud.tencent.com/document/product/248/116531',
 }
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
index f79745c4dd..e1fd39fd48 100644
--- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
+++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
@@ -8,12 +8,12 @@ import {
 import { useTranslation } from 'react-i18next'
 import { usePathname } from 'next/navigation'
 import { useBoolean } from 'ahooks'
-import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
+import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type'
 import { TracingProvider } from './type'
 import TracingIcon from './tracing-icon'
 import ConfigButton from './config-button'
 import cn from '@/utils/classnames'
-import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
+import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, TencentIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
 import Indicator from '@/app/components/header/indicator'
 import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps'
 import type { TracingStatus } from '@/models/app'
@@ -71,6 +71,7 @@ const Panel: FC = () => {
     [TracingProvider.opik]: OpikIcon,
     [TracingProvider.weave]: WeaveIcon,
     [TracingProvider.aliyun]: AliyunIcon,
+    [TracingProvider.tencent]: TencentIcon,
   }
   const InUseProviderIcon = inUseTracingProvider ? providerIconMap[inUseTracingProvider] : undefined
@@ -81,7 +82,8 @@ const Panel: FC = () => {
   const [opikConfig, setOpikConfig] = useState(null)
   const [weaveConfig, setWeaveConfig] = useState(null)
   const [aliyunConfig, setAliyunConfig] = useState(null)
-  const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig)
+  const [tencentConfig, setTencentConfig] = useState(null)
+  const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig || tencentConfig)

   const fetchTracingConfig = async () => {
     const getArizeConfig = async () => {
@@ -119,6 +121,11 @@ const Panel: FC = () => {
       if (!aliyunHasNotConfig)
         setAliyunConfig(aliyunConfig as AliyunConfig)
     }
+    const getTencentConfig = async () => {
+      const { tracing_config: tencentConfig, has_not_configured: tencentHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.tencent })
+      if (!tencentHasNotConfig)
+        setTencentConfig(tencentConfig as TencentConfig)
+    }
     Promise.all([
       getArizeConfig(),
       getPhoenixConfig(),
@@ -127,6 +134,7 @@ const Panel: FC = () => {
       getOpikConfig(),
       getWeaveConfig(),
       getAliyunConfig(),
+      getTencentConfig(),
     ])
   }
@@ -147,6 +155,8 @@ const Panel: FC = () => {
       setWeaveConfig(tracing_config as WeaveConfig)
     else if (provider === TracingProvider.aliyun)
       setAliyunConfig(tracing_config as AliyunConfig)
+    else if (provider === TracingProvider.tencent)
+      setTencentConfig(tracing_config as TencentConfig)
   }

   const handleTracingConfigRemoved = (provider: TracingProvider) => {
@@ -164,6 +174,8 @@ const Panel: FC = () => {
       setWeaveConfig(null)
     else if (provider === TracingProvider.aliyun)
       setAliyunConfig(null)
+    else if (provider === TracingProvider.tencent)
+      setTencentConfig(null)
     if (provider === inUseTracingProvider) {
       handleTracingStatusChange({
         enabled: false,
@@ -209,6 +221,7 @@ const Panel: FC = () => {
           opikConfig={opikConfig}
           weaveConfig={weaveConfig}
           aliyunConfig={aliyunConfig}
+          tencentConfig={tencentConfig}
           onConfigUpdated={handleTracingConfigUpdated}
           onConfigRemoved={handleTracingConfigRemoved}
         >
@@ -245,6 +258,7 @@ const Panel: FC = () => {
           opikConfig={opikConfig}
           weaveConfig={weaveConfig}
           aliyunConfig={aliyunConfig}
+          tencentConfig={tencentConfig}
           onConfigUpdated={handleTracingConfigUpdated}
           onConfigRemoved={handleTracingConfigRemoved}
         >
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx
index 318f1f61d6..9682bf6a07 100644
--- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx
+++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx
@@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { useBoolean } from 'ahooks'
 import Field from './field'
-import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
+import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type'
 import { TracingProvider } from './type'
 import { docURL } from './config'
 import {
@@ -22,10 +22,10 @@ import Divider from '@/app/components/base/divider'
 type Props = {
   appId: string
   type: TracingProvider
-  payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | null
+  payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig | null
   onRemoved: () => void
   onCancel: () => void
-  onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void
+  onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void
   onChosen: (provider: TracingProvider) => void
 }
@@ -77,6 +77,12 @@ const aliyunConfigTemplate = {
   endpoint: '',
 }

+const tencentConfigTemplate = {
+  token: '',
+  endpoint: '',
+  service_name: '',
+}
+
 const ProviderConfigModal: FC = ({
   appId,
   type,
@@ -90,7 +96,7 @@ const ProviderConfigModal: FC = ({
   const isEdit = !!payload
   const isAdd = !isEdit
   const [isSaving, setIsSaving] = useState(false)
-  const [config, setConfig] = useState<ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | null>((() => {
+  const [config, setConfig] = useState<ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig | null>((() => {
     if (isEdit)
       return payload
@@ -112,6 +118,9 @@ const ProviderConfigModal: FC = ({
     else if (type === TracingProvider.aliyun)
       return aliyunConfigTemplate

+    else if (type === TracingProvider.tencent)
+      return tencentConfigTemplate
+
     return weaveConfigTemplate
   })())
   const [isShowRemoveConfirm, {
@@ -202,6 +211,16 @@ const ProviderConfigModal: FC = ({
         errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' })
     }

+    if (type === TracingProvider.tencent) {
+      const postData = config as TencentConfig
+      if (!errorMessage && !postData.token)
+        errorMessage = t('common.errorMsg.fieldRequired', { field: 'Token' })
+      if (!errorMessage && !postData.endpoint)
+        errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' })
+      if (!errorMessage && !postData.service_name)
+        errorMessage = t('common.errorMsg.fieldRequired', { field: 'Service Name' })
+    }
+
     return errorMessage
   }, [config, t, type])
   const handleSave = useCallback(async () => {
@@ -338,6 +357,34 @@ const ProviderConfigModal: FC = ({
             />
           )}
+          {type === TracingProvider.tencent && (
+            <>
+
+
+
+          )}
           {type === TracingProvider.weave && (
             <>
 {
   [TracingProvider.opik]: OpikIconBig,
   [TracingProvider.weave]: WeaveIconBig,
   [TracingProvider.aliyun]: AliyunIconBig,
+  [TracingProvider.tencent]: TencentIconBig,
 })[type]
 }
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts
index 78bca41ad2..719451f5d0 100644
--- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts
+++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts
@@ -6,6 +6,7 @@ export enum TracingProvider {
   opik = 'opik',
   weave = 'weave',
   aliyun = 'aliyun',
+  tencent = 'tencent',
 }

 export type ArizeConfig = {
@@ -53,3 +54,9 @@ export type AliyunConfig = {
   license_key: string
   endpoint: string
 }
+
+export type TencentConfig = {
+  token: string
+  endpoint: string
+  service_name: string
+}
diff --git a/web/app/(commonLayout)/explore/installed/[appId]/page.tsx b/web/app/(commonLayout)/explore/installed/[appId]/page.tsx
index e288c62b5d..983fdb9d23 100644
--- a/web/app/(commonLayout)/explore/installed/[appId]/page.tsx
+++ b/web/app/(commonLayout)/explore/installed/[appId]/page.tsx
@@ -2,14 +2,14 @@ import React from 'react'
 import Main from '@/app/components/explore/installed-app'

 export type IInstalledAppProps = {
-  params: {
+  params?: Promise<{
     appId: string
-  }
+  }>
 }

 // Using Next.js page convention for async server components
 async function InstalledApp({ params }: IInstalledAppProps) {
-  const appId = (await params).appId
+  const { appId } = await (params ?? Promise.reject(new Error('Missing params')))
   return (

 )
diff --git a/web/app/(commonLayout)/layout.tsx b/web/app/(commonLayout)/layout.tsx
index ed1c995e25..be9c4fe49a 100644
--- a/web/app/(commonLayout)/layout.tsx
+++ b/web/app/(commonLayout)/layout.tsx
@@ -9,6 +9,7 @@ import { EventEmitterContextProvider } from '@/context/event-emitter'
 import { ProviderContextProvider } from '@/context/provider-context'
 import { ModalContextProvider } from '@/context/modal-context'
 import GotoAnything from '@/app/components/goto-anything'
+import Zendesk from '@/app/components/base/zendesk'

 const Layout = ({ children }: { children: ReactNode }) => {
   return (
@@ -28,6 +29,7 @@ const Layout = ({ children }: { children: ReactNode }) => {

+      <Zendesk />
     )
diff --git a/web/app/(shareLayout)/components/authenticated-layout.tsx b/web/app/(shareLayout)/components/authenticated-layout.tsx
index e3cfc8e6a8..2185606a6d 100644
--- a/web/app/(shareLayout)/components/authenticated-layout.tsx
+++ b/web/app/(shareLayout)/components/authenticated-layout.tsx
@@ -2,16 +2,17 @@
 import AppUnavailable from '@/app/components/base/app-unavailable'
 import Loading from '@/app/components/base/loading'
-import { removeAccessToken } from '@/app/components/share/utils'
 import { useWebAppStore } from '@/context/web-app-context'
 import { useGetUserCanAccessApp } from '@/service/access-control'
 import { useGetWebAppInfo, useGetWebAppMeta, useGetWebAppParams } from '@/service/use-share'
+import { webAppLogout } from '@/service/webapp-auth'
 import { usePathname, useRouter, useSearchParams } from 'next/navigation'
 import React, { useCallback, useEffect } from 'react'
 import { useTranslation } from 'react-i18next'

 const AuthenticatedLayout = ({ children }: { children: React.ReactNode }) => {
   const { t } = useTranslation()
+  const shareCode = useWebAppStore(s => s.shareCode)
   const updateAppInfo = useWebAppStore(s => s.updateAppInfo)
   const updateAppParams = useWebAppStore(s => s.updateAppParams)
   const updateWebAppMeta = useWebAppStore(s => s.updateWebAppMeta)
@@ -41,11 +42,11 @@ const AuthenticatedLayout = ({ children }: { children: React.ReactNode }) => {
     return `/webapp-signin?${params.toString()}`
   }, [searchParams, pathname])

-  const backToHome = useCallback(() => {
-    removeAccessToken()
+  const backToHome = useCallback(async () => {
+    await webAppLogout(shareCode!)
     const url = getSigninUrl()
     router.replace(url)
-  }, [getSigninUrl, router])
+  }, [getSigninUrl, router, webAppLogout, shareCode])

   if (appInfoError) {
     return
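Note: authenticated-layout.tsx above (and splash.tsx / webapp-signin/page.tsx below) replace the old removeAccessToken() call with an awaited webAppLogout(shareCode) from @/service/webapp-auth. That service module is not part of this diff; the sketch below only captures the contract the call sites appear to assume — the endpoint path and storage key are illustrative assumptions, not code from the patch.

    // Hypothetical sketch of webAppLogout from '@/service/webapp-auth' (not in this diff).
    // Call sites only rely on it returning a Promise and invalidating both the
    // user session and the per-app passport for the given share code.
    export async function webAppLogout(shareCode: string): Promise<void> {
      // The session now lives in cookies, so the backend has to clear it (assumed endpoint).
      await fetch(`/api/passport/logout?app_code=${encodeURIComponent(shareCode)}`, {
        method: 'POST',
        credentials: 'include',
      })
      // Drop any client-side passport cached for this app (assumed storage key).
      localStorage.removeItem(`webapp_passport_${shareCode}`)
    }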
diff --git a/web/app/(shareLayout)/components/splash.tsx b/web/app/(shareLayout)/components/splash.tsx
index 4fe9efe4dd..c30ad68950 100644
--- a/web/app/(shareLayout)/components/splash.tsx
+++ b/web/app/(shareLayout)/components/splash.tsx
@@ -1,26 +1,27 @@
 'use client'
 import type { FC, PropsWithChildren } from 'react'
-import { useEffect } from 'react'
+import { useEffect, useState } from 'react'
 import { useCallback } from 'react'
 import { useWebAppStore } from '@/context/web-app-context'
 import { useRouter, useSearchParams } from 'next/navigation'
 import AppUnavailable from '@/app/components/base/app-unavailable'
-import { checkOrSetAccessToken, removeAccessToken, setAccessToken } from '@/app/components/share/utils'
 import { useTranslation } from 'react-i18next'
+import { webAppLoginStatus, webAppLogout } from '@/service/webapp-auth'
 import { fetchAccessToken } from '@/service/share'
 import Loading from '@/app/components/base/loading'
-import { AccessMode } from '@/models/access-control'
+import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth'

 const Splash: FC = ({ children }) => {
   const { t } = useTranslation()
   const shareCode = useWebAppStore(s => s.shareCode)
   const webAppAccessMode = useWebAppStore(s => s.webAppAccessMode)
+  const embeddedUserId = useWebAppStore(s => s.embeddedUserId)
   const searchParams = useSearchParams()
   const router = useRouter()
   const redirectUrl = searchParams.get('redirect_url')
-  const tokenFromUrl = searchParams.get('web_sso_token')
   const message = searchParams.get('message')
   const code = searchParams.get('code')
+  const tokenFromUrl = searchParams.get('web_sso_token')

   const getSigninUrl = useCallback(() => {
     const params = new URLSearchParams(searchParams)
     params.delete('message')
@@ -28,35 +29,68 @@
     return `/webapp-signin?${params.toString()}`
   }, [searchParams])

-  const backToHome = useCallback(() => {
-    removeAccessToken()
+  const backToHome = useCallback(async () => {
+    await webAppLogout(shareCode!)
     const url = getSigninUrl()
     router.replace(url)
-  }, [getSigninUrl, router])
+  }, [getSigninUrl, router, webAppLogout, shareCode])

+  const [isLoading, setIsLoading] = useState(true)
   useEffect(() => {
+    if (message) {
+      setIsLoading(false)
+      return
+    }
+
+    if(tokenFromUrl)
+      setWebAppAccessToken(tokenFromUrl)
+
+    const redirectOrFinish = () => {
+      if (redirectUrl)
+        router.replace(decodeURIComponent(redirectUrl))
+      else
+        setIsLoading(false)
+    }
+
+    const proceedToAuth = () => {
+      setIsLoading(false)
+    }
+
     (async () => {
-      if (message)
-        return
-      if (shareCode && tokenFromUrl && redirectUrl) {
-        localStorage.setItem('webapp_access_token', tokenFromUrl)
-        const tokenResp = await fetchAccessToken({ appCode: shareCode, webAppAccessToken: tokenFromUrl })
-        await setAccessToken(shareCode, tokenResp.access_token)
-        router.replace(decodeURIComponent(redirectUrl))
-        return
+      // if access mode is public, user login is always true, but the app login(passport) may be expired
+      const { userLoggedIn, appLoggedIn } = await webAppLoginStatus(shareCode!)
+      if (userLoggedIn && appLoggedIn) {
+        redirectOrFinish()
       }
-      if (shareCode && redirectUrl && localStorage.getItem('webapp_access_token')) {
-        const tokenResp = await fetchAccessToken({ appCode: shareCode, webAppAccessToken: localStorage.getItem('webapp_access_token') })
-        await setAccessToken(shareCode, tokenResp.access_token)
-        router.replace(decodeURIComponent(redirectUrl))
-        return
+      else if (!userLoggedIn && !appLoggedIn) {
+        proceedToAuth()
       }
-      if (webAppAccessMode === AccessMode.PUBLIC && redirectUrl) {
-        await checkOrSetAccessToken(shareCode)
-        router.replace(decodeURIComponent(redirectUrl))
+      else if (!userLoggedIn && appLoggedIn) {
+        redirectOrFinish()
+      }
+      else if (userLoggedIn && !appLoggedIn) {
+        try {
+          const { access_token } = await fetchAccessToken({
+            appCode: shareCode!,
+            userId: embeddedUserId || undefined,
+          })
+          setWebAppPassport(shareCode!, access_token)
+          redirectOrFinish()
+        }
+        catch (error) {
+          await webAppLogout(shareCode!)
+          proceedToAuth()
+        }
       }
     })()
-  }, [shareCode, redirectUrl, router, tokenFromUrl, message, webAppAccessMode])
+  }, [
+    shareCode,
+    redirectUrl,
+    router,
+    message,
+    webAppAccessMode,
+    tokenFromUrl,
+    embeddedUserId])

   if (message) {
     return
@@ -64,12 +98,8 @@ const Splash: FC = ({ children }) => {
           {code === '403' ? t('common.userProfile.logout') : t('share.login.backToHome')}

   }
-  if (tokenFromUrl) {
-    return
-
-
-  }
-  if (webAppAccessMode === AccessMode.PUBLIC && redirectUrl) {
+
+  if (isLoading) {
     return
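Note: the rewritten splash effect is easiest to read as a decision table over the two booleans returned by webAppLoginStatus. The helper below just restates the branching from the hunk above in one place; the function name and types are illustrative, not part of the patch.

    // The four (userLoggedIn, appLoggedIn) states handled by the splash effect above.
    type LoginStatus = { userLoggedIn: boolean; appLoggedIn: boolean }
    type SplashAction = 'redirect' | 'show-signin' | 'exchange-passport'

    function nextAction({ userLoggedIn, appLoggedIn }: LoginStatus): SplashAction {
      if (userLoggedIn && !appLoggedIn)
        return 'exchange-passport' // fetchAccessToken + setWebAppPassport, then redirectOrFinish()
      if (!userLoggedIn && !appLoggedIn)
        return 'show-signin' // proceedToAuth(): fall through to the sign-in form
      return 'redirect' // fully logged in, or passport-only: redirectOrFinish()
    }

If the passport exchange throws, the effect logs the user out via webAppLogout and falls back to the sign-in form, matching the catch branch above.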
diff --git a/web/app/(shareLayout)/webapp-signin/check-code/page.tsx b/web/app/(shareLayout)/webapp-signin/check-code/page.tsx
index 3fc32fec71..69131cdabe 100644
--- a/web/app/(shareLayout)/webapp-signin/check-code/page.tsx
+++ b/web/app/(shareLayout)/webapp-signin/check-code/page.tsx
@@ -10,8 +10,9 @@ import Input from '@/app/components/base/input'
 import Toast from '@/app/components/base/toast'
 import { sendWebAppEMailLoginCode, webAppEmailLoginWithCode } from '@/service/common'
 import I18NContext from '@/context/i18n'
-import { setAccessToken } from '@/app/components/share/utils'
+import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth'
 import { fetchAccessToken } from '@/service/share'
+import { useWebAppStore } from '@/context/web-app-context'

 export default function CheckCode() {
   const { t } = useTranslation()
@@ -23,6 +24,7 @@ export default function CheckCode() {
   const [loading, setIsLoading] = useState(false)
   const { locale } = useContext(I18NContext)
   const redirectUrl = searchParams.get('redirect_url')
+  const embeddedUserId = useWebAppStore(s => s.embeddedUserId)

   const getAppCodeFromRedirectUrl = useCallback(() => {
     if (!redirectUrl)
@@ -62,9 +64,12 @@ export default function CheckCode() {
       setIsLoading(true)
       const ret = await webAppEmailLoginWithCode({ email, code, token })
       if (ret.result === 'success') {
-        localStorage.setItem('webapp_access_token', ret.data.access_token)
-        const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: ret.data.access_token })
-        await setAccessToken(appCode, tokenResp.access_token)
+        setWebAppAccessToken(ret.data.access_token)
+        const { access_token } = await fetchAccessToken({
+          appCode: appCode!,
+          userId: embeddedUserId || undefined,
+        })
+        setWebAppPassport(appCode!, access_token)
         router.replace(decodeURIComponent(redirectUrl))
       }
     }
diff --git a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx
index 2201b28a2f..0136445ac9 100644
--- a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx
+++ b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx
@@ -10,16 +10,15 @@ import { emailRegex } from '@/config'
 import { webAppLogin } from '@/service/common'
 import Input from '@/app/components/base/input'
 import I18NContext from '@/context/i18n'
+import { useWebAppStore } from '@/context/web-app-context'
 import { noop } from 'lodash-es'
-import { setAccessToken } from '@/app/components/share/utils'
 import { fetchAccessToken } from '@/service/share'
+import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth'

 type MailAndPasswordAuthProps = {
   isEmailSetup: boolean
 }

-const passwordRegex = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/
-
 export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAuthProps) {
   const { t } = useTranslation()
   const { locale } = useContext(I18NContext)
@@ -32,6 +31,7 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
   const [isLoading, setIsLoading] = useState(false)

   const redirectUrl = searchParams.get('redirect_url')
+  const embeddedUserId = useWebAppStore(s => s.embeddedUserId)

   const getAppCodeFromRedirectUrl = useCallback(() => {
     if (!redirectUrl)
@@ -43,8 +43,8 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
     return appCode
   }, [redirectUrl])

+  const appCode = getAppCodeFromRedirectUrl()
   const handleEmailPasswordLogin = async () => {
-    const appCode = getAppCodeFromRedirectUrl()
     if (!email) {
       Toast.notify({ type: 'error', message: t('login.error.emailEmpty') })
       return
@@ -60,13 +60,7 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
       Toast.notify({ type: 'error', message: t('login.error.passwordEmpty') })
       return
     }
-    if (!passwordRegex.test(password)) {
-      Toast.notify({
-        type: 'error',
-        message: t('login.error.passwordInvalid'),
-      })
-      return
-    }
+
     if (!redirectUrl || !appCode) {
       Toast.notify({
         type: 'error',
@@ -88,9 +82,13 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
         body: loginData,
       })
       if (res.result === 'success') {
-        localStorage.setItem('webapp_access_token', res.data.access_token)
-        const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: res.data.access_token })
-        await setAccessToken(appCode, tokenResp.access_token)
+        setWebAppAccessToken(res.data.access_token)
+
+        const { access_token } = await fetchAccessToken({
+          appCode: appCode!,
+          userId: embeddedUserId || undefined,
+        })
+        setWebAppPassport(appCode!, access_token)
         router.replace(decodeURIComponent(redirectUrl))
       }
       else {
@@ -100,7 +98,10 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
         })
       }
     }
-
+    catch (e: any) {
+      if (e.code === 'authentication_failed')
+        Toast.notify({ type: 'error', message: e.message })
+    }
     finally {
       setIsLoading(false)
     }
@@ -138,9 +139,9 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut

             setPassword(e.target.value)}
+            id="password"
           onKeyDown={(e) => {
             if (e.key === 'Enter')
               handleEmailPasswordLogin()
diff --git a/web/app/(shareLayout)/webapp-signin/page.tsx b/web/app/(shareLayout)/webapp-signin/page.tsx
index 1c6209b902..2ffa19c0c9 100644
--- a/web/app/(shareLayout)/webapp-signin/page.tsx
+++ b/web/app/(shareLayout)/webapp-signin/page.tsx
@@ -3,13 +3,13 @@ import { useRouter, useSearchParams } from 'next/navigation'
 import type { FC } from 'react'
 import React, { useCallback } from 'react'
 import { useTranslation } from 'react-i18next'
-import { removeAccessToken } from '@/app/components/share/utils'
 import { useGlobalPublicStore } from '@/context/global-public-context'
 import AppUnavailable from '@/app/components/base/app-unavailable'
 import NormalForm from './normalForm'
 import { AccessMode } from '@/models/access-control'
 import ExternalMemberSsoAuth from './components/external-member-sso-auth'
 import { useWebAppStore } from '@/context/web-app-context'
+import { webAppLogout } from '@/service/webapp-auth'

 const WebSSOForm: FC = () => {
   const { t } = useTranslation()
@@ -26,11 +26,12 @@ const WebSSOForm: FC = () => {
     return `/webapp-signin?${params.toString()}`
   }, [redirectUrl])

-  const backToHome = useCallback(() => {
-    removeAccessToken()
+  const shareCode = useWebAppStore(s => s.shareCode)
+  const backToHome = useCallback(async () => {
+    await webAppLogout(shareCode!)
     const url = getSigninUrl()
     router.replace(url)
-  }, [getSigninUrl, router])
+  }, [getSigninUrl, router, webAppLogout, shareCode])

   if (!redirectUrl) {
     return
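Note: check-code/page.tsx and mail-and-password-auth.tsx now inline the same three-step completion (store the user token, exchange it for an app passport, store the passport). A shared helper along these lines — hypothetical, not part of the patch — would capture the repeated pattern once:

    import { fetchAccessToken } from '@/service/share'
    import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth'

    // Illustrative helper; both sign-in flows above perform exactly these steps.
    async function completeWebAppLogin(appCode: string, userAccessToken: string, embeddedUserId?: string): Promise<void> {
      // 1. Persist the user-level token issued by the login endpoint.
      setWebAppAccessToken(userAccessToken)
      // 2. Exchange it for an app-scoped passport.
      const { access_token } = await fetchAccessToken({
        appCode,
        userId: embeddedUserId || undefined,
      })
      // 3. Persist the passport before redirecting back to the app.
      setWebAppPassport(appCode, access_token)
    }

Callers would then router.replace(decodeURIComponent(redirectUrl)) exactly as both hunks do today.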
diff --git a/web/app/account/(commonLayout)/account-page/email-change-modal.tsx b/web/app/account/(commonLayout)/account-page/email-change-modal.tsx
index bd00f27ac5..d04cd18557 100644
--- a/web/app/account/(commonLayout)/account-page/email-change-modal.tsx
+++ b/web/app/account/(commonLayout)/account-page/email-change-modal.tsx
@@ -9,7 +9,6 @@ import Button from '@/app/components/base/button'
 import Input from '@/app/components/base/input'
 import {
   checkEmailExisted,
-  logout,
   resetEmail,
   sendVerifyCode,
   verifyEmail,
@@ -17,6 +16,7 @@ import {
 import { noop } from 'lodash-es'
 import { asyncRunSafe } from '@/utils'
 import type { ResponseError } from '@/service/fetch'
+import { useLogout } from '@/service/use-common'

 type Props = {
   show: boolean
@@ -167,15 +167,12 @@ const EmailChangeModal = ({ onClose, email, show }: Props) => {
     setStep(STEP.verifyNew)
   }

+  const { mutateAsync: logout } = useLogout()
   const handleLogout = async () => {
-    await logout({
-      url: '/logout',
-      params: {},
-    })
+    await logout()
     localStorage.removeItem('setup_status')
-    localStorage.removeItem('console_token')
-    localStorage.removeItem('refresh_token')
+    // Tokens are now stored in cookies and cleared by backend
     router.push('/signin')
   }
diff --git a/web/app/account/(commonLayout)/avatar.tsx b/web/app/account/(commonLayout)/avatar.tsx
index ea897e639f..d8943b7879 100644
--- a/web/app/account/(commonLayout)/avatar.tsx
+++ b/web/app/account/(commonLayout)/avatar.tsx
@@ -7,11 +7,11 @@ import {
 } from '@remixicon/react'
 import { Menu, MenuButton, MenuItem, MenuItems, Transition } from '@headlessui/react'
 import Avatar from '@/app/components/base/avatar'
-import { logout } from '@/service/common'
 import { useAppContext } from '@/context/app-context'
 import { useProviderContext } from '@/context/provider-context'
 import { LogOut01 } from '@/app/components/base/icons/src/vender/line/general'
 import PremiumBadge from '@/app/components/base/premium-badge'
+import { useLogout } from '@/service/use-common'

 export type IAppSelector = {
   isMobile: boolean
@@ -23,15 +23,12 @@ export default function AppSelector() {
   const { userProfile } = useAppContext()
   const { isEducationAccount } = useProviderContext()

+  const { mutateAsync: logout } = useLogout()
   const handleLogout = async () => {
-    await logout({
-      url: '/logout',
-      params: {},
-    })
+    await logout()
     localStorage.removeItem('setup_status')
-    localStorage.removeItem('console_token')
-    localStorage.removeItem('refresh_token')
+    // Tokens are now stored in cookies and cleared by backend
     router.push('/signin')
   }
diff --git a/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx b/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx
index 2cd30bc3f2..64a378d2fe 100644
--- a/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx
+++ b/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx
@@ -8,7 +8,7 @@ import Button from '@/app/components/base/button'
 import CustomDialog from '@/app/components/base/dialog'
 import Textarea from '@/app/components/base/textarea'
 import Toast from '@/app/components/base/toast'
-import { logout } from '@/service/common'
+import { useLogout } from '@/service/use-common'

 type DeleteAccountProps = {
   onCancel: () => void
@@ -22,14 +22,11 @@ export default function FeedBack(props: DeleteAccountProps) {
   const [userFeedback, setUserFeedback] = useState('')
   const { isPending, mutateAsync: sendFeedback } = useDeleteAccountFeedback()

+  const { mutateAsync: logout } = useLogout()
   const handleSuccess = useCallback(async () => {
     try {
-      await logout({
-        url: '/logout',
-        params: {},
-      })
-      localStorage.removeItem('refresh_token')
-      localStorage.removeItem('console_token')
+      await logout()
+      // Tokens are now stored in cookies and cleared by backend
       router.push('/signin')
       Toast.notify({ type: 'info', message: t('common.account.deleteSuccessTip') })
     }
diff --git a/web/app/account/oauth/authorize/constants.ts b/web/app/account/oauth/authorize/constants.ts
new file mode 100644
index 0000000000..f1d8b98ef4
--- /dev/null
+++ b/web/app/account/oauth/authorize/constants.ts
@@ -0,0 +1,3 @@
+export const OAUTH_AUTHORIZE_PENDING_KEY = 'oauth_authorize_pending'
+export const REDIRECT_URL_KEY = 'oauth_redirect_url'
+export const OAUTH_AUTHORIZE_PENDING_TTL = 60 * 3
diff --git a/web/app/account/oauth/authorize/layout.tsx b/web/app/account/oauth/authorize/layout.tsx
index 078d23114a..2ab676d6b6 100644
--- a/web/app/account/oauth/authorize/layout.tsx
+++ b/web/app/account/oauth/authorize/layout.tsx
@@ -5,17 +5,22 @@ import cn from '@/utils/classnames'
 import { useGlobalPublicStore } from '@/context/global-public-context'
 import useDocumentTitle from '@/hooks/use-document-title'
 import { AppContextProvider } from '@/context/app-context'
-import { useMemo } from 'react'
+import { useIsLogin } from '@/service/use-common'
+import Loading from '@/app/components/base/loading'

 export default function SignInLayout({ children }: any) {
   const { systemFeatures } = useGlobalPublicStore()
   useDocumentTitle('')
-  const isLoggedIn = useMemo(() => {
-    try {
-      return Boolean(localStorage.getItem('console_token') && localStorage.getItem('refresh_token'))
-    }
-    catch { return false }
-  }, [])
+  const { isLoading, data: loginData } = useIsLogin()
+  const isLoggedIn = loginData?.logged_in
+
+  if(isLoading) {
+    return (
+
+
+    )
+  }

   return <>
diff --git a/web/app/account/oauth/authorize/page.tsx b/web/app/account/oauth/authorize/page.tsx
index 6ad63996ae..c9b26b97c1 100644
--- a/web/app/account/oauth/authorize/page.tsx
+++ b/web/app/account/oauth/authorize/page.tsx
@@ -1,6 +1,6 @@
 'use client'

-import React, { useEffect, useMemo, useRef } from 'react'
+import React, { useEffect, useRef } from 'react'
 import { useTranslation } from 'react-i18next'
 import { useRouter, useSearchParams } from 'next/navigation'
 import Button from '@/app/components/base/button'
@@ -18,11 +18,12 @@ import {
   RiTranslate2,
 } from '@remixicon/react'
 import dayjs from 'dayjs'
-
-export const OAUTH_AUTHORIZE_PENDING_KEY = 'oauth_authorize_pending'
-export const REDIRECT_URL_KEY = 'oauth_redirect_url'
-
-const OAUTH_AUTHORIZE_PENDING_TTL = 60 * 3
+import { useIsLogin } from '@/service/use-common'
+import {
+  OAUTH_AUTHORIZE_PENDING_KEY,
+  OAUTH_AUTHORIZE_PENDING_TTL,
+  REDIRECT_URL_KEY,
+} from './constants'

 function setItemWithExpiry(key: string, value: string, ttl: number) {
   const item = {
@@ -74,17 +75,13 @@ export default function OAuthAuthorize() {
   const client_id = decodeURIComponent(searchParams.get('client_id') || '')
   const redirect_uri = decodeURIComponent(searchParams.get('redirect_uri') || '')
   const { userProfile } = useAppContext()
-  const { data: authAppInfo, isLoading, isError } = useOAuthAppInfo(client_id, redirect_uri)
+  const { data: authAppInfo, isLoading: isOAuthLoading, isError } = useOAuthAppInfo(client_id, redirect_uri)
   const { mutateAsync: authorize, isPending: authorizing } = useAuthorizeOAuthApp()
   const hasNotifiedRef = useRef(false)
-  const isLoggedIn = useMemo(() => {
-    try {
-      return Boolean(localStorage.getItem('console_token') && localStorage.getItem('refresh_token'))
-    }
-    catch { return false }
-  }, [])
-
+  const { isLoading: isIsLoginLoading, data: loginData } = useIsLogin()
+  const isLoggedIn = loginData?.logged_in
+  const isLoading = isOAuthLoading || isIsLoginLoading
   const onLoginSwitchClick = () => {
     try {
       const returnUrl = buildReturnUrl('/account/oauth/authorize', `?client_id=${encodeURIComponent(client_id)}&redirect_uri=${encodeURIComponent(redirect_uri)}`)
diff --git a/web/app/components/app-sidebar/dataset-info/menu.tsx b/web/app/components/app-sidebar/dataset-info/menu.tsx
index fd560ce643..6f91c9c513 100644
--- a/web/app/components/app-sidebar/dataset-info/menu.tsx
+++ b/web/app/components/app-sidebar/dataset-info/menu.tsx
@@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next'
 import MenuItem from './menu-item'
 import { RiDeleteBinLine, RiEditLine, RiFileDownloadLine } from '@remixicon/react'
 import Divider from '../../base/divider'
+import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'

 type MenuProps = {
   showDelete: boolean
@@ -18,6 +19,7 @@ const Menu = ({
   detectIsUsedByApp,
 }: MenuProps) => {
   const { t } = useTranslation()
+  const runtimeMode = useDatasetDetailContextWithSelector(state => state.dataset?.runtime_mode)

   return (
@@ -27,11 +29,13 @@ const Menu = ({
         name={t('common.operation.edit')}
         handleClick={openRenameModal}
       />
-
+      {runtimeMode === 'rag_pipeline' && (
+
+      )}

       {showDelete && (
         <>
diff --git a/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx b/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx
index dfc8d10087..b98eb815f9 100644
--- a/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx
+++ b/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx
@@ -28,7 +28,8 @@ const CSVUploader: FC = ({
   const handleDragEnter = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target !== dragRef.current && setDragging(true)
+    if (e.target !== dragRef.current)
+      setDragging(true)
   }
   const handleDragOver = (e: DragEvent) => {
     e.preventDefault()
@@ -37,7 +38,8 @@ const CSVUploader: FC = ({
   const handleDragLeave = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target === dragRef.current && setDragging(false)
+    if (e.target === dragRef.current)
+      setDragging(false)
   }
   const handleDrop = (e: DragEvent) => {
     e.preventDefault()
diff --git a/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx b/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx
index 17cb456558..e808d0b48a 100644
--- a/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx
+++ b/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx
@@ -1,6 +1,6 @@
 'use client'
 import type { FC } from 'react'
-import React, { useState } from 'react'
+import React, { useEffect, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { RiDeleteBinLine, RiEditFill, RiEditLine } from '@remixicon/react'
 import { Robot, User } from '@/app/components/base/icons/src/public/avatar'
@@ -16,7 +16,7 @@ type Props = {
   type: EditItemType
   content: string
   readonly?: boolean
-  onSave: (content: string) => void
+  onSave: (content: string) => Promise<void>
 }

 export const EditTitle: FC<{ className?: string; title: string }> = ({ className, title }) => (
@@ -46,8 +46,13 @@ const EditItem: FC = ({
   const placeholder = type === EditItemType.Query ? t('appAnnotation.editModal.queryPlaceholder') : t('appAnnotation.editModal.answerPlaceholder')
   const [isEdit, setIsEdit] = useState(false)

-  const handleSave = () => {
-    onSave(newContent)
+  // Reset newContent when content prop changes
+  useEffect(() => {
+    setNewContent('')
+  }, [content])
+
+  const handleSave = async () => {
+    await onSave(newContent)
     setIsEdit(false)
   }
diff --git a/web/app/components/app/annotation/index.tsx b/web/app/components/app/annotation/index.tsx
index afa8732701..bc63b85f6d 100644
--- a/web/app/components/app/annotation/index.tsx
+++ b/web/app/components/app/annotation/index.tsx
@@ -38,7 +38,7 @@ const Annotation: FC = (props) => {
   const [isShowEdit, setIsShowEdit] = useState(false)
   const [annotationConfig, setAnnotationConfig] = useState(null)
   const [isChatApp] = useState(appDetail.mode !== 'completion')
-  const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now())
+  const [controlRefreshSwitch, setControlRefreshSwitch] = useState(() => Date.now())
   const { plan, enableBilling } = useProviderContext()
   const isAnnotationFull = enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse
   const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false)
@@ -48,12 +48,11 @@ const Annotation: FC = (props) => {
   const [list, setList] = useState([])
   const [total, setTotal] = useState(0)
   const [isLoading, setIsLoading] = useState(false)
-  const [controlUpdateList, setControlUpdateList] = useState(Date.now())
+  const [controlUpdateList, setControlUpdateList] = useState(() => Date.now())
   const [currItem, setCurrItem] = useState(null)
   const [isShowViewModal, setIsShowViewModal] = useState(false)
   const [selectedIds, setSelectedIds] = useState([])
   const debouncedQueryParams = useDebounce(queryParams, { wait: 500 })
-  const [isBatchDeleting, setIsBatchDeleting] = useState(false)

   const fetchAnnotationConfig = async () => {
     const res = await doFetchAnnotationConfig(appDetail.id)
@@ -108,9 +107,6 @@ const Annotation: FC = (props) => {
   }

   const handleBatchDelete = async () => {
-    if (isBatchDeleting)
-      return
-    setIsBatchDeleting(true)
     try {
       await delAnnotations(appDetail.id, selectedIds)
       Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' })
@@ -121,9 +117,6 @@ const Annotation: FC = (props) => {
     catch (e: any) {
       Toast.notify({ type: 'error', message: e.message || t('common.api.actionFailed') })
     }
-    finally {
-      setIsBatchDeleting(false)
-    }
   }

   const handleView = (item: AnnotationItem) => {
@@ -213,7 +206,6 @@ const Annotation: FC = (props) => {
             onSelectedIdsChange={setSelectedIds}
             onBatchDelete={handleBatchDelete}
             onCancel={() => setSelectedIds([])}
-            isBatchDeleting={isBatchDeleting}
           />
           :
 }
diff --git a/web/app/components/app/annotation/list.tsx b/web/app/components/app/annotation/list.tsx
index 6705ac5768..70ecedb869 100644
--- a/web/app/components/app/annotation/list.tsx
+++ b/web/app/components/app/annotation/list.tsx
@@ -19,7 +19,6 @@ type Props = {
   onSelectedIdsChange: (selectedIds: string[]) => void
   onBatchDelete: () => Promise<void>
   onCancel: () => void
-  isBatchDeleting?: boolean
 }

 const List: FC = ({
@@ -30,7 +29,6 @@ const List: FC = ({
   onSelectedIdsChange,
   onBatchDelete,
   onCancel,
-  isBatchDeleting,
 }) => {
   const { t } = useTranslation()
   const { formatTime } = useTimestamp()
@@ -142,7 +140,6 @@ const List: FC = ({
           selectedIds={selectedIds}
           onBatchDelete={onBatchDelete}
           onCancel={onCancel}
-          isBatchDeleting={isBatchDeleting}
         />
       )}
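Note: with onSave and onBatchDelete now typed as Promise-returning props, children can await the action and derive their own pending state, which is why the hand-threaded isBatchDeleting prop could be deleted above. One idiomatic way to keep the same double-click protection locally — a sketch, not code from this patch; the actual batch-actions component is not shown here:

    import { useState } from 'react'

    // Generic pending-state wrapper for an async action prop.
    function useAsyncAction<Args extends unknown[]>(action: (...args: Args) => Promise<void>) {
      const [pending, setPending] = useState(false)
      const run = async (...args: Args) => {
        if (pending)
          return // ignore re-entrant clicks while the action is in flight
        setPending(true)
        try {
          await action(...args)
        }
        finally {
          setPending(false)
        }
      }
      return { pending, run }
    }

A batch-actions bar could call const { pending, run } = useAsyncAction(onBatchDelete) and disable its button while pending is true.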
diff --git a/web/app/components/app/annotation/view-annotation-modal/index.tsx b/web/app/components/app/annotation/view-annotation-modal/index.tsx
index 08904d23d4..8426ab0005 100644
--- a/web/app/components/app/annotation/view-annotation-modal/index.tsx
+++ b/web/app/components/app/annotation/view-annotation-modal/index.tsx
@@ -21,7 +21,7 @@ type Props = {
   isShow: boolean
   onHide: () => void
   item: AnnotationItem
-  onSave: (editedQuery: string, editedAnswer: string) => void
+  onSave: (editedQuery: string, editedAnswer: string) => Promise<void>
   onRemove: () => void
 }
@@ -46,6 +46,16 @@ const ViewAnnotationModal: FC = ({
   const [currPage, setCurrPage] = React.useState(0)
   const [total, setTotal] = useState(0)
   const [hitHistoryList, setHitHistoryList] = useState([])
+
+  // Update local state when item prop changes (e.g., when modal is reopened with updated data)
+  useEffect(() => {
+    setNewQuery(question)
+    setNewAnswer(answer)
+    setCurrPage(0)
+    setTotal(0)
+    setHitHistoryList([])
+  }, [question, answer, id])
+
   const fetchHitHistory = async (page = 1) => {
     try {
       const { data, total }: any = await fetchHitHistoryList(appId, id, {
@@ -63,6 +73,12 @@ const ViewAnnotationModal: FC = ({
     fetchHitHistory(currPage + 1)
   }, [currPage])

+  // Fetch hit history when item changes
+  useEffect(() => {
+    if (isShow && id)
+      fetchHitHistory(1)
+  }, [id, isShow])
+
   const tabs = [
     { value: TabType.annotation, text: t('appAnnotation.viewModal.annotatedResponse') },
     {
@@ -82,14 +98,20 @@ const ViewAnnotationModal: FC = ({
     },
   ]
   const [activeTab, setActiveTab] = useState(TabType.annotation)
-  const handleSave = (type: EditItemType, editedContent: string) => {
-    if (type === EditItemType.Query) {
-      setNewQuery(editedContent)
-      onSave(editedContent, newAnswer)
+  const handleSave = async (type: EditItemType, editedContent: string) => {
+    try {
+      if (type === EditItemType.Query) {
+        await onSave(editedContent, newAnswer)
+        setNewQuery(editedContent)
+      }
+      else {
+        await onSave(newQuestion, editedContent)
+        setNewAnswer(editedContent)
+      }
     }
-    else {
-      setNewAnswer(editedContent)
-      onSave(newQuestion, editedContent)
+    catch (error) {
+      // If save fails, don't update local state
+      console.error('Failed to save annotation:', error)
     }
   }
   const [showModal, setShowModal] = useState(false)
diff --git a/web/app/components/app/app-access-control/access-control-dialog.tsx b/web/app/components/app/app-access-control/access-control-dialog.tsx
index 479eedc9cf..ee3fa9650b 100644
--- a/web/app/components/app/app-access-control/access-control-dialog.tsx
+++ b/web/app/components/app/app-access-control/access-control-dialog.tsx
@@ -22,7 +22,7 @@ const AccessControlDialog = ({
 }, [onClose])

 return (
-  null}>
+  null}>
-
+
diff --git a/web/app/components/app/app-access-control/add-member-or-group-pop.tsx b/web/app/components/app/app-access-control/add-member-or-group-pop.tsx
index 0fad6cc740..e9519aeedf 100644
--- a/web/app/components/app/app-access-control/add-member-or-group-pop.tsx
+++ b/web/app/components/app/app-access-control/add-member-or-group-pop.tsx
@@ -52,7 +52,7 @@ export default function AddMemberOrGroupDialog() {
       {open && }
-
+
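Note: the next several files switch `import produce from 'immer'` to `import { produce } from 'immer'`, which matches immer v10 dropping its default export in favor of named exports. Behavior is unchanged; only the import form differs:

    import { produce } from 'immer'

    const state = { features: { moreLikeThis: false } }
    // produce works exactly as before; only the import form changed.
    const next = produce(state, (draft) => {
      draft.features.moreLikeThis = true
    })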
diff --git a/web/app/components/app/app-publisher/features-wrapper.tsx b/web/app/components/app/app-publisher/features-wrapper.tsx
index dadd112135..409c390f4b 100644
--- a/web/app/components/app/app-publisher/features-wrapper.tsx
+++ b/web/app/components/app/app-publisher/features-wrapper.tsx
@@ -1,6 +1,6 @@
 import React, { useCallback, useState } from 'react'
 import { useTranslation } from 'react-i18next'
-import produce from 'immer'
+import { produce } from 'immer'
 import type { AppPublisherProps } from '@/app/components/app/app-publisher'
 import Confirm from '@/app/components/base/confirm'
 import AppPublisher from '@/app/components/app/app-publisher'
diff --git a/web/app/components/app/app-publisher/index.tsx b/web/app/components/app/app-publisher/index.tsx
index 53cceb8020..d3306ac141 100644
--- a/web/app/components/app/app-publisher/index.tsx
+++ b/web/app/components/app/app-publisher/index.tsx
@@ -44,7 +44,7 @@ import { appDefaultIconBackground } from '@/config'
 import type { PublishWorkflowParams } from '@/types/workflow'
 import { useAppWhiteListSubjects, useGetUserCanAccessApp } from '@/service/access-control'
 import { AccessMode } from '@/models/access-control'
-import { fetchAppDetail } from '@/service/apps'
+import { fetchAppDetailDirect } from '@/service/apps'
 import { useGlobalPublicStore } from '@/context/global-public-context'
 import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'
@@ -162,11 +162,16 @@ const AppPublisher = ({
     }
   }, [appDetail?.id])

-  const handleAccessControlUpdate = useCallback(() => {
-    fetchAppDetail({ url: '/apps', id: appDetail!.id }).then((res) => {
+  const handleAccessControlUpdate = useCallback(async () => {
+    if (!appDetail)
+      return
+    try {
+      const res = await fetchAppDetailDirect({ url: '/apps', id: appDetail.id })
       setAppDetail(res)
+    }
+    finally {
       setShowAppAccessControl(false)
-    })
+    }
   }, [appDetail, setAppDetail])

   const [embeddingModalOpen, setEmbeddingModalOpen] = useState(false)
@@ -348,7 +353,8 @@ const AppPublisher = ({
 {
-              publishedAt && handleOpenInExplore()
+              if (publishedAt)
+                handleOpenInExplore()
             }}
             disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && !userCanAccessApp?.result)}
             icon={}
diff --git a/web/app/components/app/app-publisher/version-info-modal.tsx b/web/app/components/app/app-publisher/version-info-modal.tsx
index 4d5d3705c1..263f187736 100644
--- a/web/app/components/app/app-publisher/version-info-modal.tsx
+++ b/web/app/components/app/app-publisher/version-info-modal.tsx
@@ -40,7 +40,8 @@ const VersionInfoModal: FC = ({
       return
     }
     else {
-      titleError && setTitleError(false)
+      if (titleError)
+        setTitleError(false)
     }

     if (releaseNotes.length > RELEASE_NOTES_MAX_LENGTH) {
@@ -52,7 +53,8 @@ const VersionInfoModal: FC = ({
       return
     }
     else {
-      releaseNotesError && setReleaseNotesError(false)
+      if (releaseNotesError)
+        setReleaseNotesError(false)
     }

     onPublish({ title, releaseNotes, id: versionInfo?.id })
diff --git a/web/app/components/app/configuration/base/icons/more-like-this-icon.tsx b/web/app/components/app/configuration/base/icons/more-like-this-icon.tsx
deleted file mode 100644
index 74c808eb39..0000000000
--- a/web/app/components/app/configuration/base/icons/more-like-this-icon.tsx
+++ /dev/null
@@ -1,14 +0,0 @@
-'use client'
-import type { FC } from 'react'
-import React from 'react'
-
-const MoreLikeThisIcon: FC = () => {
-  return (
-
-
-
-
-  )
-}
-export default React.memo(MoreLikeThisIcon)
diff --git a/web/app/components/app/configuration/base/icons/suggested-questions-after-answer-icon.tsx b/web/app/components/app/configuration/base/icons/suggested-questions-after-answer-icon.tsx
deleted file mode 100644
index cabc2e4d73..0000000000
--- a/web/app/components/app/configuration/base/icons/suggested-questions-after-answer-icon.tsx
+++ /dev/null
@@ -1,12 +0,0 @@
-'use client'
-import type { FC } from 'react'
-import React from 'react'
-
-const SuggestedQuestionsAfterAnswerIcon: FC = () => {
-  return (
-
-
-  )
-}
-export default React.memo(SuggestedQuestionsAfterAnswerIcon)
diff --git a/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx b/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx
index e2d37bb9de..aa8d0f65ca 100644
--- a/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx
+++ b/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx
@@ -5,7 +5,7 @@ import copy from 'copy-to-clipboard'
 import { useTranslation } from 'react-i18next'
 import { useContext } from 'use-context-selector'
 import { useBoolean } from 'ahooks'
-import produce from 'immer'
+import { produce } from 'immer'
 import {
   RiDeleteBinLine,
   RiErrorWarningFill,
@@ -78,7 +78,9 @@ const AdvancedPromptInput: FC = ({
   const handleOpenExternalDataToolModal = () => {
     setShowExternalDataToolModal({
       payload: {},
-      onSaveCallback: (newExternalDataTool: ExternalDataTool) => {
+      onSaveCallback: (newExternalDataTool?: ExternalDataTool) => {
+        if (!newExternalDataTool)
+          return
         eventEmitter?.emit({
           type: ADD_EXTERNAL_DATA_TOOL,
           payload: newExternalDataTool,
diff --git a/web/app/components/app/configuration/config-prompt/index.tsx b/web/app/components/app/configuration/config-prompt/index.tsx
index 1caca47bcb..ec34588e41 100644
--- a/web/app/components/app/configuration/config-prompt/index.tsx
+++ b/web/app/components/app/configuration/config-prompt/index.tsx
@@ -2,7 +2,7 @@ import type { FC } from 'react'
 import React from 'react'
 import { useContext } from 'use-context-selector'
-import produce from 'immer'
+import { produce } from 'immer'
 import {
   RiAddLine,
 } from '@remixicon/react'
diff --git a/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx b/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx
index 1457a298f2..9e10db93ae 100644
--- a/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx
+++ b/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx
@@ -25,7 +25,7 @@ const PromptEditorHeightResizeWrap: FC = ({
 }) => {
   const [clientY, setClientY] = useState(0)
   const [isResizing, setIsResizing] = useState(false)
-  const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(getComputedStyle(document.body).userSelect)
+  const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(() => getComputedStyle(document.body).userSelect)
   const [oldHeight, setOldHeight] = useState(height)

   const handleStartResize = useCallback((e: React.MouseEvent) => {
diff --git a/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx b/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx
index a7bdc550d1..8634232b2b 100644
--- a/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx
+++ b/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx
@@ -3,7 +3,7 @@ import type { FC } from 'react'
 import React, { useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { useBoolean } from 'ahooks'
-import produce from 'immer'
+import { produce } from 'immer'
 import { useContext } from 'use-context-selector'
 import ConfirmAddVar from './confirm-add-var'
 import PromptEditorHeightResizeWrap from './prompt-editor-height-resize-wrap'
@@ -76,7 +76,9 @@ const Prompt: FC = ({
   const handleOpenExternalDataToolModal = () => {
     setShowExternalDataToolModal({
       payload: {},
-      onSaveCallback: (newExternalDataTool: ExternalDataTool) => {
+      onSaveCallback: (newExternalDataTool?: ExternalDataTool) => {
+        if (!newExternalDataTool)
+          return
         eventEmitter?.emit({
           type: ADD_EXTERNAL_DATA_TOOL,
           payload: newExternalDataTool,
diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx
index cecc076fe7..3f32c9b0c7 100644
--- a/web/app/components/app/configuration/config-var/config-modal/index.tsx
+++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx
@@ -3,7 +3,7 @@ import type { ChangeEvent, FC } from 'react'
 import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { useContext } from 'use-context-selector'
-import produce from 'immer'
+import { produce } from 'immer'
 import ModalFoot from '../modal-foot'
 import ConfigSelect from '../config-select'
 import ConfigString from '../config-string'
@@ -32,6 +32,19 @@ import { TransferMethod } from '@/types/app'
 import type { FileEntity } from '@/app/components/base/file-uploader/types'

 const TEXT_MAX_LENGTH = 256
+const CHECKBOX_DEFAULT_TRUE_VALUE = 'true'
+const CHECKBOX_DEFAULT_FALSE_VALUE = 'false'
+
+const getCheckboxDefaultSelectValue = (value: InputVar['default']) => {
+  if (typeof value === 'boolean')
+    return value ? CHECKBOX_DEFAULT_TRUE_VALUE : CHECKBOX_DEFAULT_FALSE_VALUE
+  if (typeof value === 'string')
+    return value.toLowerCase() === CHECKBOX_DEFAULT_TRUE_VALUE ? CHECKBOX_DEFAULT_TRUE_VALUE : CHECKBOX_DEFAULT_FALSE_VALUE
+  return CHECKBOX_DEFAULT_FALSE_VALUE
+}
+
+const parseCheckboxSelectValue = (value: string) =>
+  value === CHECKBOX_DEFAULT_TRUE_VALUE

 export type IConfigModalProps = {
   isCreate?: boolean
@@ -53,7 +66,7 @@ const ConfigModal: FC = ({
 }) => {
   const { modelConfig } = useContext(ConfigContext)
   const { t } = useTranslation()
-  const [tempPayload, setTempPayload] = useState(payload || getNewVarInWorkflow('') as any)
+  const [tempPayload, setTempPayload] = useState(() => payload || getNewVarInWorkflow('') as any)
   const { type, label, variable, options, max_length } = tempPayload
   const modalRef = useRef(null)
   const appDetail = useAppStore(state => state.appDetail)
@@ -66,7 +79,7 @@ const ConfigModal: FC = ({
     try {
       return JSON.stringify(JSON.parse(tempPayload.json_schema).properties, null, 2)
     }
-    catch (_e) {
+    catch {
       return ''
     }
   }, [tempPayload.json_schema])
@@ -110,7 +123,7 @@ const ConfigModal: FC = ({
       }
       handlePayloadChange('json_schema')(JSON.stringify(res, null, 2))
     }
-    catch (_e) {
+    catch {
       return null
     }
   }, [handlePayloadChange])
@@ -198,6 +211,8 @@ const ConfigModal: FC = ({
     handlePayloadChange('variable')(e.target.value)
   }, [handlePayloadChange, t])

+  const checkboxDefaultSelectValue = useMemo(() => getCheckboxDefaultSelectValue(tempPayload.default), [tempPayload.default])
+
   const handleConfirm = () => {
     const moreInfo = tempPayload.variable === payload?.variable
       ? undefined
@@ -305,7 +320,7 @@ const ConfigModal: FC = ({
             {type === InputVarType.paragraph && (