diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 3dd00ee4db..c03f281858 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/devcontainers/python:3.12-bullseye +FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install libgmp-dev libmpfr-dev libmpc-dev diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index c1666d24cf..859f499b8e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,5 +1,8 @@ blank_issues_enabled: false contact_links: + - name: "\U0001F510 Security Vulnerabilities" + url: "https://github.com/langgenius/dify/security/advisories/new" + about: Report security vulnerabilities through GitHub Security Advisories to ensure responsible disclosure. 💡 Please do not report security vulnerabilities in public issues. - name: "\U0001F4A1 Model Providers & Plugins" url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose" about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details. diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 116fc59ee8..37d351627b 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -39,25 +39,11 @@ jobs: - name: Install dependencies run: uv sync --project api --dev - - name: Run Unit tests - run: | - uv run --project api bash dev/pytest/pytest_unit_tests.sh - - name: Run pyrefly check run: | cd api uv add --dev pyrefly uv run pyrefly check || true - - name: Coverage Summary - run: | - set -x - # Extract coverage percentage and create a summary - TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])') - - # Create a detailed coverage summary - echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY - echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY - uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY - name: Run dify config tests run: uv run --project api dev/pytest/pytest_config_tests.py @@ -93,3 +79,19 @@ jobs: - name: Run TestContainers run: uv run --project api bash dev/pytest/pytest_testcontainers.sh + + - name: Run Unit tests + run: | + uv run --project api bash dev/pytest/pytest_unit_tests.sh + + - name: Coverage Summary + run: | + set -x + # Extract coverage percentage and create a summary + TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])') + + # Create a detailed coverage summary + echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY + echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY + uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY + diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml index 068ba686fa..0cae2ef552 100644 --- a/.github/workflows/autofix.yml +++ b/.github/workflows/autofix.yml @@ -15,10 +15,12 @@ jobs: # Use uv to ensure we have the same ruff version in CI and locally. - uses: astral-sh/setup-uv@v6 with: - python-version: "3.12" + python-version: "3.11" - run: | cd api uv sync --dev + # fmt first to avoid line too long + uv run ruff format .. # Fix lint errors uv run ruff check --fix . 
# Format code @@ -28,6 +30,8 @@ jobs: run: | uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all + uvx --from ast-grep-cli sg -p '$A = db.Column($$$B)' -r '$A = mapped_column($$$B)' -l py --update-all + uvx --from ast-grep-cli sg -p '$A : $T = db.Column($$$B)' -r '$A : $T = mapped_column($$$B)' -l py --update-all # Convert Optional[T] to T | None (ignoring quoted types) cat > /tmp/optional-rule.yml << 'EOF' id: convert-optional-to-union diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 24a9da4400..f7f464a601 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -4,8 +4,7 @@ on: push: branches: - "main" - - "deploy/dev" - - "deploy/enterprise" + - "deploy/**" - "build/**" - "release/e-*" - "hotfix/**" diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml index de732c3134..cd1c86e668 100644 --- a/.github/workflows/deploy-dev.yml +++ b/.github/workflows/deploy-dev.yml @@ -18,7 +18,7 @@ jobs: - name: Deploy to server uses: appleboy/ssh-action@v0.1.8 with: - host: ${{ secrets.RAG_SSH_HOST }} + host: ${{ secrets.SSH_HOST }} username: ${{ secrets.SSH_USER }} key: ${{ secrets.SSH_PRIVATE_KEY }} script: | diff --git a/.github/workflows/deploy-rag-dev.yml b/.github/workflows/deploy-trigger-dev.yml similarity index 75% rename from .github/workflows/deploy-rag-dev.yml rename to .github/workflows/deploy-trigger-dev.yml index 86265aad6d..2d9a904fc5 100644 --- a/.github/workflows/deploy-rag-dev.yml +++ b/.github/workflows/deploy-trigger-dev.yml @@ -1,4 +1,4 @@ -name: Deploy RAG Dev +name: Deploy Trigger Dev permissions: contents: read @@ -7,7 +7,7 @@ on: workflow_run: workflows: ["Build and Push API & Web"] branches: - - "deploy/rag-dev" + - "deploy/trigger-dev" types: - completed @@ -16,12 +16,12 @@ jobs: runs-on: ubuntu-latest if: | github.event.workflow_run.conclusion == 'success' && - github.event.workflow_run.head_branch == 'deploy/rag-dev' + github.event.workflow_run.head_branch == 'deploy/trigger-dev' steps: - name: Deploy to server uses: appleboy/ssh-action@v0.1.8 with: - host: ${{ secrets.RAG_SSH_HOST }} + host: ${{ secrets.TRIGGER_SSH_HOST }} username: ${{ secrets.SSH_USER }} key: ${{ secrets.SSH_PRIVATE_KEY }} script: | diff --git a/.github/workflows/expose_service_ports.sh b/.github/workflows/expose_service_ports.sh index 01772ccf9f..fa0fd2ee8c 100755 --- a/.github/workflows/expose_service_ports.sh +++ b/.github/workflows/expose_service_ports.sh @@ -1,6 +1,7 @@ #!/bin/bash yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml +yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml diff --git a/AGENTS.md b/AGENTS.md index 44f7b30360..5859cd1bd9 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -4,84 +4,51 @@ Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management. 
-The codebase consists of: +The codebase is split into: -- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture -- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19 +- **Backend API** (`/api`): Python Flask application organized with Domain-Driven Design +- **Frontend Web** (`/web`): Next.js 15 application using TypeScript and React 19 - **Docker deployment** (`/docker`): Containerized deployment configurations -## Development Commands +## Backend Workflow -### Backend (API) +- Run backend CLI commands through `uv run --project api `. -All Python commands must be prefixed with `uv run --project api`: +- Backend QA gate requires passing `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before review. -```bash -# Start development servers -./dev/start-api # Start API server -./dev/start-worker # Start Celery worker +- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks. -# Run tests -uv run --project api pytest # Run all tests -uv run --project api pytest tests/unit_tests/ # Unit tests only -uv run --project api pytest tests/integration_tests/ # Integration tests +- Integration tests are CI-only and are not expected to run in the local environment. -# Code quality -./dev/reformat # Run all formatters and linters -uv run --project api ruff check --fix ./ # Fix linting issues -uv run --project api ruff format ./ # Format code -uv run --directory api basedpyright # Type checking -``` - -### Frontend (Web) +## Frontend Workflow ```bash cd web -pnpm lint # Run ESLint -pnpm eslint-fix # Fix ESLint issues -pnpm test # Run Jest tests +pnpm lint +pnpm lint:fix +pnpm test ``` -## Testing Guidelines +## Testing & Quality Practices -### Backend Testing +- Follow TDD: red → green → refactor. +- Use `pytest` for backend tests with Arrange-Act-Assert structure. +- Enforce strong typing; avoid `Any` and prefer explicit type annotations. +- Write self-documenting code; only add comments that explain intent. -- Use `pytest` for all backend tests -- Write tests first (TDD approach) -- Test structure: Arrange-Act-Assert +## Language Style -## Code Style Requirements +- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`). +- **TypeScript**: Use the strict config, lean on ESLint + Prettier workflows, and avoid `any` types. -### Python +## General Practices -- Use type hints for all functions and class attributes -- No `Any` types unless absolutely necessary -- Implement special methods (`__repr__`, `__str__`) appropriately +- Prefer editing existing files; add new documentation only when requested. +- Inject dependencies through constructors and preserve clean architecture boundaries. +- Handle errors with domain-specific exceptions at the correct layer. 
-### TypeScript/JavaScript +## Project Conventions -- Strict TypeScript configuration -- ESLint with Prettier integration -- Avoid `any` type - -## Important Notes - -- **Environment Variables**: Always use UV for Python commands: `uv run --project api ` -- **Comments**: Only write meaningful comments that explain "why", not "what" -- **File Creation**: Always prefer editing existing files over creating new ones -- **Documentation**: Don't create documentation files unless explicitly requested -- **Code Quality**: Always run `./dev/reformat` before committing backend changes - -## Common Development Tasks - -### Adding a New API Endpoint - -1. Create controller in `/api/controllers/` -1. Add service logic in `/api/services/` -1. Update routes in controller's `__init__.py` -1. Write tests in `/api/tests/` - -## Project-Specific Conventions - -- All async tasks use Celery with Redis as broker -- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations. +- Backend architecture adheres to DDD and Clean Architecture principles. +- Async work runs through Celery with Redis as the broker. +- Frontend user-facing strings must use `web/i18n/en-US/`; avoid hardcoded text. diff --git a/Makefile b/Makefile index ea560c7157..19c398ec82 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,6 @@ prepare-web: @echo "🌐 Setting up web environment..." @cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists" @cd web && pnpm install - @cd web && pnpm build @echo "✅ Web environment prepared (not started)" # Step 3: Prepare API environment diff --git a/README.md b/README.md index 90da1d3def..aadced582d 100644 --- a/README.md +++ b/README.md @@ -40,18 +40,18 @@

README in English - 繁體中文文件 - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in Deutsch - README in বাংলা + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more—allowing you to quickly move from prototype to production. diff --git a/api/.env.example b/api/.env.example index d53de3779b..1d8190ce5f 100644 --- a/api/.env.example +++ b/api/.env.example @@ -343,6 +343,15 @@ OCEANBASE_VECTOR_DATABASE=test OCEANBASE_MEMORY_LIMIT=6G OCEANBASE_ENABLE_HYBRID_SEARCH=false +# AlibabaCloud MySQL Vector configuration +ALIBABACLOUD_MYSQL_HOST=127.0.0.1 +ALIBABACLOUD_MYSQL_PORT=3306 +ALIBABACLOUD_MYSQL_USER=root +ALIBABACLOUD_MYSQL_PASSWORD=root +ALIBABACLOUD_MYSQL_DATABASE=dify +ALIBABACLOUD_MYSQL_MAX_CONNECTION=5 +ALIBABACLOUD_MYSQL_HNSW_M=6 + # openGauss configuration OPENGAUSS_HOST=127.0.0.1 OPENGAUSS_PORT=6600 @@ -427,8 +436,8 @@ CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20 CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0 CODE_MAX_NUMBER=9223372036854775807 CODE_MIN_NUMBER=-9223372036854775808 -CODE_MAX_STRING_LENGTH=80000 -TEMPLATE_TRANSFORM_MAX_LENGTH=80000 +CODE_MAX_STRING_LENGTH=400000 +TEMPLATE_TRANSFORM_MAX_LENGTH=400000 CODE_MAX_STRING_ARRAY_LENGTH=30 CODE_MAX_OBJECT_ARRAY_LENGTH=30 CODE_MAX_NUMBER_ARRAY_LENGTH=1000 diff --git a/api/.ruff.toml b/api/.ruff.toml index 643bc063a1..5a29e1d8fa 100644 --- a/api/.ruff.toml +++ b/api/.ruff.toml @@ -81,7 +81,6 @@ ignore = [ "SIM113", # enumerate-for-loop "SIM117", # multiple-with-statements "SIM210", # if-expr-with-true-false - "UP038", # deprecated and not recommended by Ruff, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/ ] [lint.per-file-ignores] diff --git a/api/README.md b/api/README.md index 5ecf92a4f0..e75ea3d354 100644 --- a/api/README.md +++ b/api/README.md @@ -80,10 +80,10 @@ 1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service. 
```bash -uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation +uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation ``` -Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal: +Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service: ```bash uv run celery -A app.celery beat diff --git a/api/commands.py b/api/commands.py index 82efe34611..8ca19e1dac 100644 --- a/api/commands.py +++ b/api/commands.py @@ -1521,6 +1521,14 @@ def transform_datasource_credentials(): auth_count = 0 for firecrawl_tenant_credential in firecrawl_tenant_credentials: auth_count += 1 + if not firecrawl_tenant_credential.credentials: + click.echo( + click.style( + f"Skipping firecrawl credential for tenant {tenant_id} due to missing credentials.", + fg="yellow", + ) + ) + continue # get credential api key credentials_json = json.loads(firecrawl_tenant_credential.credentials) api_key = credentials_json.get("config", {}).get("api_key") @@ -1576,6 +1584,14 @@ def transform_datasource_credentials(): auth_count = 0 for jina_tenant_credential in jina_tenant_credentials: auth_count += 1 + if not jina_tenant_credential.credentials: + click.echo( + click.style( + f"Skipping jina credential for tenant {tenant_id} due to missing credentials.", + fg="yellow", + ) + ) + continue # get credential api key credentials_json = json.loads(jina_tenant_credential.credentials) api_key = credentials_json.get("config", {}).get("api_key") diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 363cf4e2b5..6ce72e80df 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -150,7 +150,7 @@ class CodeExecutionSandboxConfig(BaseSettings): CODE_MAX_STRING_LENGTH: PositiveInt = Field( description="Maximum allowed length for strings in code execution", - default=80000, + default=400_000, ) CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field( @@ -189,6 +189,11 @@ class PluginConfig(BaseSettings): default="plugin-api-key", ) + PLUGIN_DAEMON_TIMEOUT: PositiveFloat | None = Field( + description="Timeout in seconds for requests to the plugin daemon (set to None to disable)", + default=300.0, + ) + INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key") PLUGIN_REMOTE_INSTALL_HOST: str = Field( @@ -362,11 +367,11 @@ class HttpConfig(BaseSettings): ) HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field( - ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60 + ge=1, description="Maximum read timeout in seconds for HTTP requests", default=600 ) HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field( - ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20 + ge=1, description="Maximum write timeout in seconds for HTTP requests", default=600 ) HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field( @@ -582,6 +587,11 @@ class WorkflowConfig(BaseSettings): default=200 * 1024, ) + TEMPLATE_TRANSFORM_MAX_LENGTH: PositiveInt = Field( + description="Maximum number of characters allowed in Template Transform node output", + default=400_000, + ) + # GraphEngine Worker Pool Configuration GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field( description="Minimum number of workers per GraphEngine 
instance", @@ -766,7 +776,7 @@ class MailConfig(BaseSettings): MAIL_TEMPLATING_TIMEOUT: int = Field( description=""" - Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates. + Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates. Only available in sandbox mode.""", default=3, ) diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 62b3cc9842..d872e8201b 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -18,6 +18,7 @@ from .storage.opendal_storage_config import OpenDALStorageConfig from .storage.supabase_storage_config import SupabaseStorageConfig from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig +from .vdb.alibabacloud_mysql_config import AlibabaCloudMySQLConfig from .vdb.analyticdb_config import AnalyticdbConfig from .vdb.baidu_vector_config import BaiduVectorDBConfig from .vdb.chroma_config import ChromaConfig @@ -330,6 +331,7 @@ class MiddlewareConfig( ClickzettaConfig, HuaweiCloudConfig, MilvusConfig, + AlibabaCloudMySQLConfig, MyScaleConfig, OpenSearchConfig, OracleConfig, diff --git a/api/configs/middleware/vdb/alibabacloud_mysql_config.py b/api/configs/middleware/vdb/alibabacloud_mysql_config.py new file mode 100644 index 0000000000..a76400ed1c --- /dev/null +++ b/api/configs/middleware/vdb/alibabacloud_mysql_config.py @@ -0,0 +1,54 @@ +from pydantic import Field, PositiveInt +from pydantic_settings import BaseSettings + + +class AlibabaCloudMySQLConfig(BaseSettings): + """ + Configuration settings for AlibabaCloud MySQL vector database + """ + + ALIBABACLOUD_MYSQL_HOST: str = Field( + description="Hostname or IP address of the AlibabaCloud MySQL server (e.g., 'localhost' or 'mysql.aliyun.com')", + default="localhost", + ) + + ALIBABACLOUD_MYSQL_PORT: PositiveInt = Field( + description="Port number on which the AlibabaCloud MySQL server is listening (default is 3306)", + default=3306, + ) + + ALIBABACLOUD_MYSQL_USER: str = Field( + description="Username for authenticating with AlibabaCloud MySQL (default is 'root')", + default="root", + ) + + ALIBABACLOUD_MYSQL_PASSWORD: str = Field( + description="Password for authenticating with AlibabaCloud MySQL (default is an empty string)", + default="", + ) + + ALIBABACLOUD_MYSQL_DATABASE: str = Field( + description="Name of the AlibabaCloud MySQL database to connect to (default is 'dify')", + default="dify", + ) + + ALIBABACLOUD_MYSQL_MAX_CONNECTION: PositiveInt = Field( + description="Maximum number of connections in the connection pool", + default=5, + ) + + ALIBABACLOUD_MYSQL_CHARSET: str = Field( + description="Character set for AlibabaCloud MySQL connection (default is 'utf8mb4')", + default="utf8mb4", + ) + + ALIBABACLOUD_MYSQL_DISTANCE_FUNCTION: str = Field( + description="Distance function used for vector similarity search in AlibabaCloud MySQL " + "(e.g., 'cosine', 'euclidean')", + default="cosine", + ) + + ALIBABACLOUD_MYSQL_HNSW_M: PositiveInt = Field( + description="Maximum number of connections per layer for HNSW vector index (default is 6, range: 3-200)", + default=6, + ) diff --git a/api/configs/middleware/vdb/opensearch_config.py b/api/configs/middleware/vdb/opensearch_config.py index ba015a6eb9..a7d712545e 100644 --- a/api/configs/middleware/vdb/opensearch_config.py +++ b/api/configs/middleware/vdb/opensearch_config.py @@ -1,23 +1,24 @@ -from enum import Enum 
+from enum import StrEnum from typing import Literal from pydantic import Field, PositiveInt from pydantic_settings import BaseSettings +class AuthMethod(StrEnum): + """ + Authentication method for OpenSearch + """ + + BASIC = "basic" + AWS_MANAGED_IAM = "aws_managed_iam" + + class OpenSearchConfig(BaseSettings): """ Configuration settings for OpenSearch """ - class AuthMethod(Enum): - """ - Authentication method for OpenSearch - """ - - BASIC = "basic" - AWS_MANAGED_IAM = "aws_managed_iam" - OPENSEARCH_HOST: str | None = Field( description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')", default=None, diff --git a/api/constants/__init__.py b/api/constants/__init__.py index fe8f4f8785..9141fbea95 100644 --- a/api/constants/__init__.py +++ b/api/constants/__init__.py @@ -1,4 +1,5 @@ from configs import dify_config +from libs.collection_utils import convert_to_lower_and_upper_set HIDDEN_VALUE = "[__HIDDEN__]" UNKNOWN_VALUE = "[__UNKNOWN__]" @@ -6,24 +7,39 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000" DEFAULT_FILE_NUMBER_LIMITS = 3 -IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"] -IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS]) +IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"}) -VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"] -VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS]) +VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"}) -AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"] -AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS]) +AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"}) - -_doc_extensions: list[str] +_doc_extensions: set[str] if dify_config.ETL_TYPE == "Unstructured": - _doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"] - _doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub")) + _doc_extensions = { + "txt", + "markdown", + "md", + "mdx", + "pdf", + "html", + "htm", + "xlsx", + "xls", + "vtt", + "properties", + "doc", + "docx", + "csv", + "eml", + "msg", + "pptx", + "xml", + "epub", + } if dify_config.UNSTRUCTURED_API_URL: - _doc_extensions.append("ppt") + _doc_extensions.add("ppt") else: - _doc_extensions = [ + _doc_extensions = { "txt", "markdown", "md", @@ -37,5 +53,5 @@ else: "csv", "vtt", "properties", - ] -DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions] + } +DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions) diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index ee02ff3937..621f5066e4 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -1,31 +1,10 @@ +from importlib import import_module + from flask import Blueprint from flask_restx import Namespace from libs.external_api import ExternalApi -from .app.app_import import AppImportApi, AppImportCheckDependenciesApi, AppImportConfirmApi -from .explore.audio import ChatAudioApi, ChatTextApi -from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi -from .explore.conversation import ( - ConversationApi, - ConversationListApi, - ConversationPinApi, - ConversationRenameApi, - ConversationUnPinApi, -) -from .explore.message import ( - MessageFeedbackApi, - MessageListApi, - MessageMoreLikeThisApi, - MessageSuggestedQuestionApi, -) -from 
.explore.workflow import ( - InstalledAppWorkflowRunApi, - InstalledAppWorkflowTaskStopApi, -) -from .files import FileApi, FilePreviewApi, FileSupportTypeApi -from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi - bp = Blueprint("console", __name__, url_prefix="/console/api") api = ExternalApi( @@ -35,23 +14,23 @@ api = ExternalApi( description="Console management APIs for app configuration, monitoring, and administration", ) -# Create namespace console_ns = Namespace("console", description="Console management API operations", path="/") -# File -api.add_resource(FileApi, "/files/upload") -api.add_resource(FilePreviewApi, "/files//preview") -api.add_resource(FileSupportTypeApi, "/files/support-type") +RESOURCE_MODULES = ( + "controllers.console.app.app_import", + "controllers.console.explore.audio", + "controllers.console.explore.completion", + "controllers.console.explore.conversation", + "controllers.console.explore.message", + "controllers.console.explore.workflow", + "controllers.console.files", + "controllers.console.remote_files", +) -# Remote files -api.add_resource(RemoteFileInfoApi, "/remote-files/") -api.add_resource(RemoteFileUploadApi, "/remote-files/upload") - -# Import App -api.add_resource(AppImportApi, "/apps/imports") -api.add_resource(AppImportConfirmApi, "/apps/imports//confirm") -api.add_resource(AppImportCheckDependenciesApi, "/apps/imports//check-dependencies") +for module_name in RESOURCE_MODULES: + import_module(module_name) +# Ensure resource modules are imported so route decorators are evaluated. # Import other controllers from . import ( admin, @@ -150,77 +129,6 @@ from .workspace import ( workspace, ) -# Explore Audio -api.add_resource(ChatAudioApi, "/installed-apps//audio-to-text", endpoint="installed_app_audio") -api.add_resource(ChatTextApi, "/installed-apps//text-to-audio", endpoint="installed_app_text") - -# Explore Completion -api.add_resource( - CompletionApi, "/installed-apps//completion-messages", endpoint="installed_app_completion" -) -api.add_resource( - CompletionStopApi, - "/installed-apps//completion-messages//stop", - endpoint="installed_app_stop_completion", -) -api.add_resource( - ChatApi, "/installed-apps//chat-messages", endpoint="installed_app_chat_completion" -) -api.add_resource( - ChatStopApi, - "/installed-apps//chat-messages//stop", - endpoint="installed_app_stop_chat_completion", -) - -# Explore Conversation -api.add_resource( - ConversationRenameApi, - "/installed-apps//conversations//name", - endpoint="installed_app_conversation_rename", -) -api.add_resource( - ConversationListApi, "/installed-apps//conversations", endpoint="installed_app_conversations" -) -api.add_resource( - ConversationApi, - "/installed-apps//conversations/", - endpoint="installed_app_conversation", -) -api.add_resource( - ConversationPinApi, - "/installed-apps//conversations//pin", - endpoint="installed_app_conversation_pin", -) -api.add_resource( - ConversationUnPinApi, - "/installed-apps//conversations//unpin", - endpoint="installed_app_conversation_unpin", -) - - -# Explore Message -api.add_resource(MessageListApi, "/installed-apps//messages", endpoint="installed_app_messages") -api.add_resource( - MessageFeedbackApi, - "/installed-apps//messages//feedbacks", - endpoint="installed_app_message_feedback", -) -api.add_resource( - MessageMoreLikeThisApi, - "/installed-apps//messages//more-like-this", - endpoint="installed_app_more_like_this", -) -api.add_resource( - MessageSuggestedQuestionApi, - "/installed-apps//messages//suggested-questions", - 
endpoint="installed_app_suggested_question", -) -# Explore Workflow -api.add_resource(InstalledAppWorkflowRunApi, "/installed-apps//workflows/run") -api.add_resource( - InstalledAppWorkflowTaskStopApi, "/installed-apps//workflows/tasks//stop" -) - api.add_namespace(console_ns) __all__ = [ diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index fec527e4cb..4f04af7932 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -1,5 +1,4 @@ import flask_restx -from flask_login import current_user from flask_restx import Resource, fields, marshal_with from flask_restx._http import HTTPStatus from sqlalchemy import select @@ -8,12 +7,12 @@ from werkzeug.exceptions import Forbidden from extensions.ext_database import db from libs.helper import TimestampField -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.dataset import Dataset from models.model import ApiToken, App from . import api, console_ns -from .wraps import account_initialization_required, setup_required +from .wraps import account_initialization_required, edit_permission_required, setup_required api_key_fields = { "id": fields.String, @@ -57,7 +56,9 @@ class BaseApiKeyListResource(Resource): def get(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) + _, current_tenant_id = current_account_with_tenant() + + _get_resource(resource_id, current_tenant_id, self.resource_model) keys = db.session.scalars( select(ApiToken).where( ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id @@ -66,13 +67,12 @@ class BaseApiKeyListResource(Resource): return {"items": keys} @marshal_with(api_key_fields) + @edit_permission_required def post(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) - if not current_user.is_editor: - raise Forbidden() - + _, current_tenant_id = current_account_with_tenant() + _get_resource(resource_id, current_tenant_id, self.resource_model) current_key_count = ( db.session.query(ApiToken) .where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id) @@ -89,7 +89,7 @@ class BaseApiKeyListResource(Resource): key = ApiToken.generate_api_key(self.token_prefix or "", 24) api_token = ApiToken() setattr(api_token, self.resource_id_field, resource_id) - api_token.tenant_id = current_user.current_tenant_id + api_token.tenant_id = current_tenant_id api_token.token = key api_token.type = self.resource_type db.session.add(api_token) @@ -108,7 +108,8 @@ class BaseApiKeyResource(Resource): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) api_key_id = str(api_key_id) - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) + current_user, current_tenant_id = current_account_with_tenant() + _get_resource(resource_id, current_tenant_id, self.resource_model) # The role of the current user in the ta table must be admin or owner if not current_user.is_admin_or_owner: @@ -152,11 +153,6 @@ class AppApiKeyListResource(BaseApiKeyListResource): """Create a new API key for an app""" return super().post(resource_id) - def after_request(self, resp): - 
resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "app" resource_model = App resource_id_field = "app_id" @@ -173,11 +169,6 @@ class AppApiKeyResource(BaseApiKeyResource): """Delete an API key for an app""" return super().delete(resource_id, api_key_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "app" resource_model = App resource_id_field = "app_id" @@ -202,11 +193,6 @@ class DatasetApiKeyListResource(BaseApiKeyListResource): """Create a new API key for a dataset""" return super().post(resource_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "dataset" resource_model = Dataset resource_id_field = "dataset_id" @@ -223,11 +209,6 @@ class DatasetApiKeyResource(BaseApiKeyResource): """Delete an API key for a dataset""" return super().delete(resource_id, api_key_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "dataset" resource_model = Dataset resource_id_field = "dataset_id" diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index d0ee11fe75..3e549d869e 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -1,15 +1,14 @@ from typing import Literal from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse -from werkzeug.exceptions import Forbidden from controllers.common.errors import NoFileUploadedError, TooManyFilesError from controllers.console import api, console_ns from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from extensions.ext_redis import redis_client @@ -42,10 +41,8 @@ class AnnotationReplyActionApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def post(self, app_id, action: Literal["enable", "disable"]): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) parser = reqparse.RequestParser() parser.add_argument("score_threshold", required=True, type=float, location="json") @@ -69,10 +66,8 @@ class AppAnnotationSettingDetailApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) result = AppAnnotationService.get_app_annotation_setting_by_app_id(app_id) return result, 200 @@ -98,10 +93,8 @@ class AppAnnotationSettingUpdateApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, app_id, annotation_setting_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_setting_id = str(annotation_setting_id) @@ -124,10 +117,8 @@ class AnnotationReplyActionStatusApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def get(self, app_id, job_id, action): - if not 
current_user.is_editor: - raise Forbidden() - job_id = str(job_id) app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}" cache_result = redis_client.get(app_annotation_job_key) @@ -159,10 +150,8 @@ class AnnotationApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) keyword = request.args.get("keyword", default="", type=str) @@ -198,10 +187,8 @@ class AnnotationApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("annotation") @marshal_with(annotation_fields) + @edit_permission_required def post(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) parser = reqparse.RequestParser() parser.add_argument("question", required=True, type=str, location="json") @@ -213,10 +200,8 @@ class AnnotationApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) # Use request.args.getlist to get annotation_ids array directly @@ -249,10 +234,8 @@ class AnnotationExportApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id) response = {"data": marshal(annotation_list, annotation_fields)} @@ -271,11 +254,9 @@ class AnnotationUpdateDeleteApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required @marshal_with(annotation_fields) def post(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_id = str(annotation_id) parser = reqparse.RequestParser() @@ -288,10 +269,8 @@ class AnnotationUpdateDeleteApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_id = str(annotation_id) AppAnnotationService.delete_app_annotation(app_id, annotation_id) @@ -310,10 +289,8 @@ class AnnotationBatchImportApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def post(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) # check file if "file" not in request.files: @@ -341,10 +318,8 @@ class AnnotationBatchImportStatusApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def get(self, app_id, job_id): - if not current_user.is_editor: - raise Forbidden() - job_id = str(job_id) indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" cache_result = redis_client.get(indexing_cache_key) @@ -376,10 +351,8 @@ class AnnotationHitHistoryListApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - page = request.args.get("page", default=1, type=int) limit = 
request.args.get("limit", default=20, type=int) app_id = str(app_id) diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 2d2e4b448a..3900f5a6eb 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -1,7 +1,5 @@ import uuid -from typing import cast -from flask_login import current_user from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session @@ -12,14 +10,16 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, enterprise_license_required, setup_required, ) from core.ops.ops_trace_manager import OpsTraceManager from extensions.ext_database import db from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields -from libs.login import login_required -from models import Account, App +from libs.login import current_account_with_tenant, login_required +from libs.validators import validate_description_length +from models import App from services.app_dsl_service import AppDslService, ImportMode from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -28,12 +28,6 @@ from services.feature_service import FeatureService ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"] -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - @console_ns.route("/apps") class AppListApi(Resource): @api.doc("list_apps") @@ -61,6 +55,7 @@ class AppListApi(Resource): @enterprise_license_required def get(self): """Get app list""" + current_user, current_tenant_id = current_account_with_tenant() def uuid_list(value): try: @@ -95,7 +90,7 @@ class AppListApi(Resource): # get app list app_service = AppService() - app_pagination = app_service.get_paginate_apps(current_user.id, current_user.current_tenant_id, args) + app_pagination = app_service.get_paginate_apps(current_user.id, current_tenant_id, args) if not app_pagination: return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False} @@ -134,30 +129,24 @@ class AppListApi(Resource): @account_initialization_required @marshal_with(app_detail_fields) @cloud_edition_billing_resource_check("apps") + @edit_permission_required def post(self): """Create app""" + current_user, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") + parser.add_argument("description", type=validate_description_length, location="json") parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json") parser.add_argument("icon_type", type=str, location="json") parser.add_argument("icon", type=str, location="json") parser.add_argument("icon_background", type=str, location="json") args = parser.parse_args() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - if "mode" not in args or args["mode"] is None: raise BadRequest("mode is required") app_service = AppService() - if not isinstance(current_user, Account): - raise 
ValueError("current_user must be an Account instance") - if current_user.current_tenant_id is None: - raise ValueError("current_user.current_tenant_id cannot be None") - app = app_service.create_app(current_user.current_tenant_id, args, current_user) + app = app_service.create_app(current_tenant_id, args, current_user) return app, 201 @@ -210,16 +199,13 @@ class AppApi(Resource): @login_required @account_initialization_required @get_app_model + @edit_permission_required @marshal_with(app_detail_fields_with_site) def put(self, app_model): """Update app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") + parser.add_argument("description", type=validate_description_length, location="json") parser.add_argument("icon_type", type=str, location="json") parser.add_argument("icon", type=str, location="json") parser.add_argument("icon_background", type=str, location="json") @@ -253,12 +239,9 @@ class AppApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_model): """Delete app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - app_service = AppService() app_service.delete_app(app_model) @@ -288,16 +271,16 @@ class AppCopyApi(Resource): @login_required @account_initialization_required @get_app_model + @edit_permission_required @marshal_with(app_detail_fields_with_site) def post(self, app_model): """Copy app""" # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("name", type=str, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") + parser.add_argument("description", type=validate_description_length, location="json") parser.add_argument("icon_type", type=str, location="json") parser.add_argument("icon", type=str, location="json") parser.add_argument("icon_background", type=str, location="json") @@ -306,10 +289,9 @@ class AppCopyApi(Resource): with Session(db.engine) as session: import_service = AppDslService(session) yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True) - account = cast(Account, current_user) result = import_service.import_app( - account=account, - import_mode=ImportMode.YAML_CONTENT.value, + account=current_user, + import_mode=ImportMode.YAML_CONTENT, yaml_content=yaml_content, name=args.get("name"), description=args.get("description"), @@ -345,12 +327,9 @@ class AppExportApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_model): """Export app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - # Add include_secret params parser = reqparse.RequestParser() parser.add_argument("include_secret", type=inputs.boolean, default=False, location="args") @@ -376,11 +355,8 @@ class AppNameApi(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def 
post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") args = parser.parse_args() @@ -413,11 +389,8 @@ class AppIconApi(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("icon", type=str, location="json") parser.add_argument("icon_background", type=str, location="json") @@ -446,11 +419,8 @@ class AppSiteStatus(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("enable_site", type=bool, required=True, location="json") args = parser.parse_args() @@ -480,6 +450,7 @@ class AppApiStatus(Resource): @marshal_with(app_detail_fields) def post(self, app_model): # The role of the current user in the ta table must be admin or owner + current_user, _ = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() @@ -525,10 +496,9 @@ class AppTraceApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, app_id): # add app trace - if not current_user.is_editor: - raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("enabled", type=bool, required=True, location="json") parser.add_argument("tracing_provider", type=str, required=True, location="json") diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index aee93a8814..5e7ea6d481 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -1,37 +1,35 @@ -from typing import cast - -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from sqlalchemy.orm import Session -from werkzeug.exceptions import Forbidden from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from extensions.ext_database import db from fields.app_fields import app_import_check_dependencies_fields, app_import_fields -from libs.login import login_required -from models import Account +from libs.login import current_account_with_tenant, login_required from models.model import App from services.app_dsl_service import AppDslService, ImportStatus from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService +from .. 
import console_ns + +@console_ns.route("/apps/imports") class AppImportApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(app_import_fields) @cloud_edition_billing_resource_check("apps") + @edit_permission_required def post(self): # Check user role first - if not current_user.is_editor: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("mode", type=str, required=True, location="json") parser.add_argument("yaml_content", type=str, location="json") @@ -48,7 +46,7 @@ class AppImportApi(Resource): with Session(db.engine) as session: import_service = AppDslService(session) # Import app - account = cast(Account, current_user) + account = current_user result = import_service.import_app( account=account, import_mode=args["mode"], @@ -67,47 +65,47 @@ class AppImportApi(Resource): EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private") # Return appropriate status code based on result status = result.status - if status == ImportStatus.FAILED.value: + if status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 - elif status == ImportStatus.PENDING.value: + elif status == ImportStatus.PENDING: return result.model_dump(mode="json"), 202 return result.model_dump(mode="json"), 200 +@console_ns.route("/apps/imports//confirm") class AppImportConfirmApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(app_import_fields) + @edit_permission_required def post(self, import_id): # Check user role first - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() # Create service with session with Session(db.engine) as session: import_service = AppDslService(session) # Confirm import - account = cast(Account, current_user) + account = current_user result = import_service.confirm_import(import_id=import_id, account=account) session.commit() # Return appropriate status code based on result - if result.status == ImportStatus.FAILED.value: + if result.status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 return result.model_dump(mode="json"), 200 +@console_ns.route("/apps/imports//check-dependencies") class AppImportCheckDependenciesApi(Resource): @setup_required @login_required @get_app_model @account_initialization_required @marshal_with(app_import_check_dependencies_fields) + @edit_permission_required def get(self, app_model: App): - if not current_user.is_editor: - raise Forbidden() - with Session(db.engine) as session: import_service = AppDslService(session) result = import_service.check_dependencies(app_model=app_model) diff --git a/api/controllers/console/app/completion.py b/api/controllers/console/app/completion.py index 2f7b90e7fb..d69f05f23e 100644 --- a/api/controllers/console/app/completion.py +++ b/api/controllers/console/app/completion.py @@ -2,7 +2,7 @@ import logging from flask import request from flask_restx import Resource, fields, reqparse -from werkzeug.exceptions import Forbidden, InternalServerError, NotFound +from werkzeug.exceptions import InternalServerError, NotFound import services from controllers.console import api, console_ns @@ -15,7 +15,7 @@ from controllers.console.app.error import ( ProviderQuotaExceededError, ) from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, 
edit_permission_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom @@ -151,13 +151,8 @@ class ChatMessageApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT]) + @edit_permission_required def post(self, app_model): - if not isinstance(current_user, Account): - raise Forbidden() - - if not current_user.has_edit_permission: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, location="json") parser.add_argument("query", type=str, required=True, location="json") diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index f104ab5dee..779be62973 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -1,17 +1,16 @@ from datetime import datetime -import pytz # pip install pytz +import pytz import sqlalchemy as sa -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy import func, or_ from sqlalchemy.orm import joinedload -from werkzeug.exceptions import Forbidden, NotFound +from werkzeug.exceptions import NotFound from controllers.console import api, console_ns from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from fields.conversation_fields import ( @@ -22,8 +21,8 @@ from fields.conversation_fields import ( ) from libs.datetime_utils import naive_utc_now from libs.helper import DatetimeString -from libs.login import login_required -from models import Account, Conversation, EndUser, Message, MessageAnnotation +from libs.login import current_account_with_tenant, login_required +from models import Conversation, EndUser, Message, MessageAnnotation from models.model import AppMode from services.conversation_service import ConversationService from services.errors.conversation import ConversationNotExistsError @@ -57,9 +56,9 @@ class CompletionConversationApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) @marshal_with(conversation_pagination_fields) + @edit_permission_required def get(self, app_model): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("keyword", type=str, location="args") parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -84,6 +83,7 @@ class CompletionConversationApi(Resource): ) account = current_user + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -137,9 +137,8 @@ class CompletionConversationDetailApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) @marshal_with(conversation_message_detail_fields) + @edit_permission_required def get(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() conversation_id = str(conversation_id) return _get_conversation(app_model, conversation_id) 
@@ -154,14 +153,12 @@ class CompletionConversationDetailApi(Resource): @login_required @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) + @edit_permission_required def delete(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() conversation_id = str(conversation_id) try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") ConversationService.delete(app_model, conversation_id, current_user) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @@ -206,9 +203,9 @@ class ChatConversationApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(conversation_with_summary_pagination_fields) + @edit_permission_required def get(self, app_model): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("keyword", type=str, location="args") parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -260,6 +257,7 @@ class ChatConversationApi(Resource): ) account = current_user + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -309,7 +307,7 @@ class ChatConversationApi(Resource): ) if app_model.mode == AppMode.ADVANCED_CHAT: - query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER.value) + query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER) match args["sort_by"]: case "created_at": @@ -341,9 +339,8 @@ class ChatConversationDetailApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(conversation_detail_fields) + @edit_permission_required def get(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() conversation_id = str(conversation_id) return _get_conversation(app_model, conversation_id) @@ -358,14 +355,12 @@ class ChatConversationDetailApi(Resource): @login_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @account_initialization_required + @edit_permission_required def delete(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() conversation_id = str(conversation_id) try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") ConversationService.delete(app_model, conversation_id, current_user) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @@ -374,6 +369,7 @@ class ChatConversationDetailApi(Resource): def _get_conversation(app_model, conversation_id): + current_user, _ = current_account_with_tenant() conversation = ( db.session.query(Conversation) .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 230ccdca15..4a9b6e7801 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -1,6 +1,5 @@ from collections.abc import Sequence -from flask_login import current_user from flask_restx import Resource, fields, reqparse from controllers.console import api, console_ns @@ -17,7 +16,7 @@ from 
core.helper.code_executor.python3.python3_code_provider import Python3CodeP from core.llm_generator.llm_generator import LLMGenerator from core.model_runtime.errors.invoke import InvokeError from extensions.ext_database import db -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import App from services.workflow_service import WorkflowService @@ -48,11 +47,11 @@ class RuleGenerateApi(Resource): parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: rules = LLMGenerator.generate_rule_config( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=args["no_variable"], @@ -99,11 +98,11 @@ class RuleCodeGenerateApi(Resource): parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") parser.add_argument("code_language", type=str, required=False, default="javascript", location="json") args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: code_result = LLMGenerator.generate_code( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], code_language=args["code_language"], @@ -144,11 +143,11 @@ class RuleStructuredOutputGenerateApi(Resource): parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: structured_output = LLMGenerator.generate_structured_output( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], ) @@ -198,6 +197,7 @@ class InstructionGenerateApi(Resource): parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") parser.add_argument("ideal_output", type=str, required=False, default="", location="json") args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() code_template = ( Python3CodeProvider.get_default_code() if args["language"] == "python" @@ -222,21 +222,21 @@ class InstructionGenerateApi(Resource): match node_type: case "llm": return LLMGenerator.generate_rule_config( - current_user.current_tenant_id, + current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=True, ) case "agent": return LLMGenerator.generate_rule_config( - current_user.current_tenant_id, + current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=True, ) case "code": return LLMGenerator.generate_code( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], code_language=args["language"], @@ -245,7 +245,7 @@ class InstructionGenerateApi(Resource): return {"error": f"invalid node type: {node_type}"} if args["node_id"] == "" and args["current"] != "": # For legacy app without a workflow return LLMGenerator.instruction_modify_legacy( - tenant_id=current_user.current_tenant_id, + 
tenant_id=current_tenant_id, flow_id=args["flow_id"], current=args["current"], instruction=args["instruction"], @@ -254,7 +254,7 @@ class InstructionGenerateApi(Resource): ) if args["node_id"] != "" and args["current"] != "": # For workflow node return LLMGenerator.instruction_modify_workflow( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, flow_id=args["flow_id"], node_id=args["node_id"], current=args["current"], diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index b9a383ee61..599f5adb34 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -1,16 +1,15 @@ import json from enum import StrEnum -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import NotFound from controllers.console import api, console_ns from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from extensions.ext_database import db from fields.app_fields import app_server_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.model import AppMCPServer @@ -25,9 +24,9 @@ class AppMCPServerController(Resource): @api.doc(description="Get MCP server configuration for an application") @api.doc(params={"app_id": "Application ID"}) @api.response(200, "MCP server configuration retrieved successfully", app_server_fields) - @setup_required @login_required @account_initialization_required + @setup_required @get_app_model @marshal_with(app_server_fields) def get(self, app_model): @@ -48,14 +47,14 @@ class AppMCPServerController(Resource): ) @api.response(201, "MCP server configuration created successfully", app_server_fields) @api.response(403, "Insufficient permissions") - @setup_required - @login_required @account_initialization_required @get_app_model + @login_required + @setup_required @marshal_with(app_server_fields) + @edit_permission_required def post(self, app_model): - if not current_user.is_editor: - raise NotFound() + _, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("description", type=str, required=False, location="json") parser.add_argument("parameters", type=dict, required=True, location="json") @@ -71,7 +70,7 @@ class AppMCPServerController(Resource): parameters=json.dumps(args["parameters"], ensure_ascii=False), status=AppMCPServerStatus.ACTIVE, app_id=app_model.id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, server_code=AppMCPServer.generate_server_code(16), ) db.session.add(server) @@ -95,14 +94,13 @@ class AppMCPServerController(Resource): @api.response(200, "MCP server configuration updated successfully", app_server_fields) @api.response(403, "Insufficient permissions") @api.response(404, "Server not found") - @setup_required - @login_required - @account_initialization_required @get_app_model + @login_required + @setup_required + @account_initialization_required @marshal_with(app_server_fields) + @edit_permission_required def put(self, app_model): - if not current_user.is_editor: - raise NotFound() parser = reqparse.RequestParser() parser.add_argument("id", type=str, required=True, location="json") parser.add_argument("description", type=str, required=False, 
location="json") @@ -142,13 +140,13 @@ class AppMCPServerRefreshController(Resource): @login_required @account_initialization_required @marshal_with(app_server_fields) + @edit_permission_required def get(self, server_id): - if not current_user.is_editor: - raise NotFound() + _, current_tenant_id = current_account_with_tenant() server = ( db.session.query(AppMCPServer) .where(AppMCPServer.id == server_id) - .where(AppMCPServer.tenant_id == current_user.current_tenant_id) + .where(AppMCPServer.tenant_id == current_tenant_id) .first() ) if not server: diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 46523feccc..005cff75fc 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -3,7 +3,7 @@ import logging from flask_restx import Resource, fields, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy import exists, select -from werkzeug.exceptions import Forbidden, InternalServerError, NotFound +from werkzeug.exceptions import InternalServerError, NotFound from controllers.console import api, console_ns from controllers.console.app.error import ( @@ -17,6 +17,7 @@ from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDi from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from core.app.entities.app_invoke_entities import InvokeFrom @@ -26,8 +27,7 @@ from extensions.ext_database import db from fields.conversation_fields import annotation_fields, message_detail_fields from libs.helper import uuid_value from libs.infinite_scroll_pagination import InfiniteScrollPagination -from libs.login import current_user, login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback from services.annotation_service import AppAnnotationService from services.errors.conversation import ConversationNotExistsError @@ -56,15 +56,13 @@ class ChatMessageListApi(Resource): ) @api.response(200, "Success", message_infinite_scroll_pagination_fields) @api.response(404, "Conversation not found") - @setup_required @login_required - @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @account_initialization_required + @setup_required + @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(message_infinite_scroll_pagination_fields) + @edit_permission_required def get(self, app_model): - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("conversation_id", required=True, type=uuid_value, location="args") parser.add_argument("first_id", type=uuid_value, location="args") @@ -154,8 +152,7 @@ class MessageFeedbackApi(Resource): @login_required @account_initialization_required def post(self, app_model): - if current_user is None: - raise Forbidden() + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("message_id", required=True, type=uuid_value, location="json") @@ -211,18 +208,14 @@ class MessageAnnotationApi(Resource): ) @api.response(200, "Annotation created successfully", annotation_fields) @api.response(403, "Insufficient permissions") + @marshal_with(annotation_fields) + @get_app_model @setup_required @login_required 
- @account_initialization_required @cloud_edition_billing_resource_check("annotation") - @get_app_model - @marshal_with(annotation_fields) + @account_initialization_required + @edit_permission_required def post(self, app_model): - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("message_id", required=False, type=uuid_value, location="json") parser.add_argument("question", required=True, type=str, location="json") @@ -270,6 +263,7 @@ class MessageSuggestedQuestionApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) def get(self, app_model, message_id): + current_user, _ = current_account_with_tenant() message_id = str(message_id) try: @@ -304,12 +298,12 @@ class MessageApi(Resource): @api.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) @api.response(200, "Message retrieved successfully", message_detail_fields) @api.response(404, "Message not found") + @get_app_model @setup_required @login_required @account_initialization_required - @get_app_model @marshal_with(message_detail_fields) - def get(self, app_model, message_id): + def get(self, app_model, message_id: str): message_id = str(message_id) message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first() diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py index 11df511840..72ce8a7ddf 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -2,7 +2,6 @@ import json from typing import cast from flask import request -from flask_login import current_user from flask_restx import Resource, fields from werkzeug.exceptions import Forbidden @@ -14,8 +13,8 @@ from core.tools.tool_manager import ToolManager from core.tools.utils.configuration import ToolParameterConfigurationManager from events.app_event import app_model_config_was_updated from extensions.ext_database import db -from libs.login import login_required -from models.account import Account +from libs.datetime_utils import naive_utc_now +from libs.login import current_account_with_tenant, login_required from models.model import AppMode, AppModelConfig from services.app_model_config_service import AppModelConfigService @@ -53,16 +52,14 @@ class ModelConfigResource(Resource): @get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION]) def post(self, app_model): """Modify app model config""" - if not isinstance(current_user, Account): - raise Forbidden() + current_user, current_tenant_id = current_account_with_tenant() if not current_user.has_edit_permission: raise Forbidden() - assert current_user.current_tenant_id is not None, "The tenant information should be loaded." 
# validate config model_configuration = AppModelConfigService.validate_configuration( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, config=cast(dict, request.json), app_mode=AppMode.value_of(app_model.mode), ) @@ -90,16 +87,16 @@ class ModelConfigResource(Resource): if not isinstance(tool, dict) or len(tool.keys()) <= 3: continue - agent_tool_entity = AgentToolEntity(**tool) + agent_tool_entity = AgentToolEntity.model_validate(tool) # get tool try: tool_runtime = ToolManager.get_agent_tool_runtime( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, app_id=app_model.id, agent_tool=agent_tool_entity, ) manager = ToolParameterConfigurationManager( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, tool_runtime=tool_runtime, provider_name=agent_tool_entity.provider_id, provider_type=agent_tool_entity.provider_type, @@ -124,7 +121,7 @@ class ModelConfigResource(Resource): # encrypt agent tool parameters if it's secret-input agent_mode = new_app_model_config.agent_mode_dict for tool in agent_mode.get("tools") or []: - agent_tool_entity = AgentToolEntity(**tool) + agent_tool_entity = AgentToolEntity.model_validate(tool) # get tool key = f"{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}" @@ -133,7 +130,7 @@ class ModelConfigResource(Resource): else: try: tool_runtime = ToolManager.get_agent_tool_runtime( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, app_id=app_model.id, agent_tool=agent_tool_entity, ) @@ -141,7 +138,7 @@ class ModelConfigResource(Resource): continue manager = ToolParameterConfigurationManager( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, tool_runtime=tool_runtime, provider_name=agent_tool_entity.provider_id, provider_type=agent_tool_entity.provider_type, @@ -172,6 +169,8 @@ class ModelConfigResource(Resource): db.session.flush() app_model.app_model_config_id = new_app_model_config.id + app_model.updated_by = current_user.id + app_model.updated_at = naive_utc_now() db.session.commit() app_model_config_was_updated.send(app_model, app_model_config=new_app_model_config) diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index 95befc5df9..1da704efcc 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -1,4 +1,3 @@ -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import Forbidden, NotFound @@ -9,8 +8,8 @@ from controllers.console.wraps import account_initialization_required, setup_req from extensions.ext_database import db from fields.app_fields import app_site_fields from libs.datetime_utils import naive_utc_now -from libs.login import login_required -from models import Account, Site +from libs.login import current_account_with_tenant, login_required +from models import Site def parse_app_site_args(): @@ -76,9 +75,10 @@ class AppSite(Resource): @marshal_with(app_site_fields) def post(self, app_model): args = parse_app_site_args() + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be editor, admin, or owner - if not current_user.is_editor: + if not current_user.has_edit_permission: raise Forbidden() site = db.session.query(Site).where(Site.app_id == app_model.id).first() @@ -107,8 +107,6 @@ class AppSite(Resource): if value is not None: setattr(site, attr_name, value) - if not isinstance(current_user, 
Account): - raise ValueError("current_user must be an Account instance") site.updated_by = current_user.id site.updated_at = naive_utc_now() db.session.commit() @@ -131,6 +129,8 @@ class AppSiteAccessTokenReset(Resource): @marshal_with(app_site_fields) def post(self, app_model): # The role of the current user in the ta table must be admin or owner + current_user, _ = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() @@ -140,8 +140,6 @@ class AppSiteAccessTokenReset(Resource): raise NotFound site.code = Site.generate_code(16) - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") site.updated_by = current_user.id site.updated_at = naive_utc_now() db.session.commit() diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index 6471b843c6..cfe5b3ff17 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -4,7 +4,6 @@ from decimal import Decimal import pytz import sqlalchemy as sa from flask import jsonify -from flask_login import current_user from flask_restx import Resource, fields, reqparse from controllers.console import api, console_ns @@ -13,7 +12,7 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from libs.helper import DatetimeString -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import AppMode, Message @@ -37,7 +36,7 @@ class DailyMessageStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -52,7 +51,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -109,13 +109,13 @@ class DailyConversationStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") args = parser.parse_args() - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -127,7 +127,7 @@ class DailyConversationStatistic(Resource): sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"), ) .select_from(Message) - .where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER.value) + .where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER) ) if args["start"]: @@ -175,7 +175,7 @@ class DailyTerminalsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), 
location="args") @@ -190,8 +190,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -247,7 +247,7 @@ class DailyTokenCostStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -263,8 +263,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -322,7 +322,7 @@ class AverageSessionInteractionStatistic(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -345,8 +345,8 @@ FROM WHERE c.app_id = :app_id AND m.invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -413,7 +413,7 @@ class UserSatisfactionRateStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -432,8 +432,8 @@ LEFT JOIN WHERE m.app_id = :app_id AND m.invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -494,7 +494,7 @@ class AverageResponseTimeStatistic(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -509,8 +509,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -566,7 +566,7 @@ class TokensPerSecondStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = 
current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -584,8 +584,8 @@ FROM WHERE app_id = :app_id AND invoke_from != :invoke_from""" - arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER.value} - + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 1f5cbbeca5..172a80736f 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -12,7 +12,7 @@ import services from controllers.console import api, console_ns from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError from core.app.app_config.features.file_upload.manager import FileUploadConfigManager from core.app.apps.base_app_queue_manager import AppQueueManager @@ -25,10 +25,10 @@ from factories import file_factory, variable_factory from fields.workflow_fields import workflow_fields, workflow_pagination_fields from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper +from libs.datetime_utils import naive_utc_now from libs.helper import TimestampField, uuid_value -from libs.login import current_user, login_required +from libs.login import current_account_with_tenant, login_required from models import App -from models.account import Account from models.model import AppMode from models.workflow import Workflow from services.app_generate_service import AppGenerateService @@ -69,15 +69,11 @@ class DraftWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def get(self, app_model: App): """ Get draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() - # fetch draft workflow by app_model workflow_service = WorkflowService() workflow = workflow_service.get_draft_workflow(app_model=app_model) @@ -109,14 +105,12 @@ class DraftWorkflowApi(Resource): @api.response(200, "Draft workflow synced successfully", workflow_fields) @api.response(400, "Invalid workflow configuration") @api.response(403, "Permission denied") + @edit_permission_required def post(self, app_model: App): """ Sync draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() content_type = request.headers.get("Content-Type", "") @@ -148,10 +142,6 @@ class DraftWorkflowApi(Resource): return {"message": "Invalid JSON data"}, 400 else: abort(415) - - if not isinstance(current_user, Account): - raise Forbidden() - workflow_service = 
WorkflowService() try: @@ -205,17 +195,12 @@ class AdvancedChatDraftWorkflowRunApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App): """ Run draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() - - if not isinstance(current_user, Account): - raise Forbidden() + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -270,16 +255,12 @@ class AdvancedChatDraftRunIterationNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow iteration node """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") args = parser.parse_args() @@ -322,16 +303,12 @@ class WorkflowDraftRunIterationNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow iteration node """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") args = parser.parse_args() @@ -374,17 +351,12 @@ class AdvancedChatDraftRunLoopNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow loop node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") args = parser.parse_args() @@ -427,17 +399,12 @@ class WorkflowDraftRunLoopNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow loop node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") args = parser.parse_args() @@ -479,17 +446,12 @@ class DraftWorkflowRunApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App): """ Run draft workflow """ - - if not 
isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") parser.add_argument("files", type=list, required=False, location="json") @@ -525,17 +487,11 @@ class WorkflowTaskStopApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, task_id: str): """ Stop workflow task """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # Stop using both mechanisms for backward compatibility # Legacy stop flag mechanism (without user check) AppQueueManager.set_stop_flag_no_user_check(task_id) @@ -567,17 +523,12 @@ class DraftWorkflowNodeRunApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_run_node_execution_fields) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") parser.add_argument("query", type=str, required=False, location="json", default="") @@ -621,17 +572,11 @@ class PublishedWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def get(self, app_model: App): """ Get published workflow """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # fetch published workflow by app_model workflow_service = WorkflowService() workflow = workflow_service.get_published_workflow(app_model=app_model) @@ -643,16 +588,12 @@ class PublishedWorkflowApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App): """ Publish workflow """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("marked_name", type=str, required=False, default="", location="json") parser.add_argument("marked_comment", type=str, required=False, default="", location="json") @@ -674,8 +615,12 @@ class PublishedWorkflowApi(Resource): marked_comment=args.marked_comment or "", ) - app_model.workflow_id = workflow.id - db.session.commit() # NOTE: this is necessary for update app_model.workflow_id + # Update app_model within the same session to ensure atomicity + 
app_model_in_session = session.get(App, app_model.id) + if app_model_in_session: + app_model_in_session.workflow_id = workflow.id + app_model_in_session.updated_by = current_user.id + app_model_in_session.updated_at = naive_utc_now() workflow_created_at = TimestampField().format(workflow.created_at) @@ -697,17 +642,11 @@ class DefaultBlockConfigsApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def get(self, app_model: App): """ Get default block config """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # Get default block configs workflow_service = WorkflowService() return workflow_service.get_default_block_configs() @@ -724,16 +663,11 @@ class DefaultBlockConfigApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def get(self, app_model: App, block_type: str): """ Get default block config """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("q", type=str, location="args") args = parser.parse_args() @@ -764,17 +698,14 @@ class ConvertToWorkflowApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.COMPLETION]) + @edit_permission_required def post(self, app_model: App): """ Convert basic mode of chatbot app to workflow mode Convert expert mode of chatbot app to workflow mode Convert Completion App to Workflow App """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() if request.data: parser = reqparse.RequestParser() @@ -807,15 +738,12 @@ class PublishedAllWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_pagination_fields) + @edit_permission_required def get(self, app_model: App): """ Get published workflows """ - - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") @@ -874,16 +802,12 @@ class WorkflowByIdApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def patch(self, app_model: App, workflow_id: str): """ Update workflow attributes """ - if not isinstance(current_user, Account): - raise Forbidden() - # Check permission - if not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("marked_name", type=str, required=False, location="json") parser.add_argument("marked_comment", type=str, required=False, location="json") @@ -929,16 +853,11 @@ class WorkflowByIdApi(Resource): @login_required 
@account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def delete(self, app_model: App, workflow_id: str): """ Delete workflow """ - if not isinstance(current_user, Account): - raise Forbidden() - # Check permission - if not current_user.has_edit_permission: - raise Forbidden() - workflow_service = WorkflowService() # Create a session and manage the transaction diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py index da6b56d026..5e865dc4c1 100644 --- a/api/controllers/console/app/workflow_draft_variable.py +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -22,8 +22,7 @@ from extensions.ext_database import db from factories.file_factory import build_from_mapping, build_from_mappings from factories.variable_factory import build_segment_with_type from libs.login import current_user, login_required -from models import App, AppMode -from models.account import Account +from models import Account, App, AppMode from models.workflow import WorkflowDraftVariable from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService from services.workflow_service import WorkflowService diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 23ba63845c..286ba65a7f 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -1,6 +1,5 @@ from typing import cast -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range @@ -14,7 +13,7 @@ from fields.workflow_run_fields import ( workflow_run_pagination_fields, ) from libs.helper import uuid_value -from libs.login import login_required +from libs.login import current_user, login_required from models import Account, App, AppMode, EndUser from services.workflow_run_service import WorkflowRunService diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index 535e7cadd6..8f7f936c9b 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -4,7 +4,6 @@ from decimal import Decimal import pytz import sqlalchemy as sa from flask import jsonify -from flask_login import current_user from flask_restx import Resource, reqparse from controllers.console import api, console_ns @@ -12,7 +11,7 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from libs.helper import DatetimeString -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.enums import WorkflowRunTriggeredFrom from models.model import AppMode @@ -29,7 +28,7 @@ class WorkflowDailyRunsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -47,9 +46,9 @@ WHERE arg_dict = { "tz": account.timezone, "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, + "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = 
pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -97,7 +96,7 @@ class WorkflowDailyTerminalsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -115,9 +114,9 @@ WHERE arg_dict = { "tz": account.timezone, "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, + "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -165,7 +164,7 @@ class WorkflowDailyTokenCostStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -183,9 +182,9 @@ WHERE arg_dict = { "tz": account.timezone, "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, + "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -238,7 +237,7 @@ class WorkflowAverageAppInteractionStatistic(Resource): @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") @@ -269,9 +268,9 @@ GROUP BY arg_dict = { "tz": account.timezone, "app_id": app_model.id, - "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value, + "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 44aba01820..9bb2718f89 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -4,28 +4,29 @@ from typing import ParamSpec, TypeVar, Union from controllers.console.app.error import AppNotFoundError from extensions.ext_database import db -from libs.login import current_user +from libs.login import current_account_with_tenant from models import App, AppMode -from models.account import Account P = ParamSpec("P") R = TypeVar("R") +P1 = ParamSpec("P1") +R1 = TypeVar("R1") def _load_app_model(app_id: str) -> App | None: - assert isinstance(current_user, Account) + _, current_tenant_id = current_account_with_tenant() app_model = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) return app_model def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None): - def decorator(view_func: Callable[P, R]): + def decorator(view_func: Callable[P1, R1]): @wraps(view_func) - def decorated_view(*args: P.args, **kwargs: P.kwargs): + def decorated_view(*args: P1.args, **kwargs: P1.kwargs): if not kwargs.get("app_id"): raise ValueError("missing app_id in path parameters") diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py index 
8cdadfb03c..06d2b936b7 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -7,7 +7,7 @@ from controllers.console.error import AlreadyActivateError from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from libs.helper import StrLen, email, extract_remote_ip, timezone -from models.account import AccountStatus +from models import AccountStatus from services.account_service import AccountService, RegisterService active_check_parser = reqparse.RequestParser() @@ -103,7 +103,7 @@ class ActivateApi(Resource): account.interface_language = args["interface_language"] account.timezone = args["timezone"] account.interface_theme = "light" - account.status = AccountStatus.ACTIVE.value + account.status = AccountStatus.ACTIVE account.initialized_at = naive_utc_now() db.session.commit() diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py index 796e6916cc..d9ab7de29b 100644 --- a/api/controllers/console/auth/data_source_bearer_auth.py +++ b/api/controllers/console/auth/data_source_bearer_auth.py @@ -1,21 +1,22 @@ -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ApiKeyAuthFailedError -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.auth.api_key_auth_service import ApiKeyAuthService from ..wraps import account_initialization_required, setup_required +@console_ns.route("/api-key-auth/data-source") class ApiKeyAuthDataSource(Resource): @setup_required @login_required @account_initialization_required def get(self): - data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_tenant_id) if data_source_api_key_bindings: return { "sources": [ @@ -33,12 +34,15 @@ class ApiKeyAuthDataSource(Resource): return {"sources": []} +@console_ns.route("/api-key-auth/data-source/binding") class ApiKeyAuthDataSourceBinding(Resource): @setup_required @login_required @account_initialization_required def post(self): # The role of the current user in the table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() parser = reqparse.RequestParser() @@ -48,26 +52,24 @@ class ApiKeyAuthDataSourceBinding(Resource): args = parser.parse_args() ApiKeyAuthService.validate_api_key_auth_args(args) try: - ApiKeyAuthService.create_provider_auth(current_user.current_tenant_id, args) + ApiKeyAuthService.create_provider_auth(current_tenant_id, args) except Exception as e: raise ApiKeyAuthFailedError(str(e)) return {"result": "success"}, 200 +@console_ns.route("/api-key-auth/data-source/<uuid:binding_id>") class ApiKeyAuthDataSourceBindingDelete(Resource): @setup_required @login_required @account_initialization_required def delete(self, binding_id): # The role of the current user in the table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id) +
ApiKeyAuthService.delete_provider_auth(current_tenant_id, binding_id) return {"result": "success"}, 204 - - -api.add_resource(ApiKeyAuthDataSource, "/api-key-auth/data-source") -api.add_resource(ApiKeyAuthDataSourceBinding, "/api-key-auth/data-source/binding") -api.add_resource(ApiKeyAuthDataSourceBindingDelete, "/api-key-auth/data-source/<uuid:binding_id>") diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index 6f1fd2f11a..0fd433d718 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -2,13 +2,12 @@ import logging import httpx from flask import current_app, redirect, request -from flask_login import current_user from flask_restx import Resource, fields from werkzeug.exceptions import Forbidden from configs import dify_config from controllers.console import api, console_ns -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from libs.oauth_data_source import NotionOAuth from ..wraps import account_initialization_required, setup_required @@ -45,6 +44,7 @@ class OAuthDataSource(Resource): @api.response(403, "Admin privileges required") def get(self, provider: str): # The role of the current user in the table must be admin or owner + current_user, _ = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers() diff --git a/api/controllers/console/auth/email_register.py b/api/controllers/console/auth/email_register.py index 91de19a78a..cabd118d23 100644 --- a/api/controllers/console/auth/email_register.py +++ b/api/controllers/console/auth/email_register.py @@ -5,7 +5,7 @@ from sqlalchemy.orm import Session from configs import dify_config from constants.languages import languages -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ( EmailAlreadyInUseError, EmailCodeError, @@ -19,12 +19,13 @@ from controllers.console.wraps import email_password_login_enabled, email_regist from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import valid_password -from models.account import Account +from models import Account from services.account_service import AccountService from services.billing_service import BillingService from services.errors.account import AccountNotFoundError, AccountRegisterError +@console_ns.route("/email-register/send-email") class EmailRegisterSendEmailApi(Resource): @setup_required @email_password_login_enabled @@ -52,6 +53,7 @@ class EmailRegisterSendEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/email-register/validity") class EmailRegisterCheckApi(Resource): @setup_required @email_password_login_enabled @@ -92,6 +94,7 @@ class EmailRegisterCheckApi(Resource): return {"is_valid": True, "email": token_data.get("email"), "token": new_token} +@console_ns.route("/email-register") class EmailRegisterResetApi(Resource): @setup_required @email_password_login_enabled @@ -148,8 +151,3 @@ class EmailRegisterResetApi(Resource): raise AccountInFreezeError() return account - - -api.add_resource(EmailRegisterSendEmailApi, "/email-register/send-email") -api.add_resource(EmailRegisterCheckApi, "/email-register/validity") -api.add_resource(EmailRegisterResetApi, "/email-register") diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py
index 36ccb1d562..102d33966e 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -20,7 +20,7 @@ from events.tenant_event import tenant_was_created from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password -from models.account import Account +from models import Account from services.account_service import AccountService, TenantService from services.feature_service import FeatureService @@ -221,8 +221,3 @@ class ForgotPasswordResetApi(Resource): TenantService.create_tenant_member(tenant, account, role="owner") account.current_tenant = tenant tenant_was_created.send(tenant) - - -api.add_resource(ForgotPasswordSendEmailApi, "/forgot-password") -api.add_resource(ForgotPasswordCheckApi, "/forgot-password/validity") -api.add_resource(ForgotPasswordResetApi, "/forgot-password/resets") diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 3b35ab3c23..e4bbbf107b 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -1,5 +1,3 @@ -from typing import cast - import flask_login from flask import request from flask_restx import Resource, reqparse @@ -7,7 +5,7 @@ from flask_restx import Resource, reqparse import services from configs import dify_config from constants.languages import languages -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ( AuthenticationFailedError, EmailCodeError, @@ -26,7 +24,7 @@ from controllers.console.error import ( from controllers.console.wraps import email_password_login_enabled, setup_required from events.tenant_event import tenant_was_created from libs.helper import email, extract_remote_ip -from models.account import Account +from libs.login import current_account_with_tenant from services.account_service import AccountService, RegisterService, TenantService from services.billing_service import BillingService from services.errors.account import AccountRegisterError @@ -34,6 +32,7 @@ from services.errors.workspace import WorkSpaceNotAllowedCreateError, Workspaces from services.feature_service import FeatureService +@console_ns.route("/login") class LoginApi(Resource): """Resource for user login.""" @@ -91,10 +90,12 @@ class LoginApi(Resource): return {"result": "success", "data": token_pair.model_dump()} +@console_ns.route("/logout") class LogoutApi(Resource): @setup_required def get(self): - account = cast(Account, flask_login.current_user) + current_user, _ = current_account_with_tenant() + account = current_user if isinstance(account, flask_login.AnonymousUserMixin): return {"result": "success"} AccountService.logout(account=account) @@ -102,6 +103,7 @@ class LogoutApi(Resource): return {"result": "success"} +@console_ns.route("/reset-password") class ResetPasswordSendEmailApi(Resource): @setup_required @email_password_login_enabled @@ -130,6 +132,7 @@ class ResetPasswordSendEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/email-code-login") class EmailCodeLoginSendEmailApi(Resource): @setup_required def post(self): @@ -162,6 +165,7 @@ class EmailCodeLoginSendEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/email-code-login/validity") class EmailCodeLoginApi(Resource): @setup_required def post(self): @@ -218,6 +222,7 @@ class EmailCodeLoginApi(Resource): return {"result": "success", "data": 
token_pair.model_dump()} +@console_ns.route("/refresh-token") class RefreshTokenApi(Resource): def post(self): parser = reqparse.RequestParser() @@ -229,11 +234,3 @@ class RefreshTokenApi(Resource): return {"result": "success", "data": new_token_pair.model_dump()} except Exception as e: return {"result": "fail", "data": str(e)}, 401 - - -api.add_resource(LoginApi, "/login") -api.add_resource(LogoutApi, "/logout") -api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login") -api.add_resource(EmailCodeLoginApi, "/email-code-login/validity") -api.add_resource(ResetPasswordSendEmailApi, "/reset-password") -api.add_resource(RefreshTokenApi, "/refresh-token") diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 5528dc0569..52459ad5eb 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -14,8 +14,7 @@ from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from libs.helper import extract_remote_ip from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo -from models import Account -from models.account import AccountStatus +from models import Account, AccountStatus from services.account_service import AccountService, RegisterService, TenantService from services.billing_service import BillingService from services.errors.account import AccountNotFoundError, AccountRegisterError @@ -130,11 +129,11 @@ class OAuthCallback(Resource): return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}") # Check account status - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account is banned.") - if account.status == AccountStatus.PENDING.value: - account.status = AccountStatus.ACTIVE.value + if account.status == AccountStatus.PENDING: + account.status = AccountStatus.ACTIVE account.initialized_at = naive_utc_now() db.session.commit() diff --git a/api/controllers/console/auth/oauth_server.py b/api/controllers/console/auth/oauth_server.py index a54c1443f8..188ef7f622 100644 --- a/api/controllers/console/auth/oauth_server.py +++ b/api/controllers/console/auth/oauth_server.py @@ -1,20 +1,19 @@ from collections.abc import Callable from functools import wraps -from typing import Concatenate, ParamSpec, TypeVar, cast +from typing import Concatenate, ParamSpec, TypeVar -import flask_login from flask import jsonify, request from flask_restx import Resource, reqparse from werkzeug.exceptions import BadRequest, NotFound from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder -from libs.login import login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required +from models import Account from models.model import OAuthProviderApp from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN, OAuthGrantType, OAuthServerService -from .. import api +from .. 
import console_ns P = ParamSpec("P") R = TypeVar("R") @@ -86,6 +85,7 @@ def oauth_server_access_token_required(view: Callable[Concatenate[T, OAuthProvid return decorated +@console_ns.route("/oauth/provider") class OAuthServerAppApi(Resource): @setup_required @oauth_server_client_id_required @@ -108,13 +108,15 @@ class OAuthServerAppApi(Resource): ) +@console_ns.route("/oauth/provider/authorize") class OAuthServerUserAuthorizeApi(Resource): @setup_required @login_required @account_initialization_required @oauth_server_client_id_required def post(self, oauth_provider_app: OAuthProviderApp): - account = cast(Account, flask_login.current_user) + current_user, _ = current_account_with_tenant() + account = current_user user_account_id = account.id code = OAuthServerService.sign_oauth_authorization_code(oauth_provider_app.client_id, user_account_id) @@ -125,6 +127,7 @@ class OAuthServerUserAuthorizeApi(Resource): ) +@console_ns.route("/oauth/provider/token") class OAuthServerUserTokenApi(Resource): @setup_required @oauth_server_client_id_required @@ -180,6 +183,7 @@ class OAuthServerUserTokenApi(Resource): ) +@console_ns.route("/oauth/provider/account") class OAuthServerUserAccountApi(Resource): @setup_required @oauth_server_client_id_required @@ -194,9 +198,3 @@ class OAuthServerUserAccountApi(Resource): "timezone": account.timezone, } ) - - -api.add_resource(OAuthServerAppApi, "/oauth/provider") -api.add_resource(OAuthServerUserAuthorizeApi, "/oauth/provider/authorize") -api.add_resource(OAuthServerUserTokenApi, "/oauth/provider/token") -api.add_resource(OAuthServerUserAccountApi, "/oauth/provider/account") diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index 39fc7dec6b..5c89b29057 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -1,42 +1,34 @@ from flask_restx import Resource, reqparse -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required -from libs.login import current_user, login_required -from models.model import Account +from libs.login import current_account_with_tenant, login_required from services.billing_service import BillingService +@console_ns.route("/billing/subscription") class Subscription(Resource): @setup_required @login_required @account_initialization_required @only_edition_cloud def get(self): + current_user, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"]) parser.add_argument("interval", type=str, required=True, location="args", choices=["month", "year"]) args = parser.parse_args() - assert isinstance(current_user, Account) - BillingService.is_tenant_owner_or_admin(current_user) - assert current_user.current_tenant_id is not None - return BillingService.get_subscription( - args["plan"], args["interval"], current_user.email, current_user.current_tenant_id - ) + return BillingService.get_subscription(args["plan"], args["interval"], current_user.email, current_tenant_id) +@console_ns.route("/billing/invoices") class Invoices(Resource): @setup_required @login_required @account_initialization_required @only_edition_cloud def get(self): - assert isinstance(current_user, Account) + current_user, current_tenant_id = current_account_with_tenant() BillingService.is_tenant_owner_or_admin(current_user) - 
assert current_user.current_tenant_id is not None - return BillingService.get_invoices(current_user.email, current_user.current_tenant_id) - - -api.add_resource(Subscription, "/billing/subscription") -api.add_resource(Invoices, "/billing/invoices") + return BillingService.get_invoices(current_user.email, current_tenant_id) diff --git a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py index 4bc073f679..3b32fe29a1 100644 --- a/api/controllers/console/billing/compliance.py +++ b/api/controllers/console/billing/compliance.py @@ -1,35 +1,32 @@ from flask import request -from flask_login import current_user from flask_restx import Resource, reqparse from libs.helper import extract_remote_ip -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.billing_service import BillingService -from .. import api +from .. import console_ns from ..wraps import account_initialization_required, only_edition_cloud, setup_required +@console_ns.route("/compliance/download") class ComplianceApi(Resource): @setup_required @login_required @account_initialization_required @only_edition_cloud def get(self): + current_user, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("doc_name", type=str, required=True, location="args") args = parser.parse_args() ip_address = extract_remote_ip(request) device_info = request.headers.get("User-Agent", "Unknown device") - return BillingService.get_compliance_download_link( doc_name=args.doc_name, account_id=current_user.id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, ip=ip_address, device_info=device_info, ) - - -api.add_resource(ComplianceApi, "/compliance/download") diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index 3a9530af84..058ef4408a 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -3,40 +3,45 @@ from collections.abc import Generator from typing import cast from flask import request -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.datasource.entities.datasource_entities import DatasourceProviderType, OnlineDocumentPagesMessage from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin from core.indexing_runner import IndexingRunner from core.rag.extractor.entity.datasource_type import DatasourceType -from core.rag.extractor.entity.extract_setting import ExtractSetting +from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo from core.rag.extractor.notion_extractor import NotionExtractor from extensions.ext_database import db from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields from libs.datetime_utils import naive_utc_now -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import DataSourceOauthBinding, Document from services.dataset_service import DatasetService, DocumentService from services.datasource_provider_service import DatasourceProviderService from 
tasks.document_indexing_sync_task import document_indexing_sync_task +@console_ns.route( + "/data-source/integrates", + "/data-source/integrates//", +) class DataSourceApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(integrate_list_fields) def get(self): + _, current_tenant_id = current_account_with_tenant() + # get workspace data source integrates data_source_integrates = db.session.scalars( select(DataSourceOauthBinding).where( - DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, + DataSourceOauthBinding.tenant_id == current_tenant_id, DataSourceOauthBinding.disabled == False, ) ).all() @@ -109,19 +114,22 @@ class DataSourceApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/notion/pre-import/pages") class DataSourceNotionListApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(integrate_notion_info_list_fields) def get(self): + current_user, current_tenant_id = current_account_with_tenant() + dataset_id = request.args.get("dataset_id", default=None, type=str) credential_id = request.args.get("credential_id", default=None, type=str) if not credential_id: raise ValueError("Credential id is required.") datasource_provider_service = DatasourceProviderService() credential = datasource_provider_service.get_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, credential_id=credential_id, provider="notion_datasource", plugin_id="langgenius/notion_datasource", @@ -141,7 +149,7 @@ class DataSourceNotionListApi(Resource): documents = session.scalars( select(Document).filter_by( dataset_id=dataset_id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, data_source_type="notion_import", enabled=True, ) @@ -156,7 +164,7 @@ class DataSourceNotionListApi(Resource): datasource_runtime = DatasourceManager.get_datasource_runtime( provider_id="langgenius/notion_datasource/notion_datasource", datasource_name="notion_datasource", - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_type=DatasourceProviderType.ONLINE_DOCUMENT, ) datasource_provider_service = DatasourceProviderService() @@ -196,17 +204,23 @@ class DataSourceNotionListApi(Resource): return {"notion_info": {**workspace_info, "pages": pages}}, 200 +@console_ns.route( + "/notion/workspaces//pages///preview", + "/datasets/notion-indexing-estimate", +) class DataSourceNotionApi(Resource): @setup_required @login_required @account_initialization_required def get(self, workspace_id, page_id, page_type): + _, current_tenant_id = current_account_with_tenant() + credential_id = request.args.get("credential_id", default=None, type=str) if not credential_id: raise ValueError("Credential id is required.") datasource_provider_service = DatasourceProviderService() credential = datasource_provider_service.get_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, credential_id=credential_id, provider="notion_datasource", plugin_id="langgenius/notion_datasource", @@ -220,7 +234,7 @@ class DataSourceNotionApi(Resource): notion_obj_id=page_id, notion_page_type=page_type, notion_access_token=credential.get("integration_secret"), - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, ) text_docs = extractor.extract() @@ -230,6 +244,8 @@ class DataSourceNotionApi(Resource): @login_required @account_initialization_required def post(self): + _, current_tenant_id = 
current_account_with_tenant() + parser = reqparse.RequestParser() parser.add_argument("notion_info_list", type=list, required=True, nullable=True, location="json") parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json") @@ -247,20 +263,22 @@ class DataSourceNotionApi(Resource): credential_id = notion_info.get("credential_id") for page in notion_info["pages"]: extract_setting = ExtractSetting( - datasource_type=DatasourceType.NOTION.value, - notion_info={ - "credential_id": credential_id, - "notion_workspace_id": workspace_id, - "notion_obj_id": page["page_id"], - "notion_page_type": page["type"], - "tenant_id": current_user.current_tenant_id, - }, + datasource_type=DatasourceType.NOTION, + notion_info=NotionInfo.model_validate( + { + "credential_id": credential_id, + "notion_workspace_id": workspace_id, + "notion_obj_id": page["page_id"], + "notion_page_type": page["type"], + "tenant_id": current_tenant_id, + } + ), document_model=args["doc_form"], ) extract_settings.append(extract_setting) indexing_runner = IndexingRunner() response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, extract_settings, args["process_rule"], args["doc_form"], @@ -269,6 +287,7 @@ class DataSourceNotionApi(Resource): return response.model_dump(), 200 +@console_ns.route("/datasets//notion/sync") class DataSourceNotionDatasetSyncApi(Resource): @setup_required @login_required @@ -285,6 +304,7 @@ class DataSourceNotionDatasetSyncApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/datasets//documents//notion/sync") class DataSourceNotionDocumentSyncApi(Resource): @setup_required @login_required @@ -301,16 +321,3 @@ class DataSourceNotionDocumentSyncApi(Resource): raise NotFound("Document not found.") document_indexing_sync_task.delay(dataset_id_str, document_id_str) return {"result": "success"}, 200 - - -api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates//") -api.add_resource(DataSourceNotionListApi, "/notion/pre-import/pages") -api.add_resource( - DataSourceNotionApi, - "/notion/workspaces//pages///preview", - "/datasets/notion-indexing-estimate", -) -api.add_resource(DataSourceNotionDatasetSyncApi, "/datasets//notion/sync") -api.add_resource( - DataSourceNotionDocumentSyncApi, "/datasets//documents//notion/sync" -) diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 2affbd6a42..c03767d2e6 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -1,6 +1,6 @@ -import flask_restx +from typing import Any, cast + from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse from sqlalchemy import select from werkzeug.exceptions import Forbidden, NotFound @@ -23,29 +23,97 @@ from core.model_runtime.entities.model_entities import ModelType from core.provider_manager import ProviderManager from core.rag.datasource.vdb.vector_type import VectorType from core.rag.extractor.entity.datasource_type import DatasourceType -from core.rag.extractor.entity.extract_setting import ExtractSetting +from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from fields.app_fields import related_app_list from fields.dataset_fields import dataset_detail_fields, 
dataset_query_detail_fields from fields.document_fields import document_status_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required +from libs.validators import validate_description_length from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile from models.dataset import DatasetPermissionEnum from models.provider_ids import ModelProviderID from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 40: raise ValueError("Name must be between 1 to 40 characters.") return name -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description +def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool = False) -> dict[str, list[str]]: + """ + Get supported retrieval methods based on vector database type. + + Args: + vector_type: Vector database type, can be None + is_mock: Whether this is a Mock API, affects MILVUS handling + + Returns: + Dictionary containing supported retrieval methods + + Raises: + ValueError: If vector_type is None or unsupported + """ + if vector_type is None: + raise ValueError("Vector store type is not configured.") + + # Define vector database types that only support semantic search + semantic_only_types = { + VectorType.RELYT, + VectorType.TIDB_VECTOR, + VectorType.CHROMA, + VectorType.PGVECTO_RS, + VectorType.VIKINGDB, + VectorType.UPSTASH, + } + + # Define vector database types that support all retrieval methods + full_search_types = { + VectorType.QDRANT, + VectorType.WEAVIATE, + VectorType.OPENSEARCH, + VectorType.ANALYTICDB, + VectorType.MYSCALE, + VectorType.ORACLE, + VectorType.ELASTICSEARCH, + VectorType.ELASTICSEARCH_JA, + VectorType.PGVECTOR, + VectorType.VASTBASE, + VectorType.TIDB_ON_QDRANT, + VectorType.LINDORM, + VectorType.COUCHBASE, + VectorType.OPENGAUSS, + VectorType.OCEANBASE, + VectorType.TABLESTORE, + VectorType.HUAWEI_CLOUD, + VectorType.TENCENT, + VectorType.MATRIXONE, + VectorType.CLICKZETTA, + VectorType.BAIDU, + VectorType.ALIBABACLOUD_MYSQL, + } + + semantic_methods = {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} + full_methods = { + "retrieval_method": [ + RetrievalMethod.SEMANTIC_SEARCH.value, + RetrievalMethod.FULL_TEXT_SEARCH.value, + RetrievalMethod.HYBRID_SEARCH.value, + ] + } + + if vector_type == VectorType.MILVUS: + return semantic_methods if is_mock else full_methods + + if vector_type in semantic_only_types: + return semantic_methods + elif vector_type in full_search_types: + return full_methods + else: + raise ValueError(f"Unsupported vector db type {vector_type}.") @console_ns.route("/datasets") @@ -68,6 +136,7 @@ class DatasetListApi(Resource): @account_initialization_required @enterprise_license_required def get(self): + current_user, current_tenant_id = current_account_with_tenant() page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) ids = request.args.getlist("ids") @@ -76,15 +145,15 @@ class DatasetListApi(Resource): tag_ids = request.args.getlist("tag_ids") include_all = request.args.get("include_all", default="false").lower() == "true" if ids: - datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id) + datasets, total = DatasetService.get_datasets_by_ids(ids, 
current_tenant_id) else: datasets, total = DatasetService.get_datasets( - page, limit, current_user.current_tenant_id, current_user, search, tag_ids, include_all + page, limit, current_tenant_id, current_user, search, tag_ids, include_all ) # check embedding setting provider_manager = ProviderManager() - configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id) + configurations = provider_manager.get_configurations(tenant_id=current_tenant_id) embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True) @@ -92,7 +161,7 @@ class DatasetListApi(Resource): for embedding_model in embedding_models: model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}") - data = marshal(datasets, dataset_detail_fields) + data = cast(list[dict[str, Any]], marshal(datasets, dataset_detail_fields)) for item in data: # convert embedding_model_provider to plugin standard format if item["indexing_technique"] == "high_quality" and item["embedding_model_provider"]: @@ -147,7 +216,7 @@ class DatasetListApi(Resource): ) parser.add_argument( "description", - type=_validate_description_length, + type=validate_description_length, nullable=True, required=False, default="", @@ -181,6 +250,7 @@ class DatasetListApi(Resource): required=False, ) args = parser.parse_args() + current_user, current_tenant_id = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator if not current_user.is_dataset_editor: @@ -188,7 +258,7 @@ class DatasetListApi(Resource): try: dataset = DatasetService.create_empty_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, name=args["name"], description=args["description"], indexing_technique=args["indexing_technique"], @@ -216,6 +286,7 @@ class DatasetApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + current_user, current_tenant_id = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -224,7 +295,7 @@ class DatasetApi(Resource): DatasetService.check_dataset_permission(dataset, current_user) except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - data = marshal(dataset, dataset_detail_fields) + data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) if dataset.indexing_technique == "high_quality": if dataset.embedding_model_provider: provider_id = ModelProviderID(dataset.embedding_model_provider) @@ -235,7 +306,7 @@ class DatasetApi(Resource): # check embedding setting provider_manager = ProviderManager() - configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id) + configurations = provider_manager.get_configurations(tenant_id=current_tenant_id) embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True) @@ -288,7 +359,7 @@ class DatasetApi(Resource): help="type is required. 
Name must be between 1 to 40 characters.", type=_validate_name, ) - parser.add_argument("description", location="json", store_missing=False, type=_validate_description_length) + parser.add_argument("description", location="json", store_missing=False, type=validate_description_length) parser.add_argument( "indexing_technique", type=str, @@ -348,6 +419,7 @@ class DatasetApi(Resource): ) args = parser.parse_args() data = request.get_json() + current_user, current_tenant_id = current_account_with_tenant() # check embedding model setting if ( @@ -369,8 +441,8 @@ class DatasetApi(Resource): if dataset is None: raise NotFound("Dataset not found.") - result_data = marshal(dataset, dataset_detail_fields) - tenant_id = current_user.current_tenant_id + result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) + tenant_id = current_tenant_id if data.get("partial_member_list") and data.get("permission") == "partial_members": DatasetPermissionService.update_partial_member_list( @@ -394,9 +466,10 @@ class DatasetApi(Resource): @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id): dataset_id_str = str(dataset_id) + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, or editor - if not (current_user.is_editor or current_user.is_dataset_operator): + if not (current_user.has_edit_permission or current_user.is_dataset_operator): raise Forbidden() try: @@ -435,6 +508,7 @@ class DatasetQueryApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -486,15 +560,14 @@ class DatasetIndexingEstimateApi(Resource): "doc_language", type=str, default="English", required=False, nullable=False, location="json" ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() # validate args DocumentService.estimate_args_validate(args) extract_settings = [] if args["info_list"]["data_source_type"] == "upload_file": file_ids = args["info_list"]["file_info_list"]["file_ids"] file_details = db.session.scalars( - select(UploadFile).where( - UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids) - ) + select(UploadFile).where(UploadFile.tenant_id == current_tenant_id, UploadFile.id.in_(file_ids)) ).all() if file_details is None: @@ -503,7 +576,7 @@ class DatasetIndexingEstimateApi(Resource): if file_details: for file_detail in file_details: extract_setting = ExtractSetting( - datasource_type=DatasourceType.FILE.value, + datasource_type=DatasourceType.FILE, upload_file=file_detail, document_model=args["doc_form"], ) @@ -515,14 +588,16 @@ class DatasetIndexingEstimateApi(Resource): credential_id = notion_info.get("credential_id") for page in notion_info["pages"]: extract_setting = ExtractSetting( - datasource_type=DatasourceType.NOTION.value, - notion_info={ - "credential_id": credential_id, - "notion_workspace_id": workspace_id, - "notion_obj_id": page["page_id"], - "notion_page_type": page["type"], - "tenant_id": current_user.current_tenant_id, - }, + datasource_type=DatasourceType.NOTION, + notion_info=NotionInfo.model_validate( + { + "credential_id": credential_id, + "notion_workspace_id": workspace_id, + "notion_obj_id": page["page_id"], + "notion_page_type": page["type"], + "tenant_id": current_tenant_id, + } + ), document_model=args["doc_form"], ) 
extract_settings.append(extract_setting) @@ -530,15 +605,17 @@ class DatasetIndexingEstimateApi(Resource): website_info_list = args["info_list"]["website_info_list"] for url in website_info_list["urls"]: extract_setting = ExtractSetting( - datasource_type=DatasourceType.WEBSITE.value, - website_info={ - "provider": website_info_list["provider"], - "job_id": website_info_list["job_id"], - "url": url, - "tenant_id": current_user.current_tenant_id, - "mode": "crawl", - "only_main_content": website_info_list["only_main_content"], - }, + datasource_type=DatasourceType.WEBSITE, + website_info=WebsiteInfo.model_validate( + { + "provider": website_info_list["provider"], + "job_id": website_info_list["job_id"], + "url": url, + "tenant_id": current_tenant_id, + "mode": "crawl", + "only_main_content": website_info_list["only_main_content"], + } + ), document_model=args["doc_form"], ) extract_settings.append(extract_setting) @@ -547,7 +624,7 @@ class DatasetIndexingEstimateApi(Resource): indexing_runner = IndexingRunner() try: response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, extract_settings, args["process_rule"], args["doc_form"], @@ -578,6 +655,7 @@ class DatasetRelatedAppListApi(Resource): @account_initialization_required @marshal_with(related_app_list) def get(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -609,11 +687,10 @@ class DatasetIndexingStatusApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) documents = db.session.scalars( - select(Document).where( - Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id - ) + select(Document).where(Document.dataset_id == dataset_id, Document.tenant_id == current_tenant_id) ).all() documents_status = [] for document in documents: @@ -665,10 +742,9 @@ class DatasetApiKeyApi(Resource): @account_initialization_required @marshal_with(api_key_list) def get(self): + _, current_tenant_id = current_account_with_tenant() keys = db.session.scalars( - select(ApiToken).where( - ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id - ) + select(ApiToken).where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id) ).all() return {"items": keys} @@ -678,17 +754,18 @@ class DatasetApiKeyApi(Resource): @marshal_with(api_key_fields) def post(self): # The role of the current user in the ta table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() current_key_count = ( db.session.query(ApiToken) - .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id) + .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id) .count() ) if current_key_count >= self.max_keys: - flask_restx.abort( + api.abort( 400, message=f"Cannot create more than {self.max_keys} API keys for this resource type.", code="max_keys_exceeded", @@ -696,7 +773,7 @@ class DatasetApiKeyApi(Resource): key = ApiToken.generate_api_key(self.token_prefix, 24) api_token = ApiToken() - api_token.tenant_id = current_user.current_tenant_id + api_token.tenant_id = current_tenant_id api_token.token = key api_token.type = self.resource_type db.session.add(api_token) @@ 
-716,6 +793,7 @@ class DatasetApiDeleteApi(Resource): @login_required @account_initialization_required def delete(self, api_key_id): + current_user, current_tenant_id = current_account_with_tenant() api_key_id = str(api_key_id) # The role of the current user in the ta table must be admin or owner @@ -725,7 +803,7 @@ class DatasetApiDeleteApi(Resource): key = ( db.session.query(ApiToken) .where( - ApiToken.tenant_id == current_user.current_tenant_id, + ApiToken.tenant_id == current_tenant_id, ApiToken.type == self.resource_type, ApiToken.id == api_key_id, ) @@ -733,7 +811,7 @@ class DatasetApiDeleteApi(Resource): ) if key is None: - flask_restx.abort(404, message="API key not found") + api.abort(404, message="API key not found") db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete() db.session.commit() @@ -776,49 +854,7 @@ class DatasetRetrievalSettingApi(Resource): @account_initialization_required def get(self): vector_type = dify_config.VECTOR_STORE - match vector_type: - case ( - VectorType.RELYT - | VectorType.TIDB_VECTOR - | VectorType.CHROMA - | VectorType.PGVECTO_RS - | VectorType.VIKINGDB - | VectorType.UPSTASH - ): - return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} - case ( - VectorType.QDRANT - | VectorType.WEAVIATE - | VectorType.OPENSEARCH - | VectorType.ANALYTICDB - | VectorType.MYSCALE - | VectorType.ORACLE - | VectorType.ELASTICSEARCH - | VectorType.ELASTICSEARCH_JA - | VectorType.PGVECTOR - | VectorType.VASTBASE - | VectorType.TIDB_ON_QDRANT - | VectorType.LINDORM - | VectorType.COUCHBASE - | VectorType.MILVUS - | VectorType.OPENGAUSS - | VectorType.OCEANBASE - | VectorType.TABLESTORE - | VectorType.HUAWEI_CLOUD - | VectorType.TENCENT - | VectorType.MATRIXONE - | VectorType.CLICKZETTA - | VectorType.BAIDU - ): - return { - "retrieval_method": [ - RetrievalMethod.SEMANTIC_SEARCH.value, - RetrievalMethod.FULL_TEXT_SEARCH.value, - RetrievalMethod.HYBRID_SEARCH.value, - ] - } - case _: - raise ValueError(f"Unsupported vector db type {vector_type}.") + return _get_retrieval_methods_by_vector_type(vector_type, is_mock=False) @console_ns.route("/datasets/retrieval-setting/") @@ -831,48 +867,7 @@ class DatasetRetrievalSettingMockApi(Resource): @login_required @account_initialization_required def get(self, vector_type): - match vector_type: - case ( - VectorType.MILVUS - | VectorType.RELYT - | VectorType.TIDB_VECTOR - | VectorType.CHROMA - | VectorType.PGVECTO_RS - | VectorType.VIKINGDB - | VectorType.UPSTASH - ): - return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} - case ( - VectorType.QDRANT - | VectorType.WEAVIATE - | VectorType.OPENSEARCH - | VectorType.ANALYTICDB - | VectorType.MYSCALE - | VectorType.ORACLE - | VectorType.ELASTICSEARCH - | VectorType.ELASTICSEARCH_JA - | VectorType.COUCHBASE - | VectorType.PGVECTOR - | VectorType.VASTBASE - | VectorType.LINDORM - | VectorType.OPENGAUSS - | VectorType.OCEANBASE - | VectorType.TABLESTORE - | VectorType.TENCENT - | VectorType.HUAWEI_CLOUD - | VectorType.MATRIXONE - | VectorType.CLICKZETTA - | VectorType.BAIDU - ): - return { - "retrieval_method": [ - RetrievalMethod.SEMANTIC_SEARCH.value, - RetrievalMethod.FULL_TEXT_SEARCH.value, - RetrievalMethod.HYBRID_SEARCH.value, - ] - } - case _: - raise ValueError(f"Unsupported vector db type {vector_type}.") + return _get_retrieval_methods_by_vector_type(vector_type, is_mock=True) @console_ns.route("/datasets//error-docs") @@ -907,6 +902,7 @@ class DatasetPermissionUserListApi(Resource): @login_required @account_initialization_required 
def get(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index e6f5daa87b..9c0c54833e 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -6,7 +6,6 @@ from typing import Literal, cast import sqlalchemy as sa from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse from sqlalchemy import asc, desc, select from werkzeug.exceptions import Forbidden, NotFound @@ -44,7 +43,7 @@ from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.plugin.impl.exc import PluginDaemonClientSideError from core.rag.extractor.entity.datasource_type import DatasourceType -from core.rag.extractor.entity.extract_setting import ExtractSetting +from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo from extensions.ext_database import db from fields.document_fields import ( dataset_and_document_fields, @@ -53,7 +52,7 @@ from fields.document_fields import ( document_with_segments_fields, ) from libs.datetime_utils import naive_utc_now -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile from models.dataset import DocumentPipelineExecutionLog from services.dataset_service import DatasetService, DocumentService @@ -64,6 +63,7 @@ logger = logging.getLogger(__name__) class DocumentResource(Resource): def get_document(self, dataset_id: str, document_id: str) -> Document: + current_user, current_tenant_id = current_account_with_tenant() dataset = DatasetService.get_dataset(dataset_id) if not dataset: raise NotFound("Dataset not found.") @@ -78,12 +78,13 @@ class DocumentResource(Resource): if not document: raise NotFound("Document not found.") - if document.tenant_id != current_user.current_tenant_id: + if document.tenant_id != current_tenant_id: raise Forbidden("No permission.") return document def get_batch_documents(self, dataset_id: str, batch: str) -> Sequence[Document]: + current_user, _ = current_account_with_tenant() dataset = DatasetService.get_dataset(dataset_id) if not dataset: raise NotFound("Dataset not found.") @@ -111,6 +112,7 @@ class GetProcessRuleApi(Resource): @login_required @account_initialization_required def get(self): + current_user, _ = current_account_with_tenant() req_data = request.args document_id = req_data.get("document_id") @@ -167,6 +169,7 @@ class DatasetDocumentListApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + current_user, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) @@ -198,7 +201,7 @@ class DatasetDocumentListApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id) + query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_tenant_id) if search: search = f"%{search}%" @@ 
-272,6 +275,7 @@ class DatasetDocumentListApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -304,7 +308,7 @@ class DatasetDocumentListApi(Resource): "doc_language", type=str, default="English", required=False, nullable=False, location="json" ) args = parser.parse_args() - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) if not dataset.indexing_technique and not knowledge_config.indexing_technique: raise ValueError("indexing_technique is required.") @@ -371,6 +375,7 @@ class DatasetInitApi(Resource): @cloud_edition_billing_rate_limit_check("knowledge") def post(self): # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_dataset_editor: raise Forbidden() @@ -394,14 +399,14 @@ class DatasetInitApi(Resource): parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") args = parser.parse_args() - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) if knowledge_config.indexing_technique == "high_quality": if knowledge_config.embedding_model is None or knowledge_config.embedding_model_provider is None: raise ValueError("embedding model and embedding model provider are required for high quality indexing.") try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=args["embedding_model_provider"], model_type=ModelType.TEXT_EMBEDDING, model=args["embedding_model"], @@ -418,7 +423,9 @@ class DatasetInitApi(Resource): try: dataset, documents, batch = DocumentService.save_document_without_dataset_id( - tenant_id=current_user.current_tenant_id, knowledge_config=knowledge_config, account=current_user + tenant_id=current_tenant_id, + knowledge_config=knowledge_config, + account=current_user, ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -444,6 +451,7 @@ class DocumentIndexingEstimateApi(DocumentResource): @login_required @account_initialization_required def get(self, dataset_id, document_id): + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) document_id = str(document_id) document = self.get_document(dataset_id, document_id) @@ -452,7 +460,7 @@ class DocumentIndexingEstimateApi(DocumentResource): raise DocumentAlreadyFinishedError() data_process_rule = document.dataset_process_rule - data_process_rule_dict = data_process_rule.to_dict() + data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {} response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []} @@ -472,14 +480,14 @@ class DocumentIndexingEstimateApi(DocumentResource): raise NotFound("File not found.") extract_setting = ExtractSetting( - datasource_type=DatasourceType.FILE.value, upload_file=file, document_model=document.doc_form + datasource_type=DatasourceType.FILE, upload_file=file, document_model=document.doc_form ) indexing_runner = IndexingRunner() try: estimate_response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, [extract_setting], data_process_rule_dict, 
document.doc_form, @@ -508,13 +516,14 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): @login_required @account_initialization_required def get(self, dataset_id, batch): + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) batch = str(batch) documents = self.get_batch_documents(dataset_id, batch) if not documents: return {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}, 200 data_process_rule = documents[0].dataset_process_rule - data_process_rule_dict = data_process_rule.to_dict() + data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {} extract_settings = [] for document in documents: if document.indexing_status in {"completed", "error"}: @@ -527,7 +536,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): file_id = data_source_info["upload_file_id"] file_detail = ( db.session.query(UploadFile) - .where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id) + .where(UploadFile.tenant_id == current_tenant_id, UploadFile.id == file_id) .first() ) @@ -535,7 +544,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): raise NotFound("File not found.") extract_setting = ExtractSetting( - datasource_type=DatasourceType.FILE.value, upload_file=file_detail, document_model=document.doc_form + datasource_type=DatasourceType.FILE, upload_file=file_detail, document_model=document.doc_form ) extract_settings.append(extract_setting) @@ -543,14 +552,16 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): if not data_source_info: continue extract_setting = ExtractSetting( - datasource_type=DatasourceType.NOTION.value, - notion_info={ - "credential_id": data_source_info["credential_id"], - "notion_workspace_id": data_source_info["notion_workspace_id"], - "notion_obj_id": data_source_info["notion_page_id"], - "notion_page_type": data_source_info["type"], - "tenant_id": current_user.current_tenant_id, - }, + datasource_type=DatasourceType.NOTION, + notion_info=NotionInfo.model_validate( + { + "credential_id": data_source_info["credential_id"], + "notion_workspace_id": data_source_info["notion_workspace_id"], + "notion_obj_id": data_source_info["notion_page_id"], + "notion_page_type": data_source_info["type"], + "tenant_id": current_tenant_id, + } + ), document_model=document.doc_form, ) extract_settings.append(extract_setting) @@ -558,15 +569,17 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): if not data_source_info: continue extract_setting = ExtractSetting( - datasource_type=DatasourceType.WEBSITE.value, - website_info={ - "provider": data_source_info["provider"], - "job_id": data_source_info["job_id"], - "url": data_source_info["url"], - "tenant_id": current_user.current_tenant_id, - "mode": data_source_info["mode"], - "only_main_content": data_source_info["only_main_content"], - }, + datasource_type=DatasourceType.WEBSITE, + website_info=WebsiteInfo.model_validate( + { + "provider": data_source_info["provider"], + "job_id": data_source_info["job_id"], + "url": data_source_info["url"], + "tenant_id": current_tenant_id, + "mode": data_source_info["mode"], + "only_main_content": data_source_info["only_main_content"], + } + ), document_model=document.doc_form, ) extract_settings.append(extract_setting) @@ -576,7 +589,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): indexing_runner = IndexingRunner() try: response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, extract_settings, 
data_process_rule_dict, document.doc_form, @@ -753,7 +766,7 @@ class DocumentApi(DocumentResource): } else: dataset_process_rules = DatasetService.get_process_rules(dataset_id) - document_process_rules = document.dataset_process_rule.to_dict() + document_process_rules = document.dataset_process_rule.to_dict() if document.dataset_process_rule else {} data_source_info = document.data_source_detail_dict response = { "id": document.id, @@ -827,6 +840,7 @@ class DocumentProcessingApi(DocumentResource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, action: Literal["pause", "resume"]): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) document_id = str(document_id) document = self.get_document(dataset_id, document_id) @@ -877,6 +891,7 @@ class DocumentMetadataApi(DocumentResource): @login_required @account_initialization_required def put(self, dataset_id, document_id): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) document_id = str(document_id) document = self.get_document(dataset_id, document_id) @@ -924,6 +939,7 @@ class DocumentStatusApi(DocumentResource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, action: Literal["enable", "disable", "archive", "un_archive"]): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if dataset is None: @@ -1070,9 +1086,12 @@ class DocumentRenameApi(DocumentResource): @marshal_with(document_fields) def post(self, dataset_id, document_id): # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator + current_user, _ = current_account_with_tenant() if not current_user.is_dataset_editor: raise Forbidden() dataset = DatasetService.get_dataset(dataset_id) + if not dataset: + raise NotFound("Dataset not found.") DatasetService.check_dataset_operator_permission(current_user, dataset) parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, nullable=False, location="json") @@ -1093,6 +1112,7 @@ class WebsiteDocumentSyncApi(DocumentResource): @account_initialization_required def get(self, dataset_id, document_id): """sync website document.""" + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if not dataset: @@ -1101,7 +1121,7 @@ class WebsiteDocumentSyncApi(DocumentResource): document = DocumentService.get_document(dataset.id, document_id) if not document: raise NotFound("Document not found.") - if document.tenant_id != current_user.current_tenant_id: + if document.tenant_id != current_tenant_id: raise Forbidden("No permission.") if document.data_source_type != "website_crawl": raise ValueError("Document is not a website document.") @@ -1114,6 +1134,7 @@ class WebsiteDocumentSyncApi(DocumentResource): return {"result": "success"}, 200 +@console_ns.route("/datasets//documents//pipeline-execution-log") class DocumentPipelineExecutionLogApi(DocumentResource): @setup_required @login_required @@ -1147,29 +1168,3 @@ class DocumentPipelineExecutionLogApi(DocumentResource): "input_data": log.input_data, "datasource_node_id": log.datasource_node_id, }, 200 - - -api.add_resource(GetProcessRuleApi, "/datasets/process-rule") -api.add_resource(DatasetDocumentListApi, "/datasets//documents") -api.add_resource(DatasetInitApi, 
"/datasets/init") -api.add_resource( - DocumentIndexingEstimateApi, "/datasets//documents//indexing-estimate" -) -api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets//batch//indexing-estimate") -api.add_resource(DocumentBatchIndexingStatusApi, "/datasets//batch//indexing-status") -api.add_resource(DocumentIndexingStatusApi, "/datasets//documents//indexing-status") -api.add_resource(DocumentApi, "/datasets//documents/") -api.add_resource( - DocumentProcessingApi, "/datasets//documents//processing/" -) -api.add_resource(DocumentMetadataApi, "/datasets//documents//metadata") -api.add_resource(DocumentStatusApi, "/datasets//documents/status//batch") -api.add_resource(DocumentPauseApi, "/datasets//documents//processing/pause") -api.add_resource(DocumentRecoverApi, "/datasets//documents//processing/resume") -api.add_resource(DocumentRetryApi, "/datasets//retry") -api.add_resource(DocumentRenameApi, "/datasets//documents//rename") - -api.add_resource(WebsiteDocumentSyncApi, "/datasets//documents//website-sync") -api.add_resource( - DocumentPipelineExecutionLogApi, "/datasets//documents//pipeline-execution-log" -) diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 463fd2d7ec..d4d484a2e2 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -1,13 +1,12 @@ import uuid from flask import request -from flask_login import current_user from flask_restx import Resource, marshal, reqparse from sqlalchemy import select from werkzeug.exceptions import Forbidden, NotFound import services -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.error import ProviderNotInitializeError from controllers.console.datasets.error import ( ChildChunkDeleteIndexError, @@ -27,7 +26,7 @@ from core.model_runtime.entities.model_entities import ModelType from extensions.ext_database import db from extensions.ext_redis import redis_client from fields.segment_fields import child_chunk_fields, segment_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.dataset import ChildChunk, DocumentSegment from models.model import UploadFile from services.dataset_service import DatasetService, DocumentService, SegmentService @@ -37,11 +36,14 @@ from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingS from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task +@console_ns.route("/datasets//documents//segments") class DatasetDocumentSegmentListApi(Resource): @setup_required @login_required @account_initialization_required def get(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + dataset_id = str(dataset_id) document_id = str(document_id) dataset = DatasetService.get_dataset(dataset_id) @@ -78,7 +80,7 @@ class DatasetDocumentSegmentListApi(Resource): select(DocumentSegment) .where( DocumentSegment.document_id == str(document_id), - DocumentSegment.tenant_id == current_user.current_tenant_id, + DocumentSegment.tenant_id == current_tenant_id, ) .order_by(DocumentSegment.position.asc()) ) @@ -114,6 +116,8 @@ class DatasetDocumentSegmentListApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id): + current_user, _ = current_account_with_tenant() + # check dataset dataset_id 
= str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -139,6 +143,7 @@ class DatasetDocumentSegmentListApi(Resource): return {"result": "success"}, 204 +@console_ns.route("/datasets//documents//segment/") class DatasetDocumentSegmentApi(Resource): @setup_required @login_required @@ -146,6 +151,8 @@ class DatasetDocumentSegmentApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, action): + current_user, current_tenant_id = current_account_with_tenant() + dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if not dataset: @@ -169,7 +176,7 @@ class DatasetDocumentSegmentApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -193,6 +200,7 @@ class DatasetDocumentSegmentApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/datasets//documents//segment") class DatasetDocumentSegmentAddApi(Resource): @setup_required @login_required @@ -201,6 +209,8 @@ class DatasetDocumentSegmentAddApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -218,7 +228,7 @@ class DatasetDocumentSegmentAddApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -244,6 +254,7 @@ class DatasetDocumentSegmentAddApi(Resource): return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200 +@console_ns.route("/datasets//documents//segments/") class DatasetDocumentSegmentUpdateApi(Resource): @setup_required @login_required @@ -251,6 +262,8 @@ class DatasetDocumentSegmentUpdateApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -268,7 +281,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -283,7 +296,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -305,7 +318,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): ) args = parser.parse_args() SegmentService.segment_create_args_validate(args, document) - segment = SegmentService.update_segment(SegmentUpdateArgs(**args), segment, document, 
dataset) + segment = SegmentService.update_segment(SegmentUpdateArgs.model_validate(args), segment, document, dataset) return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200 @setup_required @@ -313,6 +326,8 @@ class DatasetDocumentSegmentUpdateApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -329,7 +344,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -345,6 +360,10 @@ class DatasetDocumentSegmentUpdateApi(Resource): return {"result": "success"}, 204 +@console_ns.route( + "/datasets//documents//segments/batch_import", + "/datasets/batch_import_status/", +) class DatasetDocumentSegmentBatchImportApi(Resource): @setup_required @login_required @@ -353,6 +372,8 @@ class DatasetDocumentSegmentBatchImportApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -384,7 +405,12 @@ class DatasetDocumentSegmentBatchImportApi(Resource): # send batch add segments task redis_client.setnx(indexing_cache_key, "waiting") batch_create_segment_to_index_task.delay( - str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id + str(job_id), + upload_file_id, + dataset_id, + document_id, + current_tenant_id, + current_user.id, ) except Exception as e: return {"error": str(e)}, 500 @@ -393,7 +419,9 @@ class DatasetDocumentSegmentBatchImportApi(Resource): @setup_required @login_required @account_initialization_required - def get(self, job_id): + def get(self, job_id=None, dataset_id=None, document_id=None): + if job_id is None: + raise NotFound("The job does not exist.") job_id = str(job_id) indexing_cache_key = f"segment_batch_import_{job_id}" cache_result = redis_client.get(indexing_cache_key) @@ -403,6 +431,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource): return {"job_id": job_id, "job_status": cache_result.decode()}, 200 +@console_ns.route("/datasets//documents//segments//child_chunks") class ChildChunkAddApi(Resource): @setup_required @login_required @@ -411,6 +440,8 @@ class ChildChunkAddApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -425,7 +456,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: 
@@ -437,7 +468,7 @@ class ChildChunkAddApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -457,7 +488,8 @@ class ChildChunkAddApi(Resource): parser.add_argument("content", type=str, required=True, nullable=False, location="json") args = parser.parse_args() try: - child_chunk = SegmentService.create_child_chunk(args.get("content"), segment, document, dataset) + content = args["content"] + child_chunk = SegmentService.create_child_chunk(content, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 @@ -466,6 +498,8 @@ class ChildChunkAddApi(Resource): @login_required @account_initialization_required def get(self, dataset_id, document_id, segment_id): + _, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -482,7 +516,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -513,6 +547,8 @@ class ChildChunkAddApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -529,7 +565,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -546,19 +582,25 @@ class ChildChunkAddApi(Resource): parser.add_argument("chunks", type=list, required=True, nullable=False, location="json") args = parser.parse_args() try: - chunks = [ChildChunkUpdateArgs(**chunk) for chunk in args.get("chunks")] + chunks_data = args["chunks"] + chunks = [ChildChunkUpdateArgs.model_validate(chunk) for chunk in chunks_data] child_chunks = SegmentService.update_child_chunks(chunks, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunks, child_chunk_fields)}, 200 +@console_ns.route( + "/datasets//documents//segments//child_chunks/" +) class ChildChunkUpdateApi(Resource): @setup_required @login_required @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id, child_chunk_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -575,7 +617,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + 
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -586,7 +628,7 @@ class ChildChunkUpdateApi(Resource): db.session.query(ChildChunk) .where( ChildChunk.id == str(child_chunk_id), - ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.tenant_id == current_tenant_id, ChildChunk.segment_id == segment.id, ChildChunk.document_id == document_id, ) @@ -613,6 +655,8 @@ class ChildChunkUpdateApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id, child_chunk_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -629,7 +673,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -640,7 +684,7 @@ class ChildChunkUpdateApi(Resource): db.session.query(ChildChunk) .where( ChildChunk.id == str(child_chunk_id), - ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.tenant_id == current_tenant_id, ChildChunk.segment_id == segment.id, ChildChunk.document_id == document_id, ) @@ -660,33 +704,8 @@ class ChildChunkUpdateApi(Resource): parser.add_argument("content", type=str, required=True, nullable=False, location="json") args = parser.parse_args() try: - child_chunk = SegmentService.update_child_chunk( - args.get("content"), child_chunk, segment, document, dataset - ) + content = args["content"] + child_chunk = SegmentService.update_child_chunk(content, child_chunk, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 - - -api.add_resource(DatasetDocumentSegmentListApi, "/datasets//documents//segments") -api.add_resource( - DatasetDocumentSegmentApi, "/datasets//documents//segment/" -) -api.add_resource(DatasetDocumentSegmentAddApi, "/datasets//documents//segment") -api.add_resource( - DatasetDocumentSegmentUpdateApi, - "/datasets//documents//segments/", -) -api.add_resource( - DatasetDocumentSegmentBatchImportApi, - "/datasets//documents//segments/batch_import", - "/datasets/batch_import_status/", -) -api.add_resource( - ChildChunkAddApi, - "/datasets//documents//segments//child_chunks", -) -api.add_resource( - ChildChunkUpdateApi, - "/datasets//documents//segments//child_chunks/", -) diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index e8f5a11b41..f590919180 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -1,5 +1,4 @@ from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound @@ -8,14 +7,14 @@ from controllers.console import api, console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import account_initialization_required, setup_required from fields.dataset_fields import dataset_detail_fields -from libs.login import login_required +from libs.login 
import current_account_with_tenant, login_required from services.dataset_service import DatasetService from services.external_knowledge_service import ExternalDatasetService from services.hit_testing_service import HitTestingService from services.knowledge_service import ExternalDatasetTestService -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 100: raise ValueError("Name must be between 1 to 100 characters.") return name @@ -37,12 +36,13 @@ class ExternalApiTemplateListApi(Resource): @login_required @account_initialization_required def get(self): + _, current_tenant_id = current_account_with_tenant() page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) search = request.args.get("keyword", default=None, type=str) external_knowledge_apis, total = ExternalDatasetService.get_external_knowledge_apis( - page, limit, current_user.current_tenant_id, search + page, limit, current_tenant_id, search ) response = { "data": [item.to_dict() for item in external_knowledge_apis], @@ -57,6 +57,7 @@ class ExternalApiTemplateListApi(Resource): @login_required @account_initialization_required def post(self): + current_user, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument( "name", @@ -82,7 +83,7 @@ class ExternalApiTemplateListApi(Resource): try: external_knowledge_api = ExternalDatasetService.create_external_knowledge_api( - tenant_id=current_user.current_tenant_id, user_id=current_user.id, args=args + tenant_id=current_tenant_id, user_id=current_user.id, args=args ) except services.errors.dataset.DatasetNameDuplicateError: raise DatasetNameDuplicateError() @@ -112,6 +113,7 @@ class ExternalApiTemplateApi(Resource): @login_required @account_initialization_required def patch(self, external_knowledge_api_id): + current_user, current_tenant_id = current_account_with_tenant() external_knowledge_api_id = str(external_knowledge_api_id) parser = reqparse.RequestParser() @@ -133,7 +135,7 @@ class ExternalApiTemplateApi(Resource): ExternalDatasetService.validate_api_list(args["settings"]) external_knowledge_api = ExternalDatasetService.update_external_knowledge_api( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, user_id=current_user.id, external_knowledge_api_id=external_knowledge_api_id, args=args, @@ -145,13 +147,14 @@ class ExternalApiTemplateApi(Resource): @login_required @account_initialization_required def delete(self, external_knowledge_api_id): + current_user, current_tenant_id = current_account_with_tenant() external_knowledge_api_id = str(external_knowledge_api_id) # The role of the current user in the ta table must be admin, owner, or editor - if not (current_user.is_editor or current_user.is_dataset_operator): + if not (current_user.has_edit_permission or current_user.is_dataset_operator): raise Forbidden() - ExternalDatasetService.delete_external_knowledge_api(current_user.current_tenant_id, external_knowledge_api_id) + ExternalDatasetService.delete_external_knowledge_api(current_tenant_id, external_knowledge_api_id) return {"result": "success"}, 204 @@ -196,7 +199,8 @@ class ExternalDatasetCreateApi(Resource): @account_initialization_required def post(self): # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = 
reqparse.RequestParser() @@ -220,7 +224,7 @@ class ExternalDatasetCreateApi(Resource): try: dataset = ExternalDatasetService.create_external_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, user_id=current_user.id, args=args, ) @@ -252,6 +256,7 @@ class ExternalKnowledgeHitTestingApi(Resource): @login_required @account_initialization_required def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: diff --git a/api/controllers/console/datasets/hit_testing_base.py b/api/controllers/console/datasets/hit_testing_base.py index cfbfc50873..6113f1fd17 100644 --- a/api/controllers/console/datasets/hit_testing_base.py +++ b/api/controllers/console/datasets/hit_testing_base.py @@ -1,10 +1,9 @@ import logging -from flask_login import current_user from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound -import services.dataset_service +import services from controllers.console.app.error import ( CompletionRequestError, ProviderModelCurrentlyNotSupportError, @@ -20,6 +19,8 @@ from core.errors.error import ( ) from core.model_runtime.errors.invoke import InvokeError from fields.hit_testing_fields import hit_testing_record_fields +from libs.login import current_user +from models.account import Account from services.dataset_service import DatasetService from services.hit_testing_service import HitTestingService @@ -29,6 +30,7 @@ logger = logging.getLogger(__name__) class DatasetsHitTestingBase: @staticmethod def get_and_validate_dataset(dataset_id: str): + assert isinstance(current_user, Account) dataset = DatasetService.get_dataset(dataset_id) if dataset is None: raise NotFound("Dataset not found.") @@ -55,6 +57,7 @@ class DatasetsHitTestingBase: @staticmethod def perform_hit_testing(dataset, args): + assert isinstance(current_user, Account) try: response = HitTestingService.retrieve( dataset=dataset, diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py index 21ab5e4fe1..673bac1add 100644 --- a/api/controllers/console/datasets/metadata.py +++ b/api/controllers/console/datasets/metadata.py @@ -1,13 +1,12 @@ from typing import Literal -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import NotFound -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from fields.dataset_fields import dataset_metadata_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.dataset_service import DatasetService from services.entities.knowledge_entities.knowledge_entities import ( MetadataArgs, @@ -16,6 +15,7 @@ from services.entities.knowledge_entities.knowledge_entities import ( from services.metadata_service import MetadataService +@console_ns.route("/datasets//metadata") class DatasetMetadataCreateApi(Resource): @setup_required @login_required @@ -23,11 +23,12 @@ class DatasetMetadataCreateApi(Resource): @enterprise_license_required @marshal_with(dataset_metadata_fields) def post(self, dataset_id): + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("type", type=str, required=True, nullable=False, location="json") 
parser.add_argument("name", type=str, required=True, nullable=False, location="json") args = parser.parse_args() - metadata_args = MetadataArgs(**args) + metadata_args = MetadataArgs.model_validate(args) dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -50,6 +51,7 @@ class DatasetMetadataCreateApi(Resource): return MetadataService.get_dataset_metadatas(dataset), 200 +@console_ns.route("/datasets//metadata/") class DatasetMetadataApi(Resource): @setup_required @login_required @@ -57,9 +59,11 @@ class DatasetMetadataApi(Resource): @enterprise_license_required @marshal_with(dataset_metadata_fields) def patch(self, dataset_id, metadata_id): + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, nullable=False, location="json") args = parser.parse_args() + name = args["name"] dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) @@ -68,7 +72,7 @@ class DatasetMetadataApi(Resource): raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) + metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, name) return metadata, 200 @setup_required @@ -76,6 +80,7 @@ class DatasetMetadataApi(Resource): @account_initialization_required @enterprise_license_required def delete(self, dataset_id, metadata_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -87,6 +92,7 @@ class DatasetMetadataApi(Resource): return {"result": "success"}, 204 +@console_ns.route("/datasets/metadata/built-in") class DatasetMetadataBuiltInFieldApi(Resource): @setup_required @login_required @@ -97,12 +103,14 @@ class DatasetMetadataBuiltInFieldApi(Resource): return {"fields": built_in_fields}, 200 +@console_ns.route("/datasets//metadata/built-in/") class DatasetMetadataBuiltInFieldActionApi(Resource): @setup_required @login_required @account_initialization_required @enterprise_license_required def post(self, dataset_id, action: Literal["enable", "disable"]): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -116,12 +124,14 @@ class DatasetMetadataBuiltInFieldActionApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/datasets//documents/metadata") class DocumentMetadataEditApi(Resource): @setup_required @login_required @account_initialization_required @enterprise_license_required def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -131,15 +141,8 @@ class DocumentMetadataEditApi(Resource): parser = reqparse.RequestParser() parser.add_argument("operation_data", type=list, required=True, nullable=False, location="json") args = parser.parse_args() - metadata_args = MetadataOperationData(**args) + metadata_args = MetadataOperationData.model_validate(args) MetadataService.update_documents_metadata(dataset, metadata_args) return {"result": "success"}, 200 - - -api.add_resource(DatasetMetadataCreateApi, "/datasets//metadata") -api.add_resource(DatasetMetadataApi, "/datasets//metadata/") -api.add_resource(DatasetMetadataBuiltInFieldApi, 
"/datasets/metadata/built-in") -api.add_resource(DatasetMetadataBuiltInFieldActionApi, "/datasets//metadata/built-in/") -api.add_resource(DocumentMetadataEditApi, "/datasets//documents/metadata") diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py index 1a845cf326..194bd98fa3 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py @@ -1,33 +1,30 @@ -from fastapi.encoders import jsonable_encoder from flask import make_response, redirect, request -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden, NotFound from configs import dify_config -from controllers.console import api -from controllers.console.wraps import ( - account_initialization_required, - setup_required, -) +from controllers.console import console_ns +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.oauth import OAuthHandler from libs.helper import StrLen -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.provider_ids import DatasourceProviderID from services.datasource_provider_service import DatasourceProviderService from services.plugin.oauth_service import OAuthProxyService +@console_ns.route("/oauth/plugin//datasource/get-authorization-url") class DatasourcePluginOAuthAuthorizationUrl(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, provider_id: str): - user = current_user - tenant_id = user.current_tenant_id - if not current_user.is_editor: - raise Forbidden() + current_user, current_tenant_id = current_account_with_tenant() + + tenant_id = current_tenant_id credential_id = request.args.get("credential_id") datasource_provider_id = DatasourceProviderID(provider_id) @@ -51,7 +48,7 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource): redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/datasource/callback" authorization_url_response = oauth_handler.get_authorization_url( tenant_id=tenant_id, - user_id=user.id, + user_id=current_user.id, plugin_id=plugin_id, provider=provider_name, redirect_uri=redirect_uri, @@ -68,6 +65,7 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource): return response +@console_ns.route("/oauth/plugin//datasource/callback") class DatasourceOAuthCallback(Resource): @setup_required def get(self, provider_id: str): @@ -123,13 +121,14 @@ class DatasourceOAuthCallback(Resource): return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") +@console_ns.route("/auth/plugin/datasource/") class DatasourceAuth(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() + _, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument( @@ -142,7 +141,7 @@ class DatasourceAuth(Resource): try: datasource_provider_service.add_datasource_api_key_provider( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider_id=datasource_provider_id, 
credentials=args["credentials"], name=args["name"], @@ -157,30 +156,35 @@ class DatasourceAuth(Resource): def get(self, provider_id: str): datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() + _, current_tenant_id = current_account_with_tenant() + datasources = datasource_provider_service.list_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=datasource_provider_id.provider_name, plugin_id=datasource_provider_id.plugin_id, ) return {"result": datasources}, 200 +@console_ns.route("/auth/plugin/datasource//delete") class DatasourceAuthDeleteApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = DatasourceProviderID(provider_id) plugin_id = datasource_provider_id.plugin_id provider_name = datasource_provider_id.provider_name - if not current_user.is_editor: - raise Forbidden() + parser = reqparse.RequestParser() parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() datasource_provider_service = DatasourceProviderService() datasource_provider_service.remove_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, auth_id=args["credential_id"], provider=provider_name, plugin_id=plugin_id, @@ -188,22 +192,25 @@ class DatasourceAuthDeleteApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/auth/plugin/datasource//update") class DatasourceAuthUpdateApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = DatasourceProviderID(provider_id) parser = reqparse.RequestParser() parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") parser.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json") parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() - if not current_user.is_editor: - raise Forbidden() + datasource_provider_service = DatasourceProviderService() datasource_provider_service.update_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, auth_id=args["credential_id"], provider=datasource_provider_id.provider_name, plugin_id=datasource_provider_id.plugin_id, @@ -213,37 +220,41 @@ class DatasourceAuthUpdateApi(Resource): return {"result": "success"}, 201 +@console_ns.route("/auth/plugin/datasource/list") class DatasourceAuthListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_service = DatasourceProviderService() - datasources = datasource_provider_service.get_all_datasource_credentials( - tenant_id=current_user.current_tenant_id - ) + datasources = datasource_provider_service.get_all_datasource_credentials(tenant_id=current_tenant_id) return {"result": jsonable_encoder(datasources)}, 200 +@console_ns.route("/auth/plugin/datasource/default-list") class DatasourceHardCodeAuthListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): + _, current_tenant_id = 
current_account_with_tenant() + datasource_provider_service = DatasourceProviderService() - datasources = datasource_provider_service.get_hard_code_datasource_credentials( - tenant_id=current_user.current_tenant_id - ) + datasources = datasource_provider_service.get_hard_code_datasource_credentials(tenant_id=current_tenant_id) return {"result": jsonable_encoder(datasources)}, 200 +@console_ns.route("/auth/plugin/datasource//custom-client") class DatasourceAuthOauthCustomClient(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() + _, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser() parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json") parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") @@ -251,7 +262,7 @@ class DatasourceAuthOauthCustomClient(Resource): datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() datasource_provider_service.setup_oauth_custom_client_params( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, client_params=args.get("client_params", {}), enabled=args.get("enable_oauth_custom_client", False), @@ -262,42 +273,48 @@ class DatasourceAuthOauthCustomClient(Resource): @login_required @account_initialization_required def delete(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() datasource_provider_service.remove_oauth_custom_client_params( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, ) return {"result": "success"}, 200 +@console_ns.route("/auth/plugin/datasource//default") class DatasourceAuthDefaultApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() + _, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser() parser.add_argument("id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() datasource_provider_service.set_default_datasource_provider( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, credential_id=args["id"], ) return {"result": "success"}, 200 +@console_ns.route("/auth/plugin/datasource//update-name") class DatasourceUpdateProviderNameApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() + _, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser() parser.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json") parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") @@ -305,58 +322,9 @@ class DatasourceUpdateProviderNameApi(Resource): datasource_provider_id = DatasourceProviderID(provider_id) 
datasource_provider_service = DatasourceProviderService() datasource_provider_service.update_datasource_provider_name( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_provider_id=datasource_provider_id, name=args["name"], credential_id=args["credential_id"], ) return {"result": "success"}, 200 - - -api.add_resource( - DatasourcePluginOAuthAuthorizationUrl, - "/oauth/plugin//datasource/get-authorization-url", -) -api.add_resource( - DatasourceOAuthCallback, - "/oauth/plugin//datasource/callback", -) -api.add_resource( - DatasourceAuth, - "/auth/plugin/datasource/", -) - -api.add_resource( - DatasourceAuthUpdateApi, - "/auth/plugin/datasource//update", -) - -api.add_resource( - DatasourceAuthDeleteApi, - "/auth/plugin/datasource//delete", -) - -api.add_resource( - DatasourceAuthListApi, - "/auth/plugin/datasource/list", -) - -api.add_resource( - DatasourceHardCodeAuthListApi, - "/auth/plugin/datasource/default-list", -) - -api.add_resource( - DatasourceAuthOauthCustomClient, - "/auth/plugin/datasource//custom-client", -) - -api.add_resource( - DatasourceAuthDefaultApi, - "/auth/plugin/datasource//default", -) - -api.add_resource( - DatasourceUpdateProviderNameApi, - "/auth/plugin/datasource//update-name", -) diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py index 05fa681a33..6c04cc877a 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py @@ -4,7 +4,7 @@ from flask_restx import ( # type: ignore ) from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required from libs.login import current_user, login_required @@ -13,6 +13,7 @@ from models.dataset import Pipeline from services.rag_pipeline.rag_pipeline import RagPipelineService +@console_ns.route("/rag/pipelines//workflows/published/datasource/nodes//preview") class DataSourceContentPreviewApi(Resource): @setup_required @login_required @@ -49,9 +50,3 @@ class DataSourceContentPreviewApi(Resource): credential_id=args.get("credential_id"), ) return preview_content, 200 - - -api.add_resource( - DataSourceContentPreviewApi, - "/rag/pipelines//workflows/published/datasource/nodes//preview", -) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py index f04b0e04c3..e021f95283 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py @@ -4,7 +4,7 @@ from flask import request from flask_restx import Resource, reqparse from sqlalchemy.orm import Session -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import ( account_initialization_required, enterprise_license_required, @@ -20,18 +20,19 @@ from services.rag_pipeline.rag_pipeline import RagPipelineService logger = logging.getLogger(__name__) -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 40: raise ValueError("Name must be between 1 to 40 characters.") return name -def _validate_description_length(description): +def 
_validate_description_length(description: str) -> str: if len(description) > 400: raise ValueError("Description cannot exceed 400 characters.") return description +@console_ns.route("/rag/pipeline/templates") class PipelineTemplateListApi(Resource): @setup_required @login_required @@ -45,6 +46,7 @@ class PipelineTemplateListApi(Resource): return pipeline_templates, 200 +@console_ns.route("/rag/pipeline/templates/") class PipelineTemplateDetailApi(Resource): @setup_required @login_required @@ -57,6 +59,7 @@ class PipelineTemplateDetailApi(Resource): return pipeline_template, 200 +@console_ns.route("/rag/pipeline/customized/templates/") class CustomizedPipelineTemplateApi(Resource): @setup_required @login_required @@ -73,7 +76,7 @@ class CustomizedPipelineTemplateApi(Resource): ) parser.add_argument( "description", - type=str, + type=_validate_description_length, nullable=True, required=False, default="", @@ -85,7 +88,7 @@ class CustomizedPipelineTemplateApi(Resource): nullable=True, ) args = parser.parse_args() - pipeline_template_info = PipelineTemplateInfoEntity(**args) + pipeline_template_info = PipelineTemplateInfoEntity.model_validate(args) RagPipelineService.update_customized_pipeline_template(template_id, pipeline_template_info) return 200 @@ -112,6 +115,7 @@ class CustomizedPipelineTemplateApi(Resource): return {"data": template.yaml_content}, 200 +@console_ns.route("/rag/pipelines//customized/publish") class PublishCustomizedPipelineTemplateApi(Resource): @setup_required @login_required @@ -129,7 +133,7 @@ class PublishCustomizedPipelineTemplateApi(Resource): ) parser.add_argument( "description", - type=str, + type=_validate_description_length, nullable=True, required=False, default="", @@ -144,21 +148,3 @@ class PublishCustomizedPipelineTemplateApi(Resource): rag_pipeline_service = RagPipelineService() rag_pipeline_service.publish_customized_pipeline_template(pipeline_id, args) return {"result": "success"} - - -api.add_resource( - PipelineTemplateListApi, - "/rag/pipeline/templates", -) -api.add_resource( - PipelineTemplateDetailApi, - "/rag/pipeline/templates/", -) -api.add_resource( - CustomizedPipelineTemplateApi, - "/rag/pipeline/customized/templates/", -) -api.add_resource( - PublishCustomizedPipelineTemplateApi, - "/rag/pipelines//customized/publish", -) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py index 34faa4ec85..b394887783 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py @@ -1,10 +1,9 @@ -from flask_login import current_user # type: ignore # type: ignore -from flask_restx import Resource, marshal, reqparse # type: ignore +from flask_restx import Resource, marshal, reqparse from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden import services -from controllers.console import api +from controllers.console import console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import ( account_initialization_required, @@ -13,25 +12,14 @@ from controllers.console.wraps import ( ) from extensions.ext_database import db from fields.dataset_fields import dataset_detail_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.dataset import DatasetPermissionEnum from services.dataset_service import 
DatasetPermissionService, DatasetService from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, RagPipelineDatasetCreateEntity from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService -def _validate_name(name): - if not name or len(name) < 1 or len(name) > 40: - raise ValueError("Name must be between 1 to 40 characters.") - return name - - -def _validate_description_length(description): - if len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - +@console_ns.route("/rag/pipeline/dataset") class CreateRagPipelineDatasetApi(Resource): @setup_required @login_required @@ -49,7 +37,7 @@ class CreateRagPipelineDatasetApi(Resource): ) args = parser.parse_args() - + current_user, current_tenant_id = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator if not current_user.is_dataset_editor: raise Forbidden() @@ -69,12 +57,12 @@ class CreateRagPipelineDatasetApi(Resource): with Session(db.engine) as session: rag_pipeline_dsl_service = RagPipelineDslService(session) import_info = rag_pipeline_dsl_service.create_rag_pipeline_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, rag_pipeline_dataset_create_entity=rag_pipeline_dataset_create_entity, ) if rag_pipeline_dataset_create_entity.permission == "partial_members": DatasetPermissionService.update_partial_member_list( - current_user.current_tenant_id, + current_tenant_id, import_info["dataset_id"], rag_pipeline_dataset_create_entity.partial_member_list, ) @@ -84,6 +72,7 @@ class CreateRagPipelineDatasetApi(Resource): return import_info, 201 +@console_ns.route("/rag/pipeline/empty-dataset") class CreateEmptyRagPipelineDatasetApi(Resource): @setup_required @login_required @@ -91,10 +80,12 @@ class CreateEmptyRagPipelineDatasetApi(Resource): @cloud_edition_billing_rate_limit_check("knowledge") def post(self): # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_dataset_editor: raise Forbidden() dataset = DatasetService.create_empty_rag_pipeline_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, rag_pipeline_dataset_create_entity=RagPipelineDatasetCreateEntity( name="", description="", @@ -108,7 +99,3 @@ class CreateEmptyRagPipelineDatasetApi(Resource): ), ) return marshal(dataset, dataset_detail_fields), 201 - - -api.add_resource(CreateRagPipelineDatasetApi, "/rag/pipeline/dataset") -api.add_resource(CreateEmptyRagPipelineDatasetApi, "/rag/pipeline/empty-dataset") diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py index db07e7729a..2e8cc16dc1 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py @@ -1,31 +1,29 @@ import logging -from typing import Any, NoReturn +from typing import NoReturn from flask import Response from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.error import ( DraftWorkflowNotExist, ) from 
controllers.console.app.workflow_draft_variable import ( - _WORKFLOW_DRAFT_VARIABLE_FIELDS, - _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_FIELDS, # type: ignore[private-usage] + _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, # type: ignore[private-usage] ) from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required from controllers.web.error import InvalidArgumentError, NotFoundError -from core.variables.segment_group import SegmentGroup -from core.variables.segments import ArrayFileSegment, FileSegment, Segment from core.variables.types import SegmentType from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID from extensions.ext_database import db from factories.file_factory import build_from_mapping, build_from_mappings from factories.variable_factory import build_segment_with_type from libs.login import current_user, login_required -from models.account import Account +from models import Account from models.dataset import Pipeline from models.workflow import WorkflowDraftVariable from services.rag_pipeline.rag_pipeline import RagPipelineService @@ -34,32 +32,6 @@ from services.workflow_draft_variable_service import WorkflowDraftVariableList, logger = logging.getLogger(__name__) -def _convert_values_to_json_serializable_object(value: Segment) -> Any: - if isinstance(value, FileSegment): - return value.value.model_dump() - elif isinstance(value, ArrayFileSegment): - return [i.model_dump() for i in value.value] - elif isinstance(value, SegmentGroup): - return [_convert_values_to_json_serializable_object(i) for i in value.value] - else: - return value.value - - -def _serialize_var_value(variable: WorkflowDraftVariable) -> Any: - value = variable.get_value() - # create a copy of the value to avoid affecting the model cache. - value = value.model_copy(deep=True) - # Refresh the url signature before returning it to client. 
- if isinstance(value, FileSegment): - file = value.value - file.remote_url = file.generate_url() - elif isinstance(value, ArrayFileSegment): - files = value.value - for file in files: - file.remote_url = file.generate_url() - return _convert_values_to_json_serializable_object(value) - - def _create_pagination_parser(): parser = reqparse.RequestParser() parser.add_argument( @@ -104,13 +76,14 @@ def _api_prerequisite(f): @account_initialization_required @get_rag_pipeline def wrapper(*args, **kwargs): - if not isinstance(current_user, Account) or not current_user.is_editor: + if not isinstance(current_user, Account) or not current_user.has_edit_permission: raise Forbidden() return f(*args, **kwargs) return wrapper +@console_ns.route("/rag/pipelines//workflows/draft/variables") class RagPipelineVariableCollectionApi(Resource): @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) @@ -168,6 +141,7 @@ def validate_node_id(node_id: str) -> NoReturn | None: return None +@console_ns.route("/rag/pipelines//workflows/draft/nodes//variables") class RagPipelineNodeVariableCollectionApi(Resource): @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) @@ -190,6 +164,7 @@ class RagPipelineNodeVariableCollectionApi(Resource): return Response("", 204) +@console_ns.route("/rag/pipelines//workflows/draft/variables/") class RagPipelineVariableApi(Resource): _PATCH_NAME_FIELD = "name" _PATCH_VALUE_FIELD = "value" @@ -284,6 +259,7 @@ class RagPipelineVariableApi(Resource): return Response("", 204) +@console_ns.route("/rag/pipelines//workflows/draft/variables//reset") class RagPipelineVariableResetApi(Resource): @_api_prerequisite def put(self, pipeline: Pipeline, variable_id: str): @@ -325,6 +301,7 @@ def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList return draft_vars +@console_ns.route("/rag/pipelines//workflows/draft/system-variables") class RagPipelineSystemVariableCollectionApi(Resource): @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) @@ -332,6 +309,7 @@ class RagPipelineSystemVariableCollectionApi(Resource): return _get_variable_list(pipeline, SYSTEM_VARIABLE_NODE_ID) +@console_ns.route("/rag/pipelines//workflows/draft/environment-variables") class RagPipelineEnvironmentVariableCollectionApi(Resource): @_api_prerequisite def get(self, pipeline: Pipeline): @@ -364,26 +342,3 @@ class RagPipelineEnvironmentVariableCollectionApi(Resource): ) return {"items": env_vars_list} - - -api.add_resource( - RagPipelineVariableCollectionApi, - "/rag/pipelines//workflows/draft/variables", -) -api.add_resource( - RagPipelineNodeVariableCollectionApi, - "/rag/pipelines//workflows/draft/nodes//variables", -) -api.add_resource( - RagPipelineVariableApi, "/rag/pipelines//workflows/draft/variables/" -) -api.add_resource( - RagPipelineVariableResetApi, "/rag/pipelines//workflows/draft/variables//reset" -) -api.add_resource( - RagPipelineSystemVariableCollectionApi, "/rag/pipelines//workflows/draft/system-variables" -) -api.add_resource( - RagPipelineEnvironmentVariableCollectionApi, - "/rag/pipelines//workflows/draft/environment-variables", -) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py index a447f2848a..ca767dbb10 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py @@ -1,11 +1,8 @@ -from typing import cast - -from 
flask_login import current_user # type: ignore from flask_restx import Resource, marshal_with, reqparse # type: ignore from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import ( account_initialization_required, @@ -13,13 +10,13 @@ from controllers.console.wraps import ( ) from extensions.ext_database import db from fields.rag_pipeline_fields import pipeline_import_check_dependencies_fields, pipeline_import_fields -from libs.login import login_required -from models import Account +from libs.login import current_account_with_tenant, login_required from models.dataset import Pipeline from services.app_dsl_service import ImportStatus from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService +@console_ns.route("/rag/pipelines/imports") class RagPipelineImportApi(Resource): @setup_required @login_required @@ -27,7 +24,8 @@ class RagPipelineImportApi(Resource): @marshal_with(pipeline_import_fields) def post(self): # Check user role first - if not current_user.is_editor: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -46,7 +44,7 @@ class RagPipelineImportApi(Resource): with Session(db.engine) as session: import_service = RagPipelineDslService(session) # Import app - account = cast(Account, current_user) + account = current_user result = import_service.import_rag_pipeline( account=account, import_mode=args["mode"], @@ -59,37 +57,40 @@ class RagPipelineImportApi(Resource): # Return appropriate status code based on result status = result.status - if status == ImportStatus.FAILED.value: + if status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 - elif status == ImportStatus.PENDING.value: + elif status == ImportStatus.PENDING: return result.model_dump(mode="json"), 202 return result.model_dump(mode="json"), 200 +@console_ns.route("/rag/pipelines/imports//confirm") class RagPipelineImportConfirmApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(pipeline_import_fields) def post(self, import_id): + current_user, _ = current_account_with_tenant() # Check user role first - if not current_user.is_editor: + if not current_user.has_edit_permission: raise Forbidden() # Create service with session with Session(db.engine) as session: import_service = RagPipelineDslService(session) # Confirm import - account = cast(Account, current_user) + account = current_user result = import_service.confirm_import(import_id=import_id, account=account) session.commit() # Return appropriate status code based on result - if result.status == ImportStatus.FAILED.value: + if result.status == ImportStatus.FAILED: return result.model_dump(mode="json"), 400 return result.model_dump(mode="json"), 200 +@console_ns.route("/rag/pipelines/imports//check-dependencies") class RagPipelineImportCheckDependenciesApi(Resource): @setup_required @login_required @@ -97,7 +98,8 @@ class RagPipelineImportCheckDependenciesApi(Resource): @account_initialization_required @marshal_with(pipeline_import_check_dependencies_fields) def get(self, pipeline: Pipeline): - if not current_user.is_editor: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() with Session(db.engine) as session: @@ -107,13 +109,15 @@ class 
RagPipelineImportCheckDependenciesApi(Resource): return result.model_dump(mode="json"), 200 +@console_ns.route("/rag/pipelines//exports") class RagPipelineExportApi(Resource): @setup_required @login_required @get_rag_pipeline @account_initialization_required def get(self, pipeline: Pipeline): - if not current_user.is_editor: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() # Add include_secret params @@ -128,22 +132,3 @@ class RagPipelineExportApi(Resource): ) return {"data": result}, 200 - - -# Import Rag Pipeline -api.add_resource( - RagPipelineImportApi, - "/rag/pipelines/imports", -) -api.add_resource( - RagPipelineImportConfirmApi, - "/rag/pipelines/imports//confirm", -) -api.add_resource( - RagPipelineImportCheckDependenciesApi, - "/rag/pipelines/imports//check-dependencies", -) -api.add_resource( - RagPipelineExportApi, - "/rag/pipelines//exports", -) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index 01ddb8a871..d4d6da7fe2 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -9,7 +9,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.error import ( ConversationCompletedError, DraftWorkflowNotExist, @@ -18,6 +18,7 @@ from controllers.console.app.error import ( from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import ( account_initialization_required, + edit_permission_required, setup_required, ) from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError @@ -36,8 +37,8 @@ from fields.workflow_run_fields import ( ) from libs import helper from libs.helper import TimestampField, uuid_value -from libs.login import current_user, login_required -from models.account import Account +from libs.login import current_account_with_tenant, current_user, login_required +from models import Account from models.dataset import Pipeline from models.model import EndUser from services.errors.app import WorkflowHashNotEqualError @@ -50,20 +51,18 @@ from services.rag_pipeline.rag_pipeline_transform_service import RagPipelineTran logger = logging.getLogger(__name__) +@console_ns.route("/rag/pipelines//workflows/draft") class DraftRagPipelineApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required @marshal_with(workflow_fields) def get(self, pipeline: Pipeline): """ Get draft rag pipeline's workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - # fetch draft workflow by app_model rag_pipeline_service = RagPipelineService() workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline) @@ -78,13 +77,13 @@ class DraftRagPipelineApi(Resource): @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def post(self, pipeline: Pipeline): """ Sync draft workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - 
raise Forbidden() + current_user, _ = current_account_with_tenant() content_type = request.headers.get("Content-Type", "") @@ -147,18 +146,19 @@ class DraftRagPipelineApi(Resource): } +@console_ns.route("/rag/pipelines//workflows/draft/iteration/nodes//run") class RagPipelineDraftRunIterationNodeApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def post(self, pipeline: Pipeline, node_id: str): """ Run draft workflow iteration node """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -181,6 +181,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource): raise InternalServerError() +@console_ns.route("/rag/pipelines//workflows/draft/loop/nodes//run") class RagPipelineDraftRunLoopNodeApi(Resource): @setup_required @login_required @@ -191,7 +192,8 @@ class RagPipelineDraftRunLoopNodeApi(Resource): Run draft workflow loop node """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -215,6 +217,7 @@ class RagPipelineDraftRunLoopNodeApi(Resource): raise InternalServerError() +@console_ns.route("/rag/pipelines//workflows/draft/run") class DraftRagPipelineRunApi(Resource): @setup_required @login_required @@ -225,7 +228,8 @@ class DraftRagPipelineRunApi(Resource): Run draft workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -249,6 +253,7 @@ class DraftRagPipelineRunApi(Resource): raise InvokeRateLimitHttpError(ex.description) +@console_ns.route("/rag/pipelines//workflows/published/run") class PublishedRagPipelineRunApi(Resource): @setup_required @login_required @@ -259,7 +264,8 @@ class PublishedRagPipelineRunApi(Resource): Run published workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -298,7 +304,7 @@ class PublishedRagPipelineRunApi(Resource): # Run rag pipeline datasource # """ # # The role of the current user in the ta table must be admin, owner, or editor -# if not current_user.is_editor: +# if not current_user.has_edit_permission: # raise Forbidden() # # if not isinstance(current_user, Account): @@ -339,7 +345,7 @@ class PublishedRagPipelineRunApi(Resource): # Run rag pipeline datasource # """ # # The role of the current user in the ta table must be admin, owner, or editor -# if not current_user.is_editor: +# if not current_user.has_edit_permission: # raise Forbidden() # # if not isinstance(current_user, Account): @@ -369,6 +375,7 @@ class PublishedRagPipelineRunApi(Resource): # # return result # 
+@console_ns.route("/rag/pipelines//workflows/published/datasource/nodes//run") class RagPipelinePublishedDatasourceNodeRunApi(Resource): @setup_required @login_required @@ -379,7 +386,8 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource): Run rag pipeline datasource """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -411,6 +419,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource): ) +@console_ns.route("/rag/pipelines//workflows/draft/datasource/nodes//run") class RagPipelineDraftDatasourceNodeRunApi(Resource): @setup_required @login_required @@ -421,7 +430,8 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource): Run rag pipeline datasource """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -453,6 +463,7 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource): ) +@console_ns.route("/rag/pipelines//workflows/draft/nodes//run") class RagPipelineDraftNodeRunApi(Resource): @setup_required @login_required @@ -464,7 +475,8 @@ class RagPipelineDraftNodeRunApi(Resource): Run draft workflow node """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -486,6 +498,7 @@ class RagPipelineDraftNodeRunApi(Resource): return workflow_node_execution +@console_ns.route("/rag/pipelines//workflow-runs/tasks//stop") class RagPipelineTaskStopApi(Resource): @setup_required @login_required @@ -496,7 +509,8 @@ class RagPipelineTaskStopApi(Resource): Stop workflow task """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id) @@ -504,6 +518,7 @@ class RagPipelineTaskStopApi(Resource): return {"result": "success"} +@console_ns.route("/rag/pipelines//workflows/publish") class PublishedRagPipelineApi(Resource): @setup_required @login_required @@ -515,7 +530,8 @@ class PublishedRagPipelineApi(Resource): Get published pipeline """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() if not pipeline.is_published: return None @@ -535,7 +551,8 @@ class PublishedRagPipelineApi(Resource): Publish workflow """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() rag_pipeline_service = RagPipelineService() @@ 
-559,6 +576,7 @@ class PublishedRagPipelineApi(Resource): } +@console_ns.route("/rag/pipelines//workflows/default-workflow-block-configs") class DefaultRagPipelineBlockConfigsApi(Resource): @setup_required @login_required @@ -569,7 +587,8 @@ class DefaultRagPipelineBlockConfigsApi(Resource): Get default block config """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() # Get default block configs @@ -577,6 +596,7 @@ class DefaultRagPipelineBlockConfigsApi(Resource): return rag_pipeline_service.get_default_block_configs() +@console_ns.route("/rag/pipelines//workflows/default-workflow-block-configs/") class DefaultRagPipelineBlockConfigApi(Resource): @setup_required @login_required @@ -587,7 +607,8 @@ class DefaultRagPipelineBlockConfigApi(Resource): Get default block config """ # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -608,6 +629,7 @@ class DefaultRagPipelineBlockConfigApi(Resource): return rag_pipeline_service.get_default_block_config(node_type=block_type, filters=filters) +@console_ns.route("/rag/pipelines//workflows") class PublishedAllRagPipelineApi(Resource): @setup_required @login_required @@ -618,7 +640,8 @@ class PublishedAllRagPipelineApi(Resource): """ Get published workflows """ - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -656,6 +679,7 @@ class PublishedAllRagPipelineApi(Resource): } +@console_ns.route("/rag/pipelines//workflows/") class RagPipelineByIdApi(Resource): @setup_required @login_required @@ -667,7 +691,8 @@ class RagPipelineByIdApi(Resource): Update workflow attributes """ # Check permission - if not isinstance(current_user, Account) or not current_user.has_edit_permission: + current_user, _ = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() parser = reqparse.RequestParser() @@ -713,18 +738,17 @@ class RagPipelineByIdApi(Resource): return workflow +@console_ns.route("/rag/pipelines//workflows/published/processing/parameters") class PublishedRagPipelineSecondStepApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def get(self, pipeline: Pipeline): """ Get second step parameters of rag pipeline """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("node_id", type=str, required=True, location="args") args = parser.parse_args() @@ -738,18 +762,17 @@ class PublishedRagPipelineSecondStepApi(Resource): } +@console_ns.route("/rag/pipelines//workflows/published/pre-processing/parameters") class PublishedRagPipelineFirstStepApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def get(self, pipeline: Pipeline): """ Get first step parameters 
of rag pipeline """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("node_id", type=str, required=True, location="args") args = parser.parse_args() @@ -763,18 +786,17 @@ class PublishedRagPipelineFirstStepApi(Resource): } +@console_ns.route("/rag/pipelines//workflows/draft/pre-processing/parameters") class DraftRagPipelineFirstStepApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def get(self, pipeline: Pipeline): """ Get first step parameters of rag pipeline """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("node_id", type=str, required=True, location="args") args = parser.parse_args() @@ -788,18 +810,17 @@ class DraftRagPipelineFirstStepApi(Resource): } +@console_ns.route("/rag/pipelines//workflows/draft/processing/parameters") class DraftRagPipelineSecondStepApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required def get(self, pipeline: Pipeline): """ Get second step parameters of rag pipeline """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("node_id", type=str, required=True, location="args") args = parser.parse_args() @@ -814,6 +835,7 @@ class DraftRagPipelineSecondStepApi(Resource): } +@console_ns.route("/rag/pipelines//workflow-runs") class RagPipelineWorkflowRunListApi(Resource): @setup_required @login_required @@ -835,6 +857,7 @@ class RagPipelineWorkflowRunListApi(Resource): return result +@console_ns.route("/rag/pipelines//workflow-runs/") class RagPipelineWorkflowRunDetailApi(Resource): @setup_required @login_required @@ -853,13 +876,14 @@ class RagPipelineWorkflowRunDetailApi(Resource): return workflow_run +@console_ns.route("/rag/pipelines//workflow-runs//node-executions") class RagPipelineWorkflowRunNodeExecutionListApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline @marshal_with(workflow_run_node_execution_list_fields) - def get(self, pipeline: Pipeline, run_id): + def get(self, pipeline: Pipeline, run_id: str): """ Get workflow run node execution list """ @@ -876,21 +900,17 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource): return {"data": node_executions} +@console_ns.route("/rag/pipelines/datasource-plugins") class DatasourceListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user - if not isinstance(user, Account): - raise Forbidden() - tenant_id = user.current_tenant_id - if not tenant_id: - raise Forbidden() - - return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(tenant_id)) + _, current_tenant_id = current_account_with_tenant() + return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(current_tenant_id)) +@console_ns.route("/rag/pipelines//workflows/draft/nodes//last-run") class RagPipelineWorkflowLastRunApi(Resource): @setup_required @login_required @@ -912,15 +932,16 @@ class 
RagPipelineWorkflowLastRunApi(Resource): return node_exec +@console_ns.route("/rag/pipelines/transform/datasets/") class RagPipelineTransformApi(Resource): @setup_required @login_required @account_initialization_required - def post(self, dataset_id): - if not isinstance(current_user, Account): - raise Forbidden() + @edit_permission_required + def post(self, dataset_id: str): + current_user, _ = current_account_with_tenant() - if not (current_user.has_edit_permission or current_user.is_dataset_operator): + if not current_user.is_dataset_operator: raise Forbidden() dataset_id = str(dataset_id) @@ -929,19 +950,19 @@ class RagPipelineTransformApi(Resource): return result +@console_ns.route("/rag/pipelines//workflows/draft/datasource/variables-inspect") class RagPipelineDatasourceVariableApi(Resource): @setup_required @login_required @account_initialization_required @get_rag_pipeline + @edit_permission_required @marshal_with(workflow_run_node_execution_fields) def post(self, pipeline: Pipeline): """ Set datasource variables """ - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("datasource_type", type=str, required=True, location="json") parser.add_argument("datasource_info", type=dict, required=True, location="json") @@ -958,6 +979,7 @@ class RagPipelineDatasourceVariableApi(Resource): return workflow_node_execution +@console_ns.route("/rag/pipelines/recommended-plugins") class RagPipelineRecommendedPluginApi(Resource): @setup_required @login_required @@ -966,114 +988,3 @@ class RagPipelineRecommendedPluginApi(Resource): rag_pipeline_service = RagPipelineService() recommended_plugins = rag_pipeline_service.get_recommended_plugins() return recommended_plugins - - -api.add_resource( - DraftRagPipelineApi, - "/rag/pipelines//workflows/draft", -) -api.add_resource( - DraftRagPipelineRunApi, - "/rag/pipelines//workflows/draft/run", -) -api.add_resource( - PublishedRagPipelineRunApi, - "/rag/pipelines//workflows/published/run", -) -api.add_resource( - RagPipelineTaskStopApi, - "/rag/pipelines//workflow-runs/tasks//stop", -) -api.add_resource( - RagPipelineDraftNodeRunApi, - "/rag/pipelines//workflows/draft/nodes//run", -) -api.add_resource( - RagPipelinePublishedDatasourceNodeRunApi, - "/rag/pipelines//workflows/published/datasource/nodes//run", -) - -api.add_resource( - RagPipelineDraftDatasourceNodeRunApi, - "/rag/pipelines//workflows/draft/datasource/nodes//run", -) - -api.add_resource( - RagPipelineDraftRunIterationNodeApi, - "/rag/pipelines//workflows/draft/iteration/nodes//run", -) - -api.add_resource( - RagPipelineDraftRunLoopNodeApi, - "/rag/pipelines//workflows/draft/loop/nodes//run", -) - -api.add_resource( - PublishedRagPipelineApi, - "/rag/pipelines//workflows/publish", -) -api.add_resource( - PublishedAllRagPipelineApi, - "/rag/pipelines//workflows", -) -api.add_resource( - DefaultRagPipelineBlockConfigsApi, - "/rag/pipelines//workflows/default-workflow-block-configs", -) -api.add_resource( - DefaultRagPipelineBlockConfigApi, - "/rag/pipelines//workflows/default-workflow-block-configs/", -) -api.add_resource( - RagPipelineByIdApi, - "/rag/pipelines//workflows/", -) -api.add_resource( - RagPipelineWorkflowRunListApi, - "/rag/pipelines//workflow-runs", -) -api.add_resource( - RagPipelineWorkflowRunDetailApi, - "/rag/pipelines//workflow-runs/", -) -api.add_resource( - RagPipelineWorkflowRunNodeExecutionListApi, - 
"/rag/pipelines//workflow-runs//node-executions", -) -api.add_resource( - DatasourceListApi, - "/rag/pipelines/datasource-plugins", -) -api.add_resource( - PublishedRagPipelineSecondStepApi, - "/rag/pipelines//workflows/published/processing/parameters", -) -api.add_resource( - PublishedRagPipelineFirstStepApi, - "/rag/pipelines//workflows/published/pre-processing/parameters", -) -api.add_resource( - DraftRagPipelineSecondStepApi, - "/rag/pipelines//workflows/draft/processing/parameters", -) -api.add_resource( - DraftRagPipelineFirstStepApi, - "/rag/pipelines//workflows/draft/pre-processing/parameters", -) -api.add_resource( - RagPipelineWorkflowLastRunApi, - "/rag/pipelines//workflows/draft/nodes//last-run", -) -api.add_resource( - RagPipelineTransformApi, - "/rag/pipelines/transform/datasets/", -) -api.add_resource( - RagPipelineDatasourceVariableApi, - "/rag/pipelines//workflows/draft/datasource/variables-inspect", -) - -api.add_resource( - RagPipelineRecommendedPluginApi, - "/rag/pipelines/recommended-plugins", -) diff --git a/api/controllers/console/datasets/wraps.py b/api/controllers/console/datasets/wraps.py index 98abb3ef8d..a8c1298e3e 100644 --- a/api/controllers/console/datasets/wraps.py +++ b/api/controllers/console/datasets/wraps.py @@ -3,8 +3,7 @@ from functools import wraps from controllers.console.datasets.error import PipelineNotFoundError from extensions.ext_database import db -from libs.login import current_user -from models.account import Account +from libs.login import current_account_with_tenant from models.dataset import Pipeline @@ -17,8 +16,7 @@ def get_rag_pipeline( if not kwargs.get("pipeline_id"): raise ValueError("missing pipeline_id in path parameters") - if not isinstance(current_user, Account): - raise ValueError("current_user is not an account") + _, current_tenant_id = current_account_with_tenant() pipeline_id = kwargs.get("pipeline_id") pipeline_id = str(pipeline_id) @@ -27,7 +25,7 @@ def get_rag_pipeline( pipeline = ( db.session.query(Pipeline) - .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_user.current_tenant_id) + .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_tenant_id) .first() ) diff --git a/api/controllers/console/explore/audio.py b/api/controllers/console/explore/audio.py index dc275fe18a..7c20fb49d8 100644 --- a/api/controllers/console/explore/audio.py +++ b/api/controllers/console/explore/audio.py @@ -26,9 +26,15 @@ from services.errors.audio import ( UnsupportedAudioTypeServiceError, ) +from .. import console_ns + logger = logging.getLogger(__name__) +@console_ns.route( + "/installed-apps//audio-to-text", + endpoint="installed_app_audio", +) class ChatAudioApi(InstalledAppResource): def post(self, installed_app): app_model = installed_app.app @@ -65,6 +71,10 @@ class ChatAudioApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps//text-to-audio", + endpoint="installed_app_text", +) class ChatTextApi(InstalledAppResource): def post(self, installed_app): from flask_restx import reqparse diff --git a/api/controllers/console/explore/completion.py b/api/controllers/console/explore/completion.py index a99708b7cd..1102b815eb 100644 --- a/api/controllers/console/explore/completion.py +++ b/api/controllers/console/explore/completion.py @@ -33,10 +33,16 @@ from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.llm import InvokeRateLimitError +from .. 
import console_ns + logger = logging.getLogger(__name__) # define completion api for user +@console_ns.route( + "/installed-apps//completion-messages", + endpoint="installed_app_completion", +) class CompletionApi(InstalledAppResource): def post(self, installed_app): app_model = installed_app.app @@ -87,6 +93,10 @@ class CompletionApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps//completion-messages//stop", + endpoint="installed_app_stop_completion", +) class CompletionStopApi(InstalledAppResource): def post(self, installed_app, task_id): app_model = installed_app.app @@ -100,6 +110,10 @@ class CompletionStopApi(InstalledAppResource): return {"result": "success"}, 200 +@console_ns.route( + "/installed-apps//chat-messages", + endpoint="installed_app_chat_completion", +) class ChatApi(InstalledAppResource): def post(self, installed_app): app_model = installed_app.app @@ -153,6 +167,10 @@ class ChatApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps//chat-messages//stop", + endpoint="installed_app_stop_chat_completion", +) class ChatStopApi(InstalledAppResource): def post(self, installed_app, task_id): app_model = installed_app.app diff --git a/api/controllers/console/explore/conversation.py b/api/controllers/console/explore/conversation.py index 1aef9c544d..feabea2524 100644 --- a/api/controllers/console/explore/conversation.py +++ b/api/controllers/console/explore/conversation.py @@ -16,7 +16,13 @@ from services.conversation_service import ConversationService from services.errors.conversation import ConversationNotExistsError, LastConversationNotExistsError from services.web_conversation_service import WebConversationService +from .. import console_ns + +@console_ns.route( + "/installed-apps//conversations", + endpoint="installed_app_conversations", +) class ConversationListApi(InstalledAppResource): @marshal_with(conversation_infinite_scroll_pagination_fields) def get(self, installed_app): @@ -52,6 +58,10 @@ class ConversationListApi(InstalledAppResource): raise NotFound("Last Conversation Not Exists.") +@console_ns.route( + "/installed-apps//conversations/", + endpoint="installed_app_conversation", +) class ConversationApi(InstalledAppResource): def delete(self, installed_app, c_id): app_model = installed_app.app @@ -70,6 +80,10 @@ class ConversationApi(InstalledAppResource): return {"result": "success"}, 204 +@console_ns.route( + "/installed-apps//conversations//name", + endpoint="installed_app_conversation_rename", +) class ConversationRenameApi(InstalledAppResource): @marshal_with(simple_conversation_fields) def post(self, installed_app, c_id): @@ -95,6 +109,10 @@ class ConversationRenameApi(InstalledAppResource): raise NotFound("Conversation Not Exists.") +@console_ns.route( + "/installed-apps//conversations//pin", + endpoint="installed_app_conversation_pin", +) class ConversationPinApi(InstalledAppResource): def patch(self, installed_app, c_id): app_model = installed_app.app @@ -114,6 +132,10 @@ class ConversationPinApi(InstalledAppResource): return {"result": "success"} +@console_ns.route( + "/installed-apps//conversations//unpin", + endpoint="installed_app_conversation_unpin", +) class ConversationUnPinApi(InstalledAppResource): def patch(self, installed_app, c_id): app_model = installed_app.app diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index bdc3fb0dbd..7ead93a1b6 100644 --- a/api/controllers/console/explore/installed_app.py 
+++ b/api/controllers/console/explore/installed_app.py @@ -6,14 +6,14 @@ from flask_restx import Resource, inputs, marshal_with, reqparse from sqlalchemy import and_, select from werkzeug.exceptions import BadRequest, Forbidden, NotFound -from controllers.console import api +from controllers.console import console_ns from controllers.console.explore.wraps import InstalledAppResource from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check from extensions.ext_database import db from fields.installed_app_fields import installed_app_list_fields from libs.datetime_utils import naive_utc_now -from libs.login import current_user, login_required -from models import Account, App, InstalledApp, RecommendedApp +from libs.login import current_account_with_tenant, login_required +from models import App, InstalledApp, RecommendedApp from services.account_service import TenantService from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -22,15 +22,14 @@ from services.feature_service import FeatureService logger = logging.getLogger(__name__) +@console_ns.route("/installed-apps") class InstalledAppsListApi(Resource): @login_required @account_initialization_required @marshal_with(installed_app_list_fields) def get(self): app_id = request.args.get("app_id", default=None, type=str) - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") - current_tenant_id = current_user.current_tenant_id + current_user, current_tenant_id = current_account_with_tenant() if app_id: installed_apps = db.session.scalars( @@ -120,9 +119,8 @@ class InstalledAppsListApi(Resource): if recommended_app is None: raise NotFound("App not found") - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") - current_tenant_id = current_user.current_tenant_id + _, current_tenant_id = current_account_with_tenant() + app = db.session.query(App).where(App.id == args["app_id"]).first() if app is None: @@ -154,6 +152,7 @@ class InstalledAppsListApi(Resource): return {"message": "App installed successfully"} +@console_ns.route("/installed-apps/") class InstalledAppApi(InstalledAppResource): """ update and delete an installed app @@ -161,9 +160,8 @@ class InstalledAppApi(InstalledAppResource): """ def delete(self, installed_app): - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") - if installed_app.app_owner_tenant_id == current_user.current_tenant_id: + _, current_tenant_id = current_account_with_tenant() + if installed_app.app_owner_tenant_id == current_tenant_id: raise BadRequest("You can't uninstall an app owned by the current tenant") db.session.delete(installed_app) @@ -185,7 +183,3 @@ class InstalledAppApi(InstalledAppResource): db.session.commit() return {"result": "success", "message": "App info updated successfully"} - - -api.add_resource(InstalledAppsListApi, "/installed-apps") -api.add_resource(InstalledAppApi, "/installed-apps/") diff --git a/api/controllers/console/explore/message.py b/api/controllers/console/explore/message.py index c46c1c1f4f..064e026753 100644 --- a/api/controllers/console/explore/message.py +++ b/api/controllers/console/explore/message.py @@ -23,8 +23,7 @@ from core.model_runtime.errors.invoke import InvokeError from fields.message_fields import message_infinite_scroll_pagination_fields from libs import helper from libs.helper import uuid_value -from 
libs.login import current_user -from models import Account +from libs.login import current_account_with_tenant from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.app import MoreLikeThisDisabledError @@ -36,12 +35,19 @@ from services.errors.message import ( ) from services.message_service import MessageService +from .. import console_ns + logger = logging.getLogger(__name__) +@console_ns.route( + "/installed-apps//messages", + endpoint="installed_app_messages", +) class MessageListApi(InstalledAppResource): @marshal_with(message_infinite_scroll_pagination_fields) def get(self, installed_app): + current_user, _ = current_account_with_tenant() app_model = installed_app.app app_mode = AppMode.value_of(app_model.mode) @@ -55,8 +61,6 @@ class MessageListApi(InstalledAppResource): args = parser.parse_args() try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") return MessageService.pagination_by_first_id( app_model, current_user, args["conversation_id"], args["first_id"], args["limit"] ) @@ -66,8 +70,13 @@ class MessageListApi(InstalledAppResource): raise NotFound("First Message Not Exists.") +@console_ns.route( + "/installed-apps//messages//feedbacks", + endpoint="installed_app_message_feedback", +) class MessageFeedbackApi(InstalledAppResource): def post(self, installed_app, message_id): + current_user, _ = current_account_with_tenant() app_model = installed_app.app message_id = str(message_id) @@ -78,8 +87,6 @@ class MessageFeedbackApi(InstalledAppResource): args = parser.parse_args() try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") MessageService.create_feedback( app_model=app_model, message_id=message_id, @@ -93,8 +100,13 @@ class MessageFeedbackApi(InstalledAppResource): return {"result": "success"} +@console_ns.route( + "/installed-apps//messages//more-like-this", + endpoint="installed_app_more_like_this", +) class MessageMoreLikeThisApi(InstalledAppResource): def get(self, installed_app, message_id): + current_user, _ = current_account_with_tenant() app_model = installed_app.app if app_model.mode != "completion": raise NotCompletionAppError() @@ -110,8 +122,6 @@ class MessageMoreLikeThisApi(InstalledAppResource): streaming = args["response_mode"] == "streaming" try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") response = AppGenerateService.generate_more_like_this( app_model=app_model, user=current_user, @@ -139,8 +149,13 @@ class MessageMoreLikeThisApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps//messages//suggested-questions", + endpoint="installed_app_suggested_question", +) class MessageSuggestedQuestionApi(InstalledAppResource): def get(self, installed_app, message_id): + current_user, _ = current_account_with_tenant() app_model = installed_app.app app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -149,8 +164,6 @@ class MessageSuggestedQuestionApi(InstalledAppResource): message_id = str(message_id) try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") questions = MessageService.get_suggested_questions_after_answer( app_model=app_model, user=current_user, message_id=message_id, invoke_from=InvokeFrom.EXPLORE ) diff --git a/api/controllers/console/explore/parameter.py 
b/api/controllers/console/explore/parameter.py index 7742ea24a9..9c6b2aedfb 100644 --- a/api/controllers/console/explore/parameter.py +++ b/api/controllers/console/explore/parameter.py @@ -1,7 +1,7 @@ from flask_restx import marshal_with from controllers.common import fields -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.error import AppUnavailableError from controllers.console.explore.wraps import InstalledAppResource from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict @@ -9,6 +9,7 @@ from models.model import AppMode, InstalledApp from services.app_service import AppService +@console_ns.route("/installed-apps//parameters", endpoint="installed_app_parameters") class AppParameterApi(InstalledAppResource): """Resource for app variables.""" @@ -39,6 +40,7 @@ class AppParameterApi(InstalledAppResource): return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) +@console_ns.route("/installed-apps//meta", endpoint="installed_app_meta") class ExploreAppMetaApi(InstalledAppResource): def get(self, installed_app: InstalledApp): """Get app meta""" @@ -46,9 +48,3 @@ class ExploreAppMetaApi(InstalledAppResource): if not app_model: raise ValueError("App not found") return AppService().get_app_meta(app_model) - - -api.add_resource( - AppParameterApi, "/installed-apps//parameters", endpoint="installed_app_parameters" -) -api.add_resource(ExploreAppMetaApi, "/installed-apps//meta", endpoint="installed_app_meta") diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py index 974222ddf7..6d627a929a 100644 --- a/api/controllers/console/explore/recommended_app.py +++ b/api/controllers/console/explore/recommended_app.py @@ -1,7 +1,7 @@ from flask_restx import Resource, fields, marshal_with, reqparse from constants.languages import languages -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required from libs.helper import AppIconUrlField from libs.login import current_user, login_required @@ -35,6 +35,7 @@ recommended_app_list_fields = { } +@console_ns.route("/explore/apps") class RecommendedAppListApi(Resource): @login_required @account_initialization_required @@ -56,13 +57,10 @@ class RecommendedAppListApi(Resource): return RecommendedAppService.get_recommended_apps_and_categories(language_prefix) +@console_ns.route("/explore/apps/") class RecommendedAppApi(Resource): @login_required @account_initialization_required def get(self, app_id): app_id = str(app_id) return RecommendedAppService.get_recommend_app_detail(app_id) - - -api.add_resource(RecommendedAppListApi, "/explore/apps") -api.add_resource(RecommendedAppApi, "/explore/apps/") diff --git a/api/controllers/console/explore/saved_message.py b/api/controllers/console/explore/saved_message.py index 6f05f898f9..830685975b 100644 --- a/api/controllers/console/explore/saved_message.py +++ b/api/controllers/console/explore/saved_message.py @@ -2,13 +2,12 @@ from flask_restx import fields, marshal_with, reqparse from flask_restx.inputs import int_range from werkzeug.exceptions import NotFound -from controllers.console import api +from controllers.console import console_ns from controllers.console.explore.error import NotCompletionAppError from controllers.console.explore.wraps import InstalledAppResource from fields.conversation_fields import message_file_fields from 
libs.helper import TimestampField, uuid_value -from libs.login import current_user -from models import Account +from libs.login import current_account_with_tenant from services.errors.message import MessageNotExistsError from services.saved_message_service import SavedMessageService @@ -25,6 +24,7 @@ message_fields = { } +@console_ns.route("/installed-apps//saved-messages", endpoint="installed_app_saved_messages") class SavedMessageListApi(InstalledAppResource): saved_message_infinite_scroll_pagination_fields = { "limit": fields.Integer, @@ -34,6 +34,7 @@ class SavedMessageListApi(InstalledAppResource): @marshal_with(saved_message_infinite_scroll_pagination_fields) def get(self, installed_app): + current_user, _ = current_account_with_tenant() app_model = installed_app.app if app_model.mode != "completion": raise NotCompletionAppError() @@ -43,11 +44,10 @@ class SavedMessageListApi(InstalledAppResource): parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") args = parser.parse_args() - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") return SavedMessageService.pagination_by_last_id(app_model, current_user, args["last_id"], args["limit"]) def post(self, installed_app): + current_user, _ = current_account_with_tenant() app_model = installed_app.app if app_model.mode != "completion": raise NotCompletionAppError() @@ -57,8 +57,6 @@ class SavedMessageListApi(InstalledAppResource): args = parser.parse_args() try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") SavedMessageService.save(app_model, current_user, args["message_id"]) except MessageNotExistsError: raise NotFound("Message Not Exists.") @@ -66,8 +64,12 @@ class SavedMessageListApi(InstalledAppResource): return {"result": "success"} +@console_ns.route( + "/installed-apps//saved-messages/", endpoint="installed_app_saved_message" +) class SavedMessageApi(InstalledAppResource): def delete(self, installed_app, message_id): + current_user, _ = current_account_with_tenant() app_model = installed_app.app message_id = str(message_id) @@ -75,20 +77,6 @@ class SavedMessageApi(InstalledAppResource): if app_model.mode != "completion": raise NotCompletionAppError() - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") SavedMessageService.delete(app_model, current_user, message_id) return {"result": "success"}, 204 - - -api.add_resource( - SavedMessageListApi, - "/installed-apps//saved-messages", - endpoint="installed_app_saved_messages", -) -api.add_resource( - SavedMessageApi, - "/installed-apps//saved-messages/", - endpoint="installed_app_saved_message", -) diff --git a/api/controllers/console/explore/workflow.py b/api/controllers/console/explore/workflow.py index 61e0f1b36a..e32f2814eb 100644 --- a/api/controllers/console/explore/workflow.py +++ b/api/controllers/console/explore/workflow.py @@ -27,9 +27,12 @@ from models.model import AppMode, InstalledApp from services.app_generate_service import AppGenerateService from services.errors.llm import InvokeRateLimitError +from .. 
import console_ns + logger = logging.getLogger(__name__) +@console_ns.route("/installed-apps//workflows/run") class InstalledAppWorkflowRunApi(InstalledAppResource): def post(self, installed_app: InstalledApp): """ @@ -70,6 +73,7 @@ class InstalledAppWorkflowRunApi(InstalledAppResource): raise InternalServerError() +@console_ns.route("/installed-apps//workflows/tasks//stop") class InstalledAppWorkflowTaskStopApi(InstalledAppResource): def post(self, installed_app: InstalledApp, task_id: str): """ diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py index 3a8ba64a03..df4eed18eb 100644 --- a/api/controllers/console/explore/wraps.py +++ b/api/controllers/console/explore/wraps.py @@ -2,14 +2,13 @@ from collections.abc import Callable from functools import wraps from typing import Concatenate, ParamSpec, TypeVar -from flask_login import current_user from flask_restx import Resource from werkzeug.exceptions import NotFound from controllers.console.explore.error import AppAccessDeniedError from controllers.console.wraps import account_initialization_required from extensions.ext_database import db -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import InstalledApp from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -24,11 +23,10 @@ def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | Non def decorator(view: Callable[Concatenate[InstalledApp, P], R]): @wraps(view) def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs): + _, current_tenant_id = current_account_with_tenant() installed_app = ( db.session.query(InstalledApp) - .where( - InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_user.current_tenant_id - ) + .where(InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_tenant_id) .first() ) @@ -54,6 +52,7 @@ def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] | def decorator(view: Callable[Concatenate[InstalledApp, P], R]): @wraps(view) def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs): + current_user, _ = current_account_with_tenant() feature = FeatureService.get_system_features() if feature.webapp_auth.enabled: app_id = installed_app.app_id diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py index 57f5ab191e..f77996eb6a 100644 --- a/api/controllers/console/extension.py +++ b/api/controllers/console/extension.py @@ -1,11 +1,10 @@ -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from constants import HIDDEN_VALUE from controllers.console import api, console_ns from controllers.console.wraps import account_initialization_required, setup_required from fields.api_based_extension_fields import api_based_extension_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.api_based_extension import APIBasedExtension from services.api_based_extension_service import APIBasedExtensionService from services.code_based_extension_service import CodeBasedExtensionService @@ -47,7 +46,7 @@ class APIBasedExtensionAPI(Resource): @account_initialization_required @marshal_with(api_based_extension_fields) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return 
APIBasedExtensionService.get_all_by_tenant_id(tenant_id) @api.doc("create_api_based_extension") @@ -68,14 +67,16 @@ class APIBasedExtensionAPI(Resource): @account_initialization_required @marshal_with(api_based_extension_fields) def post(self): + _, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") parser.add_argument("api_endpoint", type=str, required=True, location="json") parser.add_argument("api_key", type=str, required=True, location="json") args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() extension_data = APIBasedExtension( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, name=args["name"], api_endpoint=args["api_endpoint"], api_key=args["api_key"], @@ -96,7 +97,7 @@ class APIBasedExtensionDetailAPI(Resource): @marshal_with(api_based_extension_fields) def get(self, id): api_based_extension_id = str(id) - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id) @@ -120,9 +121,9 @@ class APIBasedExtensionDetailAPI(Resource): @marshal_with(api_based_extension_fields) def post(self, id): api_based_extension_id = str(id) - tenant_id = current_user.current_tenant_id + _, current_tenant_id = current_account_with_tenant() - extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id) + extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id) parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") @@ -147,9 +148,9 @@ class APIBasedExtensionDetailAPI(Resource): @account_initialization_required def delete(self, id): api_based_extension_id = str(id) - tenant_id = current_user.current_tenant_id + _, current_tenant_id = current_account_with_tenant() - extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id) + extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id) APIBasedExtensionService.delete(extension_data_from_db) diff --git a/api/controllers/console/feature.py b/api/controllers/console/feature.py index d43b839291..39bcf3424c 100644 --- a/api/controllers/console/feature.py +++ b/api/controllers/console/feature.py @@ -1,7 +1,6 @@ -from flask_login import current_user from flask_restx import Resource, fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.feature_service import FeatureService from . 
import api, console_ns @@ -23,7 +22,9 @@ class FeatureApi(Resource): @cloud_utm_record def get(self): """Get feature configuration for current tenant""" - return FeatureService.get_features(current_user.current_tenant_id).model_dump() + _, current_tenant_id = current_account_with_tenant() + + return FeatureService.get_features(current_tenant_id).model_dump() @console_ns.route("/system-features") diff --git a/api/controllers/console/files.py b/api/controllers/console/files.py index 105f802878..1cd193f7ad 100644 --- a/api/controllers/console/files.py +++ b/api/controllers/console/files.py @@ -1,7 +1,6 @@ from typing import Literal from flask import request -from flask_login import current_user from flask_restx import Resource, marshal_with from werkzeug.exceptions import Forbidden @@ -22,13 +21,15 @@ from controllers.console.wraps import ( ) from extensions.ext_database import db from fields.file_fields import file_fields, upload_config_fields -from libs.login import login_required -from models import Account +from libs.login import current_account_with_tenant, login_required from services.file_service import FileService +from . import console_ns + PREVIEW_WORDS_LIMIT = 3000 +@console_ns.route("/files/upload") class FileApi(Resource): @setup_required @login_required @@ -50,6 +51,7 @@ class FileApi(Resource): @marshal_with(file_fields) @cloud_edition_billing_resource_check("documents") def post(self): + current_user, _ = current_account_with_tenant() source_str = request.form.get("source") source: Literal["datasets"] | None = "datasets" if source_str == "datasets" else None @@ -62,16 +64,12 @@ class FileApi(Resource): if not file.filename: raise FilenameNotExistsError - if source == "datasets" and not current_user.is_dataset_editor: raise Forbidden() if source not in ("datasets", None): source = None - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - try: upload_file = FileService(db.engine).upload_file( filename=file.filename, @@ -88,6 +86,7 @@ class FileApi(Resource): return upload_file, 201 +@console_ns.route("/files//preview") class FilePreviewApi(Resource): @setup_required @login_required @@ -98,9 +97,10 @@ class FilePreviewApi(Resource): return {"content": text} +@console_ns.route("/files/support-type") class FileSupportTypeApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - return {"allowed_extensions": DOCUMENT_EXTENSIONS} + return {"allowed_extensions": list(DOCUMENT_EXTENSIONS)} diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py index dd4f34b9bd..b053f222df 100644 --- a/api/controllers/console/remote_files.py +++ b/api/controllers/console/remote_files.py @@ -1,8 +1,6 @@ import urllib.parse -from typing import cast import httpx -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse import services @@ -16,10 +14,13 @@ from core.file import helpers as file_helpers from core.helper import ssrf_proxy from extensions.ext_database import db from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields -from models.account import Account +from libs.login import current_account_with_tenant from services.file_service import FileService +from . 
import console_ns + +@console_ns.route("/remote-files/") class RemoteFileInfoApi(Resource): @marshal_with(remote_file_info_fields) def get(self, url): @@ -35,6 +36,7 @@ class RemoteFileInfoApi(Resource): } +@console_ns.route("/remote-files/upload") class RemoteFileUploadApi(Resource): @marshal_with(file_fields_with_signed_url) def post(self): @@ -61,7 +63,7 @@ class RemoteFileUploadApi(Resource): content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content try: - user = cast(Account, current_user) + user, _ = current_account_with_tenant() upload_file = FileService(db.engine).upload_file( filename=file_info.filename, content=content, diff --git a/api/controllers/console/spec.py b/api/controllers/console/spec.py index ca54715fe0..1795e2d172 100644 --- a/api/controllers/console/spec.py +++ b/api/controllers/console/spec.py @@ -2,7 +2,6 @@ import logging from flask_restx import Resource -from controllers.console import api from controllers.console.wraps import ( account_initialization_required, setup_required, @@ -10,9 +9,12 @@ from controllers.console.wraps import ( from core.schemas.schema_manager import SchemaManager from libs.login import login_required +from . import console_ns + logger = logging.getLogger(__name__) +@console_ns.route("/spec/schema-definitions") class SpecSchemaDefinitionsApi(Resource): @setup_required @login_required @@ -30,6 +32,3 @@ class SpecSchemaDefinitionsApi(Resource): logger.exception("Failed to get schema definitions from local registry") # Return empty array as fallback return [], 200 - - -api.add_resource(SpecSchemaDefinitionsApi, "/spec/schema-definitions") diff --git a/api/controllers/console/tag/tags.py b/api/controllers/console/tag/tags.py index da236ee5af..5748ca110d 100644 --- a/api/controllers/console/tag/tags.py +++ b/api/controllers/console/tag/tags.py @@ -1,12 +1,11 @@ from flask import request -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from fields.tag_fields import dataset_tag_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.model import Tag from services.tag_service import TagService @@ -17,15 +16,17 @@ def _validate_name(name): return name +@console_ns.route("/tags") class TagListApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(dataset_tag_fields) def get(self): + _, current_tenant_id = current_account_with_tenant() tag_type = request.args.get("type", type=str, default="") keyword = request.args.get("keyword", default=None, type=str) - tags = TagService.get_tags(tag_type, current_user.current_tenant_id, keyword) + tags = TagService.get_tags(tag_type, current_tenant_id, keyword) return tags, 200 @@ -33,8 +34,9 @@ class TagListApi(Resource): @login_required @account_initialization_required def post(self): + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, or editor - if not (current_user.is_editor or current_user.is_dataset_editor): + if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() parser = reqparse.RequestParser() @@ -52,14 +54,16 @@ class TagListApi(Resource): return response, 200 +@console_ns.route("/tags/") class 
TagUpdateDeleteApi(Resource): @setup_required @login_required @account_initialization_required def patch(self, tag_id): + current_user, _ = current_account_with_tenant() tag_id = str(tag_id) # The role of the current user in the ta table must be admin, owner, or editor - if not (current_user.is_editor or current_user.is_dataset_editor): + if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() parser = reqparse.RequestParser() @@ -79,9 +83,10 @@ class TagUpdateDeleteApi(Resource): @login_required @account_initialization_required def delete(self, tag_id): + current_user, _ = current_account_with_tenant() tag_id = str(tag_id) # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: + if not current_user.has_edit_permission: raise Forbidden() TagService.delete_tag(tag_id) @@ -89,13 +94,15 @@ class TagUpdateDeleteApi(Resource): return 204 +@console_ns.route("/tag-bindings/create") class TagBindingCreateApi(Resource): @setup_required @login_required @account_initialization_required def post(self): + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator - if not (current_user.is_editor or current_user.is_dataset_editor): + if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() parser = reqparse.RequestParser() @@ -114,13 +121,15 @@ class TagBindingCreateApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/tag-bindings/remove") class TagBindingDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self): + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator - if not (current_user.is_editor or current_user.is_dataset_editor): + if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() parser = reqparse.RequestParser() @@ -133,9 +142,3 @@ class TagBindingDeleteApi(Resource): TagService.delete_tag_binding(args) return {"result": "success"}, 200 - - -api.add_resource(TagListApi, "/tags") -api.add_resource(TagUpdateDeleteApi, "/tags/") -api.add_resource(TagBindingCreateApi, "/tag-bindings/create") -api.add_resource(TagBindingDeleteApi, "/tag-bindings/remove") diff --git a/api/controllers/console/workspace/__init__.py b/api/controllers/console/workspace/__init__.py index 4a048f3c5e..876e2301f2 100644 --- a/api/controllers/console/workspace/__init__.py +++ b/api/controllers/console/workspace/__init__.py @@ -2,11 +2,11 @@ from collections.abc import Callable from functools import wraps from typing import ParamSpec, TypeVar -from flask_login import current_user from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden from extensions.ext_database import db +from libs.login import current_account_with_tenant from models.account import TenantPluginPermission P = ParamSpec("P") @@ -20,8 +20,9 @@ def plugin_permission_required( def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): + current_user, current_tenant_id = current_account_with_tenant() user = current_user - tenant_id = user.current_tenant_id + tenant_id = current_tenant_id with Session(db.engine) as session: permission = ( diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 7a41a8a5cc..a5e6b8f473 100644 --- 
a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -2,14 +2,13 @@ from datetime import datetime import pytz from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session from configs import dify_config from constants.languages import supported_language -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ( EmailAlreadyInUseError, EmailChangeLimitError, @@ -37,21 +36,19 @@ from extensions.ext_database import db from fields.member_fields import account_fields from libs.datetime_utils import naive_utc_now from libs.helper import TimestampField, email, extract_remote_ip, timezone -from libs.login import login_required -from models import AccountIntegrate, InvitationCode -from models.account import Account +from libs.login import current_account_with_tenant, login_required +from models import Account, AccountIntegrate, InvitationCode from services.account_service import AccountService from services.billing_service import BillingService from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError +@console_ns.route("/account/init") class AccountInitApi(Resource): @setup_required @login_required def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - account = current_user + account, _ = current_account_with_tenant() if account.status == "active": raise AccountAlreadyInitedError() @@ -97,6 +94,7 @@ class AccountInitApi(Resource): return {"result": "success"} +@console_ns.route("/account/profile") class AccountProfileApi(Resource): @setup_required @login_required @@ -104,19 +102,18 @@ class AccountProfileApi(Resource): @marshal_with(account_fields) @enterprise_license_required def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() return current_user +@console_ns.route("/account/name") class AccountNameApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(account_fields) def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") args = parser.parse_args() @@ -130,14 +127,14 @@ class AccountNameApi(Resource): return updated_account +@console_ns.route("/account/avatar") class AccountAvatarApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(account_fields) def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("avatar", type=str, required=True, location="json") args = parser.parse_args() @@ -147,14 +144,14 @@ class AccountAvatarApi(Resource): return updated_account +@console_ns.route("/account/interface-language") class AccountInterfaceLanguageApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(account_fields) def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() 
parser.add_argument("interface_language", type=supported_language, required=True, location="json") args = parser.parse_args() @@ -164,14 +161,14 @@ class AccountInterfaceLanguageApi(Resource): return updated_account +@console_ns.route("/account/interface-theme") class AccountInterfaceThemeApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(account_fields) def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("interface_theme", type=str, choices=["light", "dark"], required=True, location="json") args = parser.parse_args() @@ -181,14 +178,14 @@ class AccountInterfaceThemeApi(Resource): return updated_account +@console_ns.route("/account/timezone") class AccountTimezoneApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(account_fields) def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("timezone", type=str, required=True, location="json") args = parser.parse_args() @@ -202,14 +199,14 @@ class AccountTimezoneApi(Resource): return updated_account +@console_ns.route("/account/password") class AccountPasswordApi(Resource): @setup_required @login_required @account_initialization_required @marshal_with(account_fields) def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("password", type=str, required=False, location="json") parser.add_argument("new_password", type=str, required=True, location="json") @@ -227,6 +224,7 @@ class AccountPasswordApi(Resource): return {"result": "success"} +@console_ns.route("/account/integrates") class AccountIntegrateApi(Resource): integrate_fields = { "provider": fields.String, @@ -244,9 +242,7 @@ class AccountIntegrateApi(Resource): @account_initialization_required @marshal_with(integrate_list_fields) def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - account = current_user + account, _ = current_account_with_tenant() account_integrates = db.session.scalars( select(AccountIntegrate).where(AccountIntegrate.account_id == account.id) @@ -283,14 +279,13 @@ class AccountIntegrateApi(Resource): return {"data": integrate_data} +@console_ns.route("/account/delete/verify") class AccountDeleteVerifyApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - account = current_user + account, _ = current_account_with_tenant() token, code = AccountService.generate_account_deletion_verification_code(account) AccountService.send_account_deletion_verification_email(account, code) @@ -298,14 +293,13 @@ class AccountDeleteVerifyApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/account/delete") class AccountDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("token", type=str, 
required=True, location="json") @@ -320,6 +314,7 @@ class AccountDeleteApi(Resource): return {"result": "success"} +@console_ns.route("/account/delete/feedback") class AccountDeleteUpdateFeedbackApi(Resource): @setup_required def post(self): @@ -333,6 +328,7 @@ class AccountDeleteUpdateFeedbackApi(Resource): return {"result": "success"} +@console_ns.route("/account/education/verify") class EducationVerifyApi(Resource): verify_fields = { "token": fields.String, @@ -345,13 +341,12 @@ class EducationVerifyApi(Resource): @cloud_edition_billing_enabled @marshal_with(verify_fields) def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - account = current_user + account, _ = current_account_with_tenant() return BillingService.EducationIdentity.verify(account.id, account.email) +@console_ns.route("/account/education") class EducationApi(Resource): status_fields = { "result": fields.Boolean, @@ -366,9 +361,7 @@ class EducationApi(Resource): @only_edition_cloud @cloud_edition_billing_enabled def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - account = current_user + account, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("token", type=str, required=True, location="json") @@ -385,9 +378,7 @@ class EducationApi(Resource): @cloud_edition_billing_enabled @marshal_with(status_fields) def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - account = current_user + account, _ = current_account_with_tenant() res = BillingService.EducationIdentity.status(account.id) # convert expire_at to UTC timestamp from isoformat @@ -396,6 +387,7 @@ class EducationApi(Resource): return res +@console_ns.route("/account/education/autocomplete") class EducationAutoCompleteApi(Resource): data_fields = { "data": fields.List(fields.String), @@ -419,12 +411,14 @@ class EducationAutoCompleteApi(Resource): return BillingService.EducationIdentity.autocomplete(args["keywords"], args["page"], args["limit"]) +@console_ns.route("/account/change-email") class ChangeEmailSendEmailApi(Resource): @enable_change_email @setup_required @login_required @account_initialization_required def post(self): + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("email", type=email, required=True, location="json") parser.add_argument("language", type=str, required=False, location="json") @@ -451,8 +445,6 @@ class ChangeEmailSendEmailApi(Resource): raise InvalidTokenError() user_email = reset_data.get("email", "") - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") if user_email != current_user.email: raise InvalidEmailError() else: @@ -467,6 +459,7 @@ class ChangeEmailSendEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/account/change-email/validity") class ChangeEmailCheckApi(Resource): @enable_change_email @setup_required @@ -508,6 +501,7 @@ class ChangeEmailCheckApi(Resource): return {"is_valid": True, "email": token_data.get("email"), "token": new_token} +@console_ns.route("/account/change-email/reset") class ChangeEmailResetApi(Resource): @enable_change_email @setup_required @@ -533,8 +527,7 @@ class ChangeEmailResetApi(Resource): AccountService.revoke_change_email_token(args["token"]) old_email = reset_data.get("old_email", "") - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = 
current_account_with_tenant() if current_user.email != old_email: raise AccountNotFound() @@ -547,6 +540,7 @@ class ChangeEmailResetApi(Resource): return updated_account +@console_ns.route("/account/change-email/check-email-unique") class CheckEmailUnique(Resource): @setup_required def post(self): @@ -558,28 +552,3 @@ class CheckEmailUnique(Resource): if not AccountService.check_email_unique(args["email"]): raise EmailAlreadyInUseError() return {"result": "success"} - - -# Register API resources -api.add_resource(AccountInitApi, "/account/init") -api.add_resource(AccountProfileApi, "/account/profile") -api.add_resource(AccountNameApi, "/account/name") -api.add_resource(AccountAvatarApi, "/account/avatar") -api.add_resource(AccountInterfaceLanguageApi, "/account/interface-language") -api.add_resource(AccountInterfaceThemeApi, "/account/interface-theme") -api.add_resource(AccountTimezoneApi, "/account/timezone") -api.add_resource(AccountPasswordApi, "/account/password") -api.add_resource(AccountIntegrateApi, "/account/integrates") -api.add_resource(AccountDeleteVerifyApi, "/account/delete/verify") -api.add_resource(AccountDeleteApi, "/account/delete") -api.add_resource(AccountDeleteUpdateFeedbackApi, "/account/delete/feedback") -api.add_resource(EducationVerifyApi, "/account/education/verify") -api.add_resource(EducationApi, "/account/education") -api.add_resource(EducationAutoCompleteApi, "/account/education/autocomplete") -# Change email -api.add_resource(ChangeEmailSendEmailApi, "/account/change-email") -api.add_resource(ChangeEmailCheckApi, "/account/change-email/validity") -api.add_resource(ChangeEmailResetApi, "/account/change-email/reset") -api.add_resource(CheckEmailUnique, "/account/change-email/check-email-unique") -# api.add_resource(AccountEmailApi, '/account/email') -# api.add_resource(AccountEmailVerifyApi, '/account/email-verify') diff --git a/api/controllers/console/workspace/agent_providers.py b/api/controllers/console/workspace/agent_providers.py index 0a2c8fcfb4..0a8f49d2e5 100644 --- a/api/controllers/console/workspace/agent_providers.py +++ b/api/controllers/console/workspace/agent_providers.py @@ -1,10 +1,9 @@ -from flask_login import current_user from flask_restx import Resource, fields from controllers.console import api, console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.agent_service import AgentService @@ -21,10 +20,11 @@ class AgentProviderListApi(Resource): @login_required @account_initialization_required def get(self): + current_user, current_tenant_id = current_account_with_tenant() user = current_user user_id = user.id - tenant_id = user.current_tenant_id + tenant_id = current_tenant_id return jsonable_encoder(AgentService.list_agent_providers(user_id, tenant_id)) @@ -43,7 +43,5 @@ class AgentProviderApi(Resource): @login_required @account_initialization_required def get(self, provider_name: str): - user = current_user - user_id = user.id - tenant_id = user.current_tenant_id - return jsonable_encoder(AgentService.get_agent_provider(user_id, tenant_id, provider_name)) + current_user, current_tenant_id = current_account_with_tenant() + return jsonable_encoder(AgentService.get_agent_provider(current_user.id, current_tenant_id, provider_name)) diff --git a/api/controllers/console/workspace/endpoint.py 
b/api/controllers/console/workspace/endpoint.py index 0657b764cc..b31011b4a3 100644 --- a/api/controllers/console/workspace/endpoint.py +++ b/api/controllers/console/workspace/endpoint.py @@ -1,4 +1,3 @@ -from flask_login import current_user from flask_restx import Resource, fields, reqparse from werkzeug.exceptions import Forbidden @@ -6,7 +5,7 @@ from controllers.console import api, console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.exc import PluginPermissionDeniedError -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.plugin.endpoint_service import EndpointService @@ -34,7 +33,7 @@ class EndpointCreateApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() @@ -51,7 +50,7 @@ class EndpointCreateApi(Resource): try: return { "success": EndpointService.create_endpoint( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, user_id=user.id, plugin_unique_identifier=plugin_unique_identifier, name=name, @@ -80,7 +79,7 @@ class EndpointListApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("page", type=int, required=True, location="args") @@ -93,7 +92,7 @@ class EndpointListApi(Resource): return jsonable_encoder( { "endpoints": EndpointService.list_endpoints( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, user_id=user.id, page=page, page_size=page_size, @@ -123,7 +122,7 @@ class EndpointListForSinglePluginApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("page", type=int, required=True, location="args") @@ -138,7 +137,7 @@ class EndpointListForSinglePluginApi(Resource): return jsonable_encoder( { "endpoints": EndpointService.list_endpoints_for_single_plugin( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, user_id=user.id, plugin_id=plugin_id, page=page, @@ -165,7 +164,7 @@ class EndpointDeleteApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("endpoint_id", type=str, required=True) @@ -177,9 +176,7 @@ class EndpointDeleteApi(Resource): endpoint_id = args["endpoint_id"] return { - "success": EndpointService.delete_endpoint( - tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id - ) + "success": EndpointService.delete_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id) } @@ -207,7 +204,7 @@ class EndpointUpdateApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("endpoint_id", type=str, required=True) @@ -224,7 +221,7 @@ class EndpointUpdateApi(Resource): return { "success": EndpointService.update_endpoint( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id, name=name, @@ -250,7 +247,7 @@ class EndpointEnableApi(Resource): 
@login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("endpoint_id", type=str, required=True) @@ -262,9 +259,7 @@ class EndpointEnableApi(Resource): raise Forbidden() return { - "success": EndpointService.enable_endpoint( - tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id - ) + "success": EndpointService.enable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id) } @@ -285,7 +280,7 @@ class EndpointDisableApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("endpoint_id", type=str, required=True) @@ -297,7 +292,5 @@ class EndpointDisableApi(Resource): raise Forbidden() return { - "success": EndpointService.disable_endpoint( - tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id - ) + "success": EndpointService.disable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id) } diff --git a/api/controllers/console/workspace/load_balancing_config.py b/api/controllers/console/workspace/load_balancing_config.py index 7c1bc7c075..4e6f1fa3a5 100644 --- a/api/controllers/console/workspace/load_balancing_config.py +++ b/api/controllers/console/workspace/load_balancing_config.py @@ -1,26 +1,28 @@ from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError -from libs.login import current_user, login_required -from models.account import Account, TenantAccountRole +from libs.login import current_account_with_tenant, login_required +from models import TenantAccountRole from services.model_load_balancing_service import ModelLoadBalancingService +@console_ns.route( + "/workspaces/current/model-providers//models/load-balancing-configs/credentials-validate" +) class LoadBalancingCredentialsValidateApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider: str): - assert isinstance(current_user, Account) + current_user, current_tenant_id = current_account_with_tenant() if not TenantAccountRole.is_privileged_role(current_user.current_role): raise Forbidden() - tenant_id = current_user.current_tenant_id - assert tenant_id is not None + tenant_id = current_tenant_id parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") @@ -61,17 +63,19 @@ class LoadBalancingCredentialsValidateApi(Resource): return response +@console_ns.route( + "/workspaces/current/model-providers//models/load-balancing-configs//credentials-validate" +) class LoadBalancingConfigCredentialsValidateApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider: str, config_id: str): - assert isinstance(current_user, Account) + current_user, current_tenant_id = current_account_with_tenant() if not TenantAccountRole.is_privileged_role(current_user.current_role): raise Forbidden() - tenant_id = current_user.current_tenant_id - assert tenant_id is not None + tenant_id = current_tenant_id parser = 
reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") @@ -111,15 +115,3 @@ class LoadBalancingConfigCredentialsValidateApi(Resource): response["error"] = error return response - - -# Load Balancing Config -api.add_resource( - LoadBalancingCredentialsValidateApi, - "/workspaces/current/model-providers//models/load-balancing-configs/credentials-validate", -) - -api.add_resource( - LoadBalancingConfigCredentialsValidateApi, - "/workspaces/current/model-providers//models/load-balancing-configs//credentials-validate", -) diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index 77f0c9a735..4f080708cc 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -1,12 +1,11 @@ from urllib import parse from flask import abort, request -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse import services from configs import dify_config -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ( CannotTransferOwnerToSelfError, EmailCodeError, @@ -26,13 +25,14 @@ from controllers.console.wraps import ( from extensions.ext_database import db from fields.member_fields import account_with_role_list_fields from libs.helper import extract_remote_ip -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.account import Account, TenantAccountRole from services.account_service import AccountService, RegisterService, TenantService from services.errors.account import AccountAlreadyInTenantError from services.feature_service import FeatureService +@console_ns.route("/workspaces/current/members") class MemberListApi(Resource): """List all members of current tenant.""" @@ -41,14 +41,14 @@ class MemberListApi(Resource): @account_initialization_required @marshal_with(account_with_role_list_fields) def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() if not current_user.current_tenant: raise ValueError("No current tenant") members = TenantService.get_tenant_members(current_user.current_tenant) return {"result": "success", "accounts": members}, 200 +@console_ns.route("/workspaces/current/members/invite-email") class MemberInviteEmailApi(Resource): """Invite a new member by email.""" @@ -68,9 +68,7 @@ class MemberInviteEmailApi(Resource): interface_language = args["language"] if not TenantAccountRole.is_non_owner_role(invitee_role): return {"code": "invalid-role", "message": "Invalid role"}, 400 - - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() inviter = current_user if not inviter.current_tenant: raise ValueError("No current tenant") @@ -111,6 +109,7 @@ class MemberInviteEmailApi(Resource): }, 201 +@console_ns.route("/workspaces/current/members/") class MemberCancelInviteApi(Resource): """Cancel an invitation by member id.""" @@ -118,8 +117,7 @@ class MemberCancelInviteApi(Resource): @login_required @account_initialization_required def delete(self, member_id): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() if not current_user.current_tenant: raise ValueError("No current tenant") member = 
db.session.query(Account).where(Account.id == str(member_id)).first() @@ -143,6 +141,7 @@ class MemberCancelInviteApi(Resource): }, 200 +@console_ns.route("/workspaces/current/members//update-role") class MemberUpdateRoleApi(Resource): """Update member role.""" @@ -157,9 +156,7 @@ class MemberUpdateRoleApi(Resource): if not TenantAccountRole.is_valid_role(new_role): return {"code": "invalid-role", "message": "Invalid role"}, 400 - - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() if not current_user.current_tenant: raise ValueError("No current tenant") member = db.session.get(Account, str(member_id)) @@ -177,6 +174,7 @@ class MemberUpdateRoleApi(Resource): return {"result": "success"} +@console_ns.route("/workspaces/current/dataset-operators") class DatasetOperatorMemberListApi(Resource): """List all members of current tenant.""" @@ -185,14 +183,14 @@ class DatasetOperatorMemberListApi(Resource): @account_initialization_required @marshal_with(account_with_role_list_fields) def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() if not current_user.current_tenant: raise ValueError("No current tenant") members = TenantService.get_dataset_operator_members(current_user.current_tenant) return {"result": "success", "accounts": members}, 200 +@console_ns.route("/workspaces/current/members/send-owner-transfer-confirm-email") class SendOwnerTransferEmailApi(Resource): """Send owner transfer email.""" @@ -207,10 +205,8 @@ class SendOwnerTransferEmailApi(Resource): ip_address = extract_remote_ip(request) if AccountService.is_email_send_ip_limit(ip_address): raise EmailSendIpLimitError() - + current_user, _ = current_account_with_tenant() # check if the current user is the owner of the workspace - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") if not current_user.current_tenant: raise ValueError("No current tenant") if not TenantService.is_owner(current_user, current_user.current_tenant): @@ -233,6 +229,7 @@ class SendOwnerTransferEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/workspaces/current/members/owner-transfer-check") class OwnerTransferCheckApi(Resource): @setup_required @login_required @@ -244,8 +241,7 @@ class OwnerTransferCheckApi(Resource): parser.add_argument("token", type=str, required=True, nullable=False, location="json") args = parser.parse_args() # check if the current user is the owner of the workspace - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() if not current_user.current_tenant: raise ValueError("No current tenant") if not TenantService.is_owner(current_user, current_user.current_tenant): @@ -278,6 +274,7 @@ class OwnerTransferCheckApi(Resource): return {"is_valid": True, "email": token_data.get("email"), "token": new_token} +@console_ns.route("/workspaces/current/members//owner-transfer") class OwnerTransfer(Resource): @setup_required @login_required @@ -289,8 +286,7 @@ class OwnerTransfer(Resource): args = parser.parse_args() # check if the current user is the owner of the workspace - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() if not current_user.current_tenant: raise ValueError("No current tenant") if not TenantService.is_owner(current_user, 
current_user.current_tenant): @@ -339,14 +335,3 @@ class OwnerTransfer(Resource): raise ValueError(str(e)) return {"result": "success"} - - -api.add_resource(MemberListApi, "/workspaces/current/members") -api.add_resource(MemberInviteEmailApi, "/workspaces/current/members/invite-email") -api.add_resource(MemberCancelInviteApi, "/workspaces/current/members/") -api.add_resource(MemberUpdateRoleApi, "/workspaces/current/members//update-role") -api.add_resource(DatasetOperatorMemberListApi, "/workspaces/current/dataset-operators") -# owner transfer -api.add_resource(SendOwnerTransferEmailApi, "/workspaces/current/members/send-owner-transfer-confirm-email") -api.add_resource(OwnerTransferCheckApi, "/workspaces/current/members/owner-transfer-check") -api.add_resource(OwnerTransfer, "/workspaces/current/members//owner-transfer") diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index 0c9db660aa..acdd467b30 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -1,32 +1,28 @@ import io from flask import send_file -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder from libs.helper import StrLen, uuid_value -from libs.login import login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from services.billing_service import BillingService from services.model_provider_service import ModelProviderService +@console_ns.route("/workspaces/current/model-providers") class ModelProviderListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - if not current_user.current_tenant_id: - raise ValueError("No current tenant") - tenant_id = current_user.current_tenant_id + _, current_tenant_id = current_account_with_tenant() + tenant_id = current_tenant_id parser = reqparse.RequestParser() parser.add_argument( @@ -45,16 +41,14 @@ class ModelProviderListApi(Resource): return jsonable_encoder({"data": provider_list}) +@console_ns.route("/workspaces/current/model-providers//credentials") class ModelProviderCredentialApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider: str): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - if not current_user.current_tenant_id: - raise ValueError("No current tenant") - tenant_id = current_user.current_tenant_id + _, current_tenant_id = current_account_with_tenant() + tenant_id = current_tenant_id # if credential_id is not provided, return current used credential parser = reqparse.RequestParser() parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args") @@ -71,8 +65,7 @@ class ModelProviderCredentialApi(Resource): @login_required @account_initialization_required def post(self, provider: str): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + 
current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() @@ -83,11 +76,9 @@ class ModelProviderCredentialApi(Resource): model_provider_service = ModelProviderService() - if not current_user.current_tenant_id: - raise ValueError("No current tenant") try: model_provider_service.create_provider_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, credentials=args["credentials"], credential_name=args["name"], @@ -101,8 +92,7 @@ class ModelProviderCredentialApi(Resource): @login_required @account_initialization_required def put(self, provider: str): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() @@ -114,11 +104,9 @@ class ModelProviderCredentialApi(Resource): model_provider_service = ModelProviderService() - if not current_user.current_tenant_id: - raise ValueError("No current tenant") try: model_provider_service.update_provider_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, credentials=args["credentials"], credential_id=args["credential_id"], @@ -133,62 +121,55 @@ class ModelProviderCredentialApi(Resource): @login_required @account_initialization_required def delete(self, provider: str): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") args = parser.parse_args() - if not current_user.current_tenant_id: - raise ValueError("No current tenant") model_provider_service = ModelProviderService() model_provider_service.remove_provider_credential( - tenant_id=current_user.current_tenant_id, provider=provider, credential_id=args["credential_id"] + tenant_id=current_tenant_id, provider=provider, credential_id=args["credential_id"] ) return {"result": "success"}, 204 +@console_ns.route("/workspaces/current/model-providers//credentials/switch") class ModelProviderCredentialSwitchApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider: str): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() - if not current_user.current_tenant_id: - raise ValueError("No current tenant") service = ModelProviderService() service.switch_active_provider_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, credential_id=args["credential_id"], ) return {"result": "success"} +@console_ns.route("/workspaces/current/model-providers//credentials/validate") class ModelProviderValidateApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider: str): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + _, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("credentials", 
type=dict, required=True, nullable=False, location="json") args = parser.parse_args() - if not current_user.current_tenant_id: - raise ValueError("No current tenant") - tenant_id = current_user.current_tenant_id + tenant_id = current_tenant_id model_provider_service = ModelProviderService() @@ -211,6 +192,7 @@ class ModelProviderValidateApi(Resource): return response +@console_ns.route("/workspaces/<uuid:tenant_id>/model-providers/<path:provider>/<string:icon_type>/<string:lang>") class ModelProviderIconApi(Resource): """ Get model provider icon @@ -229,19 +211,17 @@ class ModelProviderIconApi(Resource): return send_file(io.BytesIO(icon), mimetype=mimetype) +@console_ns.route("/workspaces/current/model-providers/<path:provider>/preferred-provider-type") class PreferredProviderTypeUpdateApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider: str): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() - if not current_user.current_tenant_id: - raise ValueError("No current tenant") - tenant_id = current_user.current_tenant_id + tenant_id = current_tenant_id parser = reqparse.RequestParser() parser.add_argument( @@ -262,6 +242,7 @@ class PreferredProviderTypeUpdateApi(Resource): return {"result": "success"} +@console_ns.route("/workspaces/current/model-providers/<path:provider>/checkout-url") class ModelProviderPaymentCheckoutUrlApi(Resource): @setup_required @login_required @@ -269,33 +250,12 @@ class ModelProviderPaymentCheckoutUrlApi(Resource): def get(self, provider: str): if provider != "anthropic": raise ValueError(f"provider name {provider} is invalid") - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() BillingService.is_tenant_owner_or_admin(current_user) - if not current_user.current_tenant_id: - raise ValueError("No current tenant") data = BillingService.get_model_provider_payment_link( provider_name=provider, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, account_id=current_user.id, prefilled_email=current_user.email, ) return data - - -api.add_resource(ModelProviderListApi, "/workspaces/current/model-providers") - -api.add_resource(ModelProviderCredentialApi, "/workspaces/current/model-providers/<path:provider>/credentials") -api.add_resource( - ModelProviderCredentialSwitchApi, "/workspaces/current/model-providers/<path:provider>/credentials/switch" -) -api.add_resource(ModelProviderValidateApi, "/workspaces/current/model-providers/<path:provider>/credentials/validate") - -api.add_resource( - PreferredProviderTypeUpdateApi, "/workspaces/current/model-providers/<path:provider>/preferred-provider-type" -) -api.add_resource(ModelProviderPaymentCheckoutUrlApi, "/workspaces/current/model-providers/<path:provider>/checkout-url") -api.add_resource( - ModelProviderIconApi, - "/workspaces/<uuid:tenant_id>/model-providers/<path:provider>/<string:icon_type>/<string:lang>", -) diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index f174fcc5d3..d5d1aed00e 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -1,27 +1,29 @@ import logging -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.entities.model_entities import
ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder from libs.helper import StrLen, uuid_value -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.model_load_balancing_service import ModelLoadBalancingService from services.model_provider_service import ModelProviderService logger = logging.getLogger(__name__) +@console_ns.route("/workspaces/current/default-model") class DefaultModelApi(Resource): @setup_required @login_required @account_initialization_required def get(self): + _, tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser() parser.add_argument( "model_type", @@ -33,8 +35,6 @@ class DefaultModelApi(Resource): ) args = parser.parse_args() - tenant_id = current_user.current_tenant_id - model_provider_service = ModelProviderService() default_model_entity = model_provider_service.get_default_model_of_model_type( tenant_id=tenant_id, model_type=args["model_type"] @@ -46,15 +46,14 @@ class DefaultModelApi(Resource): @login_required @account_initialization_required def post(self): + current_user, tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("model_settings", type=list, required=True, nullable=False, location="json") args = parser.parse_args() - - tenant_id = current_user.current_tenant_id - model_provider_service = ModelProviderService() model_settings = args["model_settings"] for model_setting in model_settings: @@ -85,12 +84,13 @@ class DefaultModelApi(Resource): return {"result": "success"} +@console_ns.route("/workspaces/current/model-providers//models") class ModelProviderModelApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() model_provider_service = ModelProviderService() models = model_provider_service.get_models_by_provider(tenant_id=tenant_id, provider=provider) @@ -102,11 +102,11 @@ class ModelProviderModelApi(Resource): @account_initialization_required def post(self, provider: str): # To save the model's load balance configs + current_user, tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - tenant_id = current_user.current_tenant_id - parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") parser.add_argument( @@ -127,7 +127,7 @@ class ModelProviderModelApi(Resource): raise ValueError("credential_id is required when configuring a custom-model") service = ModelProviderService() service.switch_active_custom_model_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -162,11 +162,11 @@ class ModelProviderModelApi(Resource): @login_required @account_initialization_required def delete(self, provider: str): + current_user, tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - tenant_id = current_user.current_tenant_id - parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") parser.add_argument( @@ -187,12 +187,13 @@ class ModelProviderModelApi(Resource): return {"result": "success"}, 204 
+@console_ns.route("/workspaces/current/model-providers//models/credentials") class ModelProviderModelCredentialApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="args") @@ -254,6 +255,8 @@ class ModelProviderModelCredentialApi(Resource): @login_required @account_initialization_required def post(self, provider: str): + current_user, tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() @@ -271,7 +274,6 @@ class ModelProviderModelCredentialApi(Resource): parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") args = parser.parse_args() - tenant_id = current_user.current_tenant_id model_provider_service = ModelProviderService() try: @@ -298,6 +300,8 @@ class ModelProviderModelCredentialApi(Resource): @login_required @account_initialization_required def put(self, provider: str): + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() @@ -320,7 +324,7 @@ class ModelProviderModelCredentialApi(Resource): try: model_provider_service.update_model_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -337,6 +341,8 @@ class ModelProviderModelCredentialApi(Resource): @login_required @account_initialization_required def delete(self, provider: str): + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() parser = reqparse.RequestParser() @@ -354,7 +360,7 @@ class ModelProviderModelCredentialApi(Resource): model_provider_service = ModelProviderService() model_provider_service.remove_model_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -364,11 +370,14 @@ class ModelProviderModelCredentialApi(Resource): return {"result": "success"}, 204 +@console_ns.route("/workspaces/current/model-providers//models/credentials/switch") class ModelProviderModelCredentialSwitchApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider: str): + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() parser = reqparse.RequestParser() @@ -386,7 +395,7 @@ class ModelProviderModelCredentialSwitchApi(Resource): service = ModelProviderService() service.add_model_credential_to_model_list( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -395,12 +404,15 @@ class ModelProviderModelCredentialSwitchApi(Resource): return {"result": "success"} +@console_ns.route( + "/workspaces/current/model-providers//models/enable", endpoint="model-provider-model-enable" +) class ModelProviderModelEnableApi(Resource): @setup_required @login_required @account_initialization_required def patch(self, provider: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") @@ -422,12 +434,15 @@ 
class ModelProviderModelEnableApi(Resource): return {"result": "success"} +@console_ns.route( + "/workspaces/current/model-providers/<path:provider>/models/disable", endpoint="model-provider-model-disable" +) class ModelProviderModelDisableApi(Resource): @setup_required @login_required @account_initialization_required def patch(self, provider: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") @@ -449,12 +464,13 @@ class ModelProviderModelDisableApi(Resource): return {"result": "success"} +@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/validate") class ModelProviderModelValidateApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") @@ -494,6 +510,7 @@ class ModelProviderModelValidateApi(Resource): return response +@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/parameter-rules") class ModelProviderModelParameterRuleApi(Resource): @setup_required @login_required @@ -502,8 +519,7 @@ class ModelProviderModelParameterRuleApi(Resource): parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="args") args = parser.parse_args() - - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() model_provider_service = ModelProviderService() parameter_rules = model_provider_service.get_model_parameter_rules( @@ -513,43 +529,14 @@ class ModelProviderModelParameterRuleApi(Resource): return jsonable_encoder({"data": parameter_rules}) +@console_ns.route("/workspaces/current/models/model-types/<string:model_type>") class ModelProviderAvailableModelApi(Resource): @setup_required @login_required @account_initialization_required def get(self, model_type): - tenant_id = current_user.current_tenant_id - + _, tenant_id = current_account_with_tenant() model_provider_service = ModelProviderService() models = model_provider_service.get_models_by_model_type(tenant_id=tenant_id, model_type=model_type) return jsonable_encoder({"data": models}) - - -api.add_resource(ModelProviderModelApi, "/workspaces/current/model-providers/<path:provider>/models") -api.add_resource( - ModelProviderModelEnableApi, - "/workspaces/current/model-providers/<path:provider>/models/enable", - endpoint="model-provider-model-enable", -) -api.add_resource( - ModelProviderModelDisableApi, - "/workspaces/current/model-providers/<path:provider>/models/disable", - endpoint="model-provider-model-disable", -) -api.add_resource( - ModelProviderModelCredentialApi, "/workspaces/current/model-providers/<path:provider>/models/credentials" -) -api.add_resource( - ModelProviderModelCredentialSwitchApi, - "/workspaces/current/model-providers/<path:provider>/models/credentials/switch", -) -api.add_resource( - ModelProviderModelValidateApi, "/workspaces/current/model-providers/<path:provider>/models/credentials/validate" -) - -api.add_resource( - ModelProviderModelParameterRuleApi, "/workspaces/current/model-providers/<path:provider>/models/parameter-rules" -) -api.add_resource(ModelProviderAvailableModelApi, "/workspaces/current/models/model-types/<string:model_type>") -api.add_resource(DefaultModelApi, "/workspaces/current/default-model") diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index
fd5421fa64..ed5426376f 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -1,17 +1,16 @@ import io from flask import request, send_file -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from configs import dify_config -from controllers.console import api +from controllers.console import console_ns from controllers.console.workspace import plugin_permission_required from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.exc import PluginDaemonClientSideError -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.account import TenantPluginAutoUpgradeStrategy, TenantPluginPermission from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService from services.plugin.plugin_parameter_service import PluginParameterService @@ -19,13 +18,14 @@ from services.plugin.plugin_permission_service import PluginPermissionService from services.plugin.plugin_service import PluginService +@console_ns.route("/workspaces/current/plugin/debugging-key") class PluginDebuggingKeyApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(debug_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return { @@ -37,12 +37,13 @@ class PluginDebuggingKeyApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/list") class PluginListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("page", type=int, required=False, location="args", default=1) parser.add_argument("page_size", type=int, required=False, location="args", default=256) @@ -55,6 +56,7 @@ class PluginListApi(Resource): return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total}) +@console_ns.route("/workspaces/current/plugin/list/latest-versions") class PluginListLatestVersionsApi(Resource): @setup_required @login_required @@ -72,12 +74,13 @@ class PluginListLatestVersionsApi(Resource): return jsonable_encoder({"versions": versions}) +@console_ns.route("/workspaces/current/plugin/list/installations/ids") class PluginListInstallationsFromIdsApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("plugin_ids", type=list, required=True, location="json") @@ -91,6 +94,7 @@ class PluginListInstallationsFromIdsApi(Resource): return jsonable_encoder({"plugins": plugins}) +@console_ns.route("/workspaces/current/plugin/icon") class PluginIconApi(Resource): @setup_required def get(self): @@ -108,13 +112,14 @@ class PluginIconApi(Resource): return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age) +@console_ns.route("/workspaces/current/plugin/upload/pkg") class PluginUploadFromPkgApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + 
_, tenant_id = current_account_with_tenant() file = request.files["pkg"] @@ -131,13 +136,14 @@ class PluginUploadFromPkgApi(Resource): return jsonable_encoder(response) +@console_ns.route("/workspaces/current/plugin/upload/github") class PluginUploadFromGithubApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("repo", type=str, required=True, location="json") @@ -153,13 +159,14 @@ class PluginUploadFromGithubApi(Resource): return jsonable_encoder(response) +@console_ns.route("/workspaces/current/plugin/upload/bundle") class PluginUploadFromBundleApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() file = request.files["bundle"] @@ -176,13 +183,14 @@ class PluginUploadFromBundleApi(Resource): return jsonable_encoder(response) +@console_ns.route("/workspaces/current/plugin/install/pkg") class PluginInstallFromPkgApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("plugin_unique_identifiers", type=list, required=True, location="json") @@ -201,13 +209,14 @@ class PluginInstallFromPkgApi(Resource): return jsonable_encoder(response) +@console_ns.route("/workspaces/current/plugin/install/github") class PluginInstallFromGithubApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("repo", type=str, required=True, location="json") @@ -230,13 +239,14 @@ class PluginInstallFromGithubApi(Resource): return jsonable_encoder(response) +@console_ns.route("/workspaces/current/plugin/install/marketplace") class PluginInstallFromMarketplaceApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("plugin_unique_identifiers", type=list, required=True, location="json") @@ -255,13 +265,14 @@ class PluginInstallFromMarketplaceApi(Resource): return jsonable_encoder(response) +@console_ns.route("/workspaces/current/plugin/marketplace/pkg") class PluginFetchMarketplacePkgApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("plugin_unique_identifier", type=str, required=True, location="args") @@ -280,13 +291,14 @@ class PluginFetchMarketplacePkgApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/fetch-manifest") class PluginFetchManifestApi(Resource): @setup_required @login_required @account_initialization_required 
@plugin_permission_required(install_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("plugin_unique_identifier", type=str, required=True, location="args") @@ -304,13 +316,14 @@ class PluginFetchManifestApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/tasks") class PluginFetchInstallTasksApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("page", type=int, required=True, location="args") @@ -325,13 +338,14 @@ class PluginFetchInstallTasksApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/tasks/") class PluginFetchInstallTaskApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def get(self, task_id: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return jsonable_encoder({"task": PluginService.fetch_install_task(tenant_id, task_id)}) @@ -339,13 +353,14 @@ class PluginFetchInstallTaskApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/tasks//delete") class PluginDeleteInstallTaskApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self, task_id: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.delete_install_task(tenant_id, task_id)} @@ -353,13 +368,14 @@ class PluginDeleteInstallTaskApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/tasks/delete_all") class PluginDeleteAllInstallTaskItemsApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.delete_all_install_task_items(tenant_id)} @@ -367,13 +383,14 @@ class PluginDeleteAllInstallTaskItemsApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/tasks//delete/") class PluginDeleteInstallTaskItemApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self, task_id: str, identifier: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.delete_install_task_item(tenant_id, task_id, identifier)} @@ -381,13 +398,14 @@ class PluginDeleteInstallTaskItemApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/upgrade/marketplace") class PluginUpgradeFromMarketplaceApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json") @@ -404,13 +422,14 @@ class PluginUpgradeFromMarketplaceApi(Resource): raise ValueError(e) 
+@console_ns.route("/workspaces/current/plugin/upgrade/github") class PluginUpgradeFromGithubApi(Resource): @setup_required @login_required @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json") @@ -435,6 +454,7 @@ class PluginUpgradeFromGithubApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/uninstall") class PluginUninstallApi(Resource): @setup_required @login_required @@ -445,7 +465,7 @@ class PluginUninstallApi(Resource): req.add_argument("plugin_installation_id", type=str, required=True, location="json") args = req.parse_args() - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.uninstall(tenant_id, args["plugin_installation_id"])} @@ -453,11 +473,13 @@ class PluginUninstallApi(Resource): raise ValueError(e) +@console_ns.route("/workspaces/current/plugin/permission/change") class PluginChangePermissionApi(Resource): @setup_required @login_required @account_initialization_required def post(self): + current_user, current_tenant_id = current_account_with_tenant() user = current_user if not user.is_admin_or_owner: raise Forbidden() @@ -470,17 +492,18 @@ class PluginChangePermissionApi(Resource): install_permission = TenantPluginPermission.InstallPermission(args["install_permission"]) debug_permission = TenantPluginPermission.DebugPermission(args["debug_permission"]) - tenant_id = user.current_tenant_id + tenant_id = current_tenant_id return {"success": PluginPermissionService.change_permission(tenant_id, install_permission, debug_permission)} +@console_ns.route("/workspaces/current/plugin/permission/fetch") class PluginFetchPermissionApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() permission = PluginPermissionService.get_permission(tenant_id) if not permission: @@ -499,16 +522,17 @@ class PluginFetchPermissionApi(Resource): ) +@console_ns.route("/workspaces/current/plugin/parameters/dynamic-options") class PluginFetchDynamicSelectOptionsApi(Resource): @setup_required @login_required @account_initialization_required def get(self): # check if the user is admin or owner + current_user, tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() - tenant_id = current_user.current_tenant_id user_id = current_user.id parser = reqparse.RequestParser() @@ -535,12 +559,13 @@ class PluginFetchDynamicSelectOptionsApi(Resource): return jsonable_encoder({"options": options}) +@console_ns.route("/workspaces/current/plugin/preferences/change") class PluginChangePreferencesApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() @@ -549,8 +574,6 @@ class PluginChangePreferencesApi(Resource): req.add_argument("auto_upgrade", type=dict, required=True, location="json") args = req.parse_args() - tenant_id = user.current_tenant_id - permission = args["permission"] install_permission = TenantPluginPermission.InstallPermission(permission.get("install_permission", "everyone")) @@ -590,12 +613,13 @@ class 
PluginChangePreferencesApi(Resource): return jsonable_encoder({"success": True}) +@console_ns.route("/workspaces/current/plugin/preferences/fetch") class PluginFetchPreferencesApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() permission = PluginPermissionService.get_permission(tenant_id) permission_dict = { @@ -628,48 +652,17 @@ class PluginFetchPreferencesApi(Resource): return jsonable_encoder({"permission": permission_dict, "auto_upgrade": auto_upgrade_dict}) +@console_ns.route("/workspaces/current/plugin/preferences/autoupgrade/exclude") class PluginAutoUpgradeExcludePluginApi(Resource): @setup_required @login_required @account_initialization_required def post(self): # exclude one single plugin - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() req = reqparse.RequestParser() req.add_argument("plugin_id", type=str, required=True, location="json") args = req.parse_args() return jsonable_encoder({"success": PluginAutoUpgradeService.exclude_plugin(tenant_id, args["plugin_id"])}) - - -api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") -api.add_resource(PluginListApi, "/workspaces/current/plugin/list") -api.add_resource(PluginListLatestVersionsApi, "/workspaces/current/plugin/list/latest-versions") -api.add_resource(PluginListInstallationsFromIdsApi, "/workspaces/current/plugin/list/installations/ids") -api.add_resource(PluginIconApi, "/workspaces/current/plugin/icon") -api.add_resource(PluginUploadFromPkgApi, "/workspaces/current/plugin/upload/pkg") -api.add_resource(PluginUploadFromGithubApi, "/workspaces/current/plugin/upload/github") -api.add_resource(PluginUploadFromBundleApi, "/workspaces/current/plugin/upload/bundle") -api.add_resource(PluginInstallFromPkgApi, "/workspaces/current/plugin/install/pkg") -api.add_resource(PluginInstallFromGithubApi, "/workspaces/current/plugin/install/github") -api.add_resource(PluginUpgradeFromMarketplaceApi, "/workspaces/current/plugin/upgrade/marketplace") -api.add_resource(PluginUpgradeFromGithubApi, "/workspaces/current/plugin/upgrade/github") -api.add_resource(PluginInstallFromMarketplaceApi, "/workspaces/current/plugin/install/marketplace") -api.add_resource(PluginFetchManifestApi, "/workspaces/current/plugin/fetch-manifest") -api.add_resource(PluginFetchInstallTasksApi, "/workspaces/current/plugin/tasks") -api.add_resource(PluginFetchInstallTaskApi, "/workspaces/current/plugin/tasks/") -api.add_resource(PluginDeleteInstallTaskApi, "/workspaces/current/plugin/tasks//delete") -api.add_resource(PluginDeleteAllInstallTaskItemsApi, "/workspaces/current/plugin/tasks/delete_all") -api.add_resource(PluginDeleteInstallTaskItemApi, "/workspaces/current/plugin/tasks//delete/") -api.add_resource(PluginUninstallApi, "/workspaces/current/plugin/uninstall") -api.add_resource(PluginFetchMarketplacePkgApi, "/workspaces/current/plugin/marketplace/pkg") - -api.add_resource(PluginChangePermissionApi, "/workspaces/current/plugin/permission/change") -api.add_resource(PluginFetchPermissionApi, "/workspaces/current/plugin/permission/fetch") - -api.add_resource(PluginFetchDynamicSelectOptionsApi, "/workspaces/current/plugin/parameters/dynamic-options") - -api.add_resource(PluginFetchPreferencesApi, "/workspaces/current/plugin/preferences/fetch") -api.add_resource(PluginChangePreferencesApi, "/workspaces/current/plugin/preferences/change") 
-api.add_resource(PluginAutoUpgradeExcludePluginApi, "/workspaces/current/plugin/preferences/autoupgrade/exclude") diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 8693d99e23..17a935ade7 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -2,7 +2,6 @@ import io from urllib.parse import urlparse from flask import make_response, redirect, request, send_file -from flask_login import current_user from flask_restx import ( Resource, reqparse, @@ -10,7 +9,7 @@ from flask_restx import ( from werkzeug.exceptions import Forbidden from configs import dify_config -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import ( account_initialization_required, enterprise_license_required, @@ -24,7 +23,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.oauth import OAuthHandler from core.tools.entities.tool_entities import CredentialType from libs.helper import StrLen, alphanumeric, uuid_value -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.provider_ids import ToolProviderID from services.plugin.oauth_service import OAuthProxyService from services.tools.api_tools_manage_service import ApiToolManageService @@ -47,15 +46,15 @@ def is_valid_url(url: str) -> bool: return False +@console_ns.route("/workspaces/current/tool-providers") class ToolProviderListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id req = reqparse.RequestParser() req.add_argument( @@ -71,14 +70,13 @@ class ToolProviderListApi(Resource): return ToolCommonService.list_tool_providers(user_id, tenant_id, args.get("type", None)) +@console_ns.route("/workspaces/current/tool-provider/builtin//tools") class ToolBuiltinProviderListToolsApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - user = current_user - - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.list_builtin_tool_provider_tools( @@ -88,28 +86,27 @@ class ToolBuiltinProviderListToolsApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//info") class ToolBuiltinProviderInfoApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - user = current_user - - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(tenant_id, provider)) +@console_ns.route("/workspaces/current/tool-provider/builtin//delete") class ToolBuiltinProviderDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() - tenant_id = user.current_tenant_id req = reqparse.RequestParser() req.add_argument("credential_id", type=str, required=True, nullable=False, location="json") args = req.parse_args() @@ -121,15 +118,15 @@ class ToolBuiltinProviderDeleteApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//add") class 
ToolBuiltinProviderAddApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") @@ -150,18 +147,18 @@ class ToolBuiltinProviderAddApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//update") class ToolBuiltinProviderUpdateApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") @@ -181,12 +178,13 @@ class ToolBuiltinProviderUpdateApi(Resource): return result +@console_ns.route("/workspaces/current/tool-provider/builtin//credentials") class ToolBuiltinProviderGetCredentialsApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_credentials( @@ -196,6 +194,7 @@ class ToolBuiltinProviderGetCredentialsApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//icon") class ToolBuiltinProviderIconApi(Resource): @setup_required def get(self, provider): @@ -204,18 +203,18 @@ class ToolBuiltinProviderIconApi(Resource): return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age) +@console_ns.route("/workspaces/current/tool-provider/api/add") class ToolApiProviderAddApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") @@ -243,15 +242,15 @@ class ToolApiProviderAddApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/remote") class ToolApiProviderGetRemoteSchemaApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() @@ -266,15 +265,15 @@ class ToolApiProviderGetRemoteSchemaApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/tools") class ToolApiProviderListToolsApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() @@ -291,18 +290,18 @@ class ToolApiProviderListToolsApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/update") class ToolApiProviderUpdateApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() 
user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") @@ -332,18 +331,18 @@ class ToolApiProviderUpdateApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/delete") class ToolApiProviderDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() @@ -358,15 +357,15 @@ class ToolApiProviderDeleteApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/get") class ToolApiProviderGetApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() @@ -381,13 +380,13 @@ class ToolApiProviderGetApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/builtin//credential/schema/") class ToolBuiltinProviderCredentialsSchemaApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider, credential_type): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.list_builtin_provider_credentials_schema( @@ -396,6 +395,7 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/schema") class ToolApiProviderSchemaApi(Resource): @setup_required @login_required @@ -412,6 +412,7 @@ class ToolApiProviderSchemaApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/api/test/pre") class ToolApiProviderPreviousTestApi(Resource): @setup_required @login_required @@ -427,9 +428,9 @@ class ToolApiProviderPreviousTestApi(Resource): parser.add_argument("schema", type=str, required=True, nullable=False, location="json") args = parser.parse_args() - + _, current_tenant_id = current_account_with_tenant() return ApiToolManageService.test_api_tool_preview( - current_user.current_tenant_id, + current_tenant_id, args["provider_name"] or "", args["tool_name"], args["credentials"], @@ -439,18 +440,18 @@ class ToolApiProviderPreviousTestApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/create") class ToolWorkflowProviderCreateApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id reqparser = reqparse.RequestParser() reqparser.add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json") @@ -478,18 +479,18 @@ class ToolWorkflowProviderCreateApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/update") class ToolWorkflowProviderUpdateApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id reqparser = reqparse.RequestParser() reqparser.add_argument("workflow_tool_id", type=uuid_value, 
required=True, nullable=False, location="json") @@ -520,18 +521,18 @@ class ToolWorkflowProviderUpdateApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/delete") class ToolWorkflowProviderDeleteApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id reqparser = reqparse.RequestParser() reqparser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") @@ -545,15 +546,15 @@ class ToolWorkflowProviderDeleteApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/workflow/get") class ToolWorkflowProviderGetApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args") @@ -579,15 +580,15 @@ class ToolWorkflowProviderGetApi(Resource): return jsonable_encoder(tool) +@console_ns.route("/workspaces/current/tool-provider/workflow/tools") class ToolWorkflowProviderListToolApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="args") @@ -603,15 +604,15 @@ class ToolWorkflowProviderListToolApi(Resource): ) +@console_ns.route("/workspaces/current/tools/builtin") class ToolBuiltinListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -624,13 +625,13 @@ class ToolBuiltinListApi(Resource): ) +@console_ns.route("/workspaces/current/tools/api") class ToolApiListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( [ @@ -642,15 +643,15 @@ class ToolApiListApi(Resource): ) +@console_ns.route("/workspaces/current/tools/workflow") class ToolWorkflowListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -663,6 +664,7 @@ class ToolWorkflowListApi(Resource): ) +@console_ns.route("/workspaces/current/tool-labels") class ToolLabelsApi(Resource): @setup_required @login_required @@ -672,6 +674,7 @@ class ToolLabelsApi(Resource): return jsonable_encoder(ToolLabelsService.list_tool_labels()) +@console_ns.route("/oauth/plugin//tool/authorization-url") class ToolPluginOAuthApi(Resource): @setup_required @login_required @@ -682,19 +685,18 @@ class ToolPluginOAuthApi(Resource): provider_name = tool_provider.provider_name # todo check permission - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() - tenant_id = 
user.current_tenant_id oauth_client_params = BuiltinToolManageService.get_oauth_client(tenant_id=tenant_id, provider=provider) if oauth_client_params is None: raise Forbidden("no oauth available client config found for this tool provider") oauth_handler = OAuthHandler() context_id = OAuthProxyService.create_proxy_context( - user_id=current_user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name + user_id=user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name ) redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback" authorization_url_response = oauth_handler.get_authorization_url( @@ -716,6 +718,7 @@ class ToolPluginOAuthApi(Resource): return response +@console_ns.route("/oauth/plugin//tool/callback") class ToolOAuthCallback(Resource): @setup_required def get(self, provider): @@ -766,19 +769,22 @@ class ToolOAuthCallback(Resource): return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") +@console_ns.route("/workspaces/current/tool-provider/builtin//default-credential") class ToolBuiltinProviderSetDefaultApi(Resource): @setup_required @login_required @account_initialization_required def post(self, provider): + current_user, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() return BuiltinToolManageService.set_default_provider( - tenant_id=current_user.current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"] + tenant_id=current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"] ) +@console_ns.route("/workspaces/current/tool-provider/builtin//oauth/custom-client") class ToolOAuthCustomClient(Resource): @setup_required @login_required @@ -789,13 +795,13 @@ class ToolOAuthCustomClient(Resource): parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") args = parser.parse_args() - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() return BuiltinToolManageService.save_custom_oauth_client_params( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, provider=provider, client_params=args.get("client_params", {}), enable_oauth_custom_client=args.get("enable_oauth_custom_client", True), @@ -805,41 +811,42 @@ class ToolOAuthCustomClient(Resource): @login_required @account_initialization_required def get(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( - BuiltinToolManageService.get_custom_oauth_client_params( - tenant_id=current_user.current_tenant_id, provider=provider - ) + BuiltinToolManageService.get_custom_oauth_client_params(tenant_id=current_tenant_id, provider=provider) ) @setup_required @login_required @account_initialization_required def delete(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( - BuiltinToolManageService.delete_custom_oauth_client_params( - tenant_id=current_user.current_tenant_id, provider=provider - ) + BuiltinToolManageService.delete_custom_oauth_client_params(tenant_id=current_tenant_id, provider=provider) ) +@console_ns.route("/workspaces/current/tool-provider/builtin//oauth/client-schema") class ToolBuiltinProviderGetOauthClientSchemaApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): + _, current_tenant_id = current_account_with_tenant() 
return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_oauth_client_schema( - tenant_id=current_user.current_tenant_id, provider_name=provider + tenant_id=current_tenant_id, provider_name=provider ) ) +@console_ns.route("/workspaces/current/tool-provider/builtin//credential/info") class ToolBuiltinProviderGetCredentialInfoApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_credential_info( @@ -849,6 +856,7 @@ class ToolBuiltinProviderGetCredentialInfoApi(Resource): ) +@console_ns.route("/workspaces/current/tool-provider/mcp") class ToolProviderMCPApi(Resource): @setup_required @login_required @@ -867,12 +875,12 @@ class ToolProviderMCPApi(Resource): ) parser.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={}) args = parser.parse_args() - user = current_user + user, tenant_id = current_account_with_tenant() if not is_valid_url(args["server_url"]): raise ValueError("Server URL is not valid.") return jsonable_encoder( MCPToolManageService.create_mcp_provider( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, server_url=args["server_url"], name=args["name"], icon=args["icon"], @@ -907,8 +915,9 @@ class ToolProviderMCPApi(Resource): pass else: raise ValueError("Server URL is not valid.") + _, current_tenant_id = current_account_with_tenant() MCPToolManageService.update_mcp_provider( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider_id=args["provider_id"], server_url=args["server_url"], name=args["name"], @@ -929,10 +938,12 @@ class ToolProviderMCPApi(Resource): parser = reqparse.RequestParser() parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() - MCPToolManageService.delete_mcp_tool(tenant_id=current_user.current_tenant_id, provider_id=args["provider_id"]) + _, current_tenant_id = current_account_with_tenant() + MCPToolManageService.delete_mcp_tool(tenant_id=current_tenant_id, provider_id=args["provider_id"]) return {"result": "success"} +@console_ns.route("/workspaces/current/tool-provider/mcp/auth") class ToolMCPAuthApi(Resource): @setup_required @login_required @@ -943,7 +954,7 @@ class ToolMCPAuthApi(Resource): parser.add_argument("authorization_code", type=str, required=False, nullable=True, location="json") args = parser.parse_args() provider_id = args["provider_id"] - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id) if not provider: raise ValueError("provider not found") @@ -978,35 +989,37 @@ class ToolMCPAuthApi(Resource): raise ValueError(f"Failed to connect to MCP server: {e}") from e +@console_ns.route("/workspaces/current/tool-provider/mcp/tools/") class ToolMCPDetailApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider_id): - user = current_user - provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, user.current_tenant_id) + _, tenant_id = current_account_with_tenant() + provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id) return jsonable_encoder(ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True)) 
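[Editor's note] The `api.add_resource(...)` registrations that used to sit at the bottom of this module are superseded by the `@console_ns.route(...)` decorators added throughout the file (the old registration block is deleted further down). A minimal, self-contained flask-restx sketch of the same decorator-based pattern follows; the class body is stripped down to a stub and only the route string is taken from the diff.

```python
# Minimal flask-restx namespace example mirroring the decorator-based
# registration used above. The handler body is a stub, not Dify's code.
from flask import Flask
from flask_restx import Api, Namespace, Resource

console_ns = Namespace("console", path="/")  # path="/" mounts at the app root


@console_ns.route("/workspaces/current/tool-labels")
class ToolLabelsApi(Resource):
    def get(self):
        # The decorator registers the class when the namespace is attached
        # to an Api, so no separate api.add_resource() call is needed.
        return {"labels": []}


app = Flask(__name__)
api = Api(app)
api.add_namespace(console_ns)

if __name__ == "__main__":
    with app.test_client() as client:
        print(client.get("/workspaces/current/tool-labels").get_json())
```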
+@console_ns.route("/workspaces/current/tools/mcp") class ToolMCPListAllApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() tools = MCPToolManageService.retrieve_mcp_tools(tenant_id=tenant_id) return [tool.to_dict() for tool in tools] +@console_ns.route("/workspaces/current/tool-provider/mcp/update/") class ToolMCPUpdateApi(Resource): @setup_required @login_required @account_initialization_required def get(self, provider_id): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() tools = MCPToolManageService.list_mcp_tool_from_remote_server( tenant_id=tenant_id, provider_id=provider_id, @@ -1014,6 +1027,7 @@ class ToolMCPUpdateApi(Resource): return jsonable_encoder(tools) +@console_ns.route("/mcp/oauth/callback") class ToolMCPCallbackApi(Resource): def get(self): parser = reqparse.RequestParser() @@ -1024,67 +1038,3 @@ class ToolMCPCallbackApi(Resource): authorization_code = args["code"] handle_callback(state_key, authorization_code) return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") - - -# tool provider -api.add_resource(ToolProviderListApi, "/workspaces/current/tool-providers") - -# tool oauth -api.add_resource(ToolPluginOAuthApi, "/oauth/plugin//tool/authorization-url") -api.add_resource(ToolOAuthCallback, "/oauth/plugin//tool/callback") -api.add_resource(ToolOAuthCustomClient, "/workspaces/current/tool-provider/builtin//oauth/custom-client") - -# builtin tool provider -api.add_resource(ToolBuiltinProviderListToolsApi, "/workspaces/current/tool-provider/builtin//tools") -api.add_resource(ToolBuiltinProviderInfoApi, "/workspaces/current/tool-provider/builtin//info") -api.add_resource(ToolBuiltinProviderAddApi, "/workspaces/current/tool-provider/builtin//add") -api.add_resource(ToolBuiltinProviderDeleteApi, "/workspaces/current/tool-provider/builtin//delete") -api.add_resource(ToolBuiltinProviderUpdateApi, "/workspaces/current/tool-provider/builtin//update") -api.add_resource( - ToolBuiltinProviderSetDefaultApi, "/workspaces/current/tool-provider/builtin//default-credential" -) -api.add_resource( - ToolBuiltinProviderGetCredentialInfoApi, "/workspaces/current/tool-provider/builtin//credential/info" -) -api.add_resource( - ToolBuiltinProviderGetCredentialsApi, "/workspaces/current/tool-provider/builtin//credentials" -) -api.add_resource( - ToolBuiltinProviderCredentialsSchemaApi, - "/workspaces/current/tool-provider/builtin//credential/schema/", -) -api.add_resource( - ToolBuiltinProviderGetOauthClientSchemaApi, - "/workspaces/current/tool-provider/builtin//oauth/client-schema", -) -api.add_resource(ToolBuiltinProviderIconApi, "/workspaces/current/tool-provider/builtin//icon") - -# api tool provider -api.add_resource(ToolApiProviderAddApi, "/workspaces/current/tool-provider/api/add") -api.add_resource(ToolApiProviderGetRemoteSchemaApi, "/workspaces/current/tool-provider/api/remote") -api.add_resource(ToolApiProviderListToolsApi, "/workspaces/current/tool-provider/api/tools") -api.add_resource(ToolApiProviderUpdateApi, "/workspaces/current/tool-provider/api/update") -api.add_resource(ToolApiProviderDeleteApi, "/workspaces/current/tool-provider/api/delete") -api.add_resource(ToolApiProviderGetApi, "/workspaces/current/tool-provider/api/get") -api.add_resource(ToolApiProviderSchemaApi, "/workspaces/current/tool-provider/api/schema") -api.add_resource(ToolApiProviderPreviousTestApi, 
"/workspaces/current/tool-provider/api/test/pre") - -# workflow tool provider -api.add_resource(ToolWorkflowProviderCreateApi, "/workspaces/current/tool-provider/workflow/create") -api.add_resource(ToolWorkflowProviderUpdateApi, "/workspaces/current/tool-provider/workflow/update") -api.add_resource(ToolWorkflowProviderDeleteApi, "/workspaces/current/tool-provider/workflow/delete") -api.add_resource(ToolWorkflowProviderGetApi, "/workspaces/current/tool-provider/workflow/get") -api.add_resource(ToolWorkflowProviderListToolApi, "/workspaces/current/tool-provider/workflow/tools") - -# mcp tool provider -api.add_resource(ToolMCPDetailApi, "/workspaces/current/tool-provider/mcp/tools/") -api.add_resource(ToolProviderMCPApi, "/workspaces/current/tool-provider/mcp") -api.add_resource(ToolMCPUpdateApi, "/workspaces/current/tool-provider/mcp/update/") -api.add_resource(ToolMCPAuthApi, "/workspaces/current/tool-provider/mcp/auth") -api.add_resource(ToolMCPCallbackApi, "/mcp/oauth/callback") - -api.add_resource(ToolBuiltinListApi, "/workspaces/current/tools/builtin") -api.add_resource(ToolApiListApi, "/workspaces/current/tools/api") -api.add_resource(ToolMCPListAllApi, "/workspaces/current/tools/mcp") -api.add_resource(ToolWorkflowListApi, "/workspaces/current/tools/workflow") -api.add_resource(ToolLabelsApi, "/workspaces/current/tool-labels") diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 6bec70b5da..5be427e9bb 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -1,7 +1,6 @@ import logging from flask import request -from flask_login import current_user from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy import select from werkzeug.exceptions import Unauthorized @@ -14,7 +13,7 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) -from controllers.console import api +from controllers.console import console_ns from controllers.console.admin import admin_required from controllers.console.error import AccountNotLinkTenantError from controllers.console.wraps import ( @@ -24,8 +23,8 @@ from controllers.console.wraps import ( ) from extensions.ext_database import db from libs.helper import TimestampField -from libs.login import login_required -from models.account import Account, Tenant, TenantStatus +from libs.login import current_account_with_tenant, login_required +from models.account import Tenant, TenantStatus from services.account_service import TenantService from services.feature_service import FeatureService from services.file_service import FileService @@ -65,13 +64,13 @@ tenants_fields = { workspace_fields = {"id": fields.String, "name": fields.String, "status": fields.String, "created_at": TimestampField} +@console_ns.route("/workspaces") class TenantListApi(Resource): @setup_required @login_required @account_initialization_required def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() tenants = TenantService.get_join_tenants(current_user) tenant_dicts = [] @@ -85,7 +84,7 @@ class TenantListApi(Resource): "status": tenant.status, "created_at": tenant.created_at, "plan": features.billing.subscription.plan if features.billing.enabled else "sandbox", - "current": tenant.id == current_user.current_tenant_id if current_user.current_tenant_id else False, + "current": tenant.id == 
current_tenant_id if current_tenant_id else False, } tenant_dicts.append(tenant_dict) @@ -93,6 +92,7 @@ class TenantListApi(Resource): return {"workspaces": marshal(tenant_dicts, tenants_fields)}, 200 +@console_ns.route("/all-workspaces") class WorkspaceListApi(Resource): @setup_required @admin_required @@ -118,6 +118,8 @@ class WorkspaceListApi(Resource): }, 200 +@console_ns.route("/workspaces/current", endpoint="workspaces_current") +@console_ns.route("/info", endpoint="info") # Deprecated class TenantApi(Resource): @setup_required @login_required @@ -127,8 +129,7 @@ class TenantApi(Resource): if request.path == "/info": logger.warning("Deprecated URL /info was used.") - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() tenant = current_user.current_tenant if not tenant: raise ValueError("No current tenant") @@ -143,18 +144,16 @@ class TenantApi(Resource): else: raise Unauthorized("workspace is archived") - if not tenant: - raise ValueError("No tenant available") return WorkspaceService.get_tenant_info(tenant), 200 +@console_ns.route("/workspaces/switch") class SwitchWorkspaceApi(Resource): @setup_required @login_required @account_initialization_required def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("tenant_id", type=str, required=True, location="json") args = parser.parse_args() @@ -172,22 +171,19 @@ class SwitchWorkspaceApi(Resource): return {"result": "success", "new_tenant": marshal(WorkspaceService.get_tenant_info(new_tenant), tenant_fields)} +@console_ns.route("/workspaces/custom-config") class CustomConfigWorkspaceApi(Resource): @setup_required @login_required @account_initialization_required @cloud_edition_billing_resource_check("workspace_custom") def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + _, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("remove_webapp_brand", type=bool, location="json") parser.add_argument("replace_webapp_logo", type=str, location="json") args = parser.parse_args() - - if not current_user.current_tenant_id: - raise ValueError("No current tenant") - tenant = db.get_or_404(Tenant, current_user.current_tenant_id) + tenant = db.get_or_404(Tenant, current_tenant_id) custom_config_dict = { "remove_webapp_brand": args["remove_webapp_brand"], @@ -202,14 +198,14 @@ class CustomConfigWorkspaceApi(Resource): return {"result": "success", "tenant": marshal(WorkspaceService.get_tenant_info(tenant), tenant_fields)} +@console_ns.route("/workspaces/custom-config/webapp-logo/upload") class WebappLogoWorkspaceApi(Resource): @setup_required @login_required @account_initialization_required @cloud_edition_billing_resource_check("workspace_custom") def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() # check file if "file" not in request.files: raise NoFileUploadedError() @@ -242,32 +238,22 @@ class WebappLogoWorkspaceApi(Resource): return {"id": upload_file.id}, 201 +@console_ns.route("/workspaces/info") class WorkspaceInfoApi(Resource): @setup_required @login_required @account_initialization_required # Change workspace name def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") 
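[Editor's note] `TenantApi` above keeps the legacy `/info` URL alive alongside `/workspaces/current` by stacking two route decorators with distinct `endpoint` names (Flask requires unique endpoints when one view serves several URL rules). A compact sketch of that aliasing pattern, with the deprecation warning modeled on the handler above; the response payload is made up.

```python
# Sketch of serving one resource under a new URL plus a deprecated alias.
# Endpoint names must differ, or Flask raises a duplicate-endpoint error.
import logging

from flask import Flask, request
from flask_restx import Api, Namespace, Resource

logger = logging.getLogger(__name__)
ns = Namespace("console", path="/")


@ns.route("/workspaces/current", endpoint="workspaces_current")
@ns.route("/info", endpoint="info")  # Deprecated alias
class TenantApi(Resource):
    def get(self):
        if request.path == "/info":
            logger.warning("Deprecated URL /info was used.")
        return {"name": "example-workspace"}


app = Flask(__name__)
Api(app).add_namespace(ns)
```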
+ _, current_tenant_id = current_account_with_tenant() parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") args = parser.parse_args() - if not current_user.current_tenant_id: + if not current_tenant_id: raise ValueError("No current tenant") - tenant = db.get_or_404(Tenant, current_user.current_tenant_id) + tenant = db.get_or_404(Tenant, current_tenant_id) tenant.name = args["name"] db.session.commit() return {"result": "success", "tenant": marshal(WorkspaceService.get_tenant_info(tenant), tenant_fields)} - - -api.add_resource(TenantListApi, "/workspaces") # GET for getting all tenants -api.add_resource(WorkspaceListApi, "/all-workspaces") # GET for getting all tenants -api.add_resource(TenantApi, "/workspaces/current", endpoint="workspaces_current") # GET for getting current tenant info -api.add_resource(TenantApi, "/info", endpoint="info") # Deprecated -api.add_resource(SwitchWorkspaceApi, "/workspaces/switch") # POST for switching tenant -api.add_resource(CustomConfigWorkspaceApi, "/workspaces/custom-config") -api.add_resource(WebappLogoWorkspaceApi, "/workspaces/custom-config/webapp-logo/upload") -api.add_resource(WorkspaceInfoApi, "/workspaces/info") # POST for changing workspace info diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index 914d386c78..2fa28711c3 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -7,12 +7,12 @@ from functools import wraps from typing import ParamSpec, TypeVar from flask import abort, request -from flask_login import current_user from configs import dify_config from controllers.console.workspace.error import AccountNotInitializedError from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.login import current_account_with_tenant from models.account import AccountStatus from models.dataset import RateLimitLog from models.model import DifySetup @@ -29,9 +29,8 @@ def account_initialization_required(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): # check account initialization - account = current_user - - if account.status == AccountStatus.UNINITIALIZED: + current_user, _ = current_account_with_tenant() + if current_user.status == AccountStatus.UNINITIALIZED: raise AccountNotInitializedError() return view(*args, **kwargs) @@ -75,7 +74,8 @@ def only_edition_self_hosted(view: Callable[P, R]): def cloud_edition_billing_enabled(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if not features.billing.enabled: abort(403, "Billing feature is not enabled.") return view(*args, **kwargs) @@ -87,7 +87,8 @@ def cloud_edition_billing_resource_check(resource: str): def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: members = features.members apps = features.apps @@ -128,7 +129,8 @@ def cloud_edition_billing_knowledge_limit_check(resource: str): def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = 
FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: if resource == "add_segment": if features.billing.subscription.plan == "sandbox": @@ -151,10 +153,11 @@ def cloud_edition_billing_rate_limit_check(resource: str): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): if resource == "knowledge": - knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(current_tenant_id) if knowledge_rate_limit.enabled: current_time = int(time.time() * 1000) - key = f"rate_limit_{current_user.current_tenant_id}" + key = f"rate_limit_{current_tenant_id}" redis_client.zadd(key, {current_time: current_time}) @@ -165,7 +168,7 @@ def cloud_edition_billing_rate_limit_check(resource: str): if request_count > knowledge_rate_limit.limit: # add ratelimit record rate_limit_log = RateLimitLog( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, subscription_plan=knowledge_rate_limit.subscription_plan, operation="knowledge", ) @@ -185,14 +188,15 @@ def cloud_utm_record(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): with contextlib.suppress(Exception): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: utm_info = request.cookies.get("utm_info") if utm_info: utm_info_dict: dict = json.loads(utm_info) - OperationService.record_utm(current_user.current_tenant_id, utm_info_dict) + OperationService.record_utm(current_tenant_id, utm_info_dict) return view(*args, **kwargs) @@ -242,9 +246,9 @@ def email_password_login_enabled(view: Callable[P, R]): return decorated -def email_register_enabled(view): +def email_register_enabled(view: Callable[P, R]): @wraps(view) - def decorated(*args, **kwargs): + def decorated(*args: P.args, **kwargs: P.kwargs): features = FeatureService.get_system_features() if features.is_allow_register: return view(*args, **kwargs) @@ -271,7 +275,8 @@ def enable_change_email(view: Callable[P, R]): def is_allow_transfer_owner(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.is_allow_transfer_workspace: return view(*args, **kwargs) @@ -281,12 +286,26 @@ def is_allow_transfer_owner(view: Callable[P, R]): return decorated -def knowledge_pipeline_publish_enabled(view): +def knowledge_pipeline_publish_enabled(view: Callable[P, R]): @wraps(view) - def decorated(*args, **kwargs): - features = FeatureService.get_features(current_user.current_tenant_id) + def decorated(*args: P.args, **kwargs: P.kwargs): + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.knowledge_pipeline.publish_enabled: return view(*args, **kwargs) abort(403) return decorated + + +def edit_permission_required(f: Callable[P, R]): + @wraps(f) + def decorated_function(*args: P.args, **kwargs: P.kwargs): + from werkzeug.exceptions import Forbidden + + current_user, _ = current_account_with_tenant() + if not 
current_user.has_edit_permission: + raise Forbidden() + return f(*args, **kwargs) + + return decorated_function diff --git a/api/controllers/inner_api/mail.py b/api/controllers/inner_api/mail.py index 0b2be03e43..39411a077a 100644 --- a/api/controllers/inner_api/mail.py +++ b/api/controllers/inner_api/mail.py @@ -17,7 +17,7 @@ class BaseMail(Resource): def post(self): args = _mail_parser.parse_args() - send_inner_email_task.delay( + send_inner_email_task.delay( # type: ignore to=args["to"], subject=args["subject"], body=args["body"], diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index deab50076d..e4fe8d44bf 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -31,7 +31,7 @@ from core.plugin.entities.request import ( ) from core.tools.entities.tool_entities import ToolProviderType from libs.helper import length_prefixed_response -from models.account import Account, Tenant +from models import Account, Tenant from models.model import EndUser diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index 04102c49f3..1f588bedce 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -24,20 +24,14 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser: NOTE: user_id is not trusted, it could be maliciously set to any value. As a result, it could only be considered as an end user id. """ + if not user_id: + user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID + is_anonymous = user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID try: with Session(db.engine) as session: - if not user_id: - user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + user_model = None - user_model = ( - session.query(EndUser) - .where( - EndUser.id == user_id, - EndUser.tenant_id == tenant_id, - ) - .first() - ) - if not user_model: + if is_anonymous: user_model = ( session.query(EndUser) .where( @@ -46,11 +40,21 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser: ) .first() ) + else: + user_model = ( + session.query(EndUser) + .where( + EndUser.id == user_id, + EndUser.tenant_id == tenant_id, + ) + .first() + ) + if not user_model: user_model = EndUser( tenant_id=tenant_id, type="service_api", - is_anonymous=user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID.value, + is_anonymous=is_anonymous, session_id=user_id, ) session.add(user_model) @@ -81,7 +85,7 @@ def get_user_tenant(view: Callable[P, R] | None = None): raise ValueError("tenant_id is required") if not user_id: - user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID try: tenant_model = ( @@ -124,7 +128,7 @@ def plugin_data(view: Callable[P, R] | None = None, *, payload_type: type[BaseMo raise ValueError("invalid json") try: - payload = payload_type(**data) + payload = payload_type.model_validate(data) except Exception as e: raise ValueError(f"invalid payload: {str(e)}") diff --git a/api/controllers/inner_api/workspace/workspace.py b/api/controllers/inner_api/workspace/workspace.py index 47f0240cd2..861da57708 100644 --- a/api/controllers/inner_api/workspace/workspace.py +++ b/api/controllers/inner_api/workspace/workspace.py @@ -7,7 +7,7 @@ from controllers.inner_api import inner_api_ns from controllers.inner_api.wraps import enterprise_inner_api_only from events.tenant_event import tenant_was_created from extensions.ext_database import db -from models.account import Account 
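[Editor's note] The wraps.py changes above give the remaining untyped decorators the same `ParamSpec`/`TypeVar` signature the rest of the module already uses, so the wrapped view keeps its argument and return types. Below is a standalone sketch of that decorator shape; the permission check is a stand-in, not Dify's actual account logic.

```python
# Generic decorator skeleton in the Callable[P, R] style used in wraps.py.
# has_edit_permission() is a placeholder for the real account lookup.
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

from werkzeug.exceptions import Forbidden

P = ParamSpec("P")
R = TypeVar("R")


def has_edit_permission() -> bool:  # placeholder
    return True


def edit_permission_required(view: Callable[P, R]) -> Callable[P, R]:
    @wraps(view)
    def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
        if not has_edit_permission():
            raise Forbidden()
        return view(*args, **kwargs)

    return decorated
```

The explicit `-> Callable[P, R]` return annotation is optional, but it lets type checkers see through the decorator to the wrapped view's signature.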
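[Editor's note] Several hunks in this part of the diff replace `Model(**args)` with `Model.model_validate(args)`. In Pydantic v2 both paths run validation, but `model_validate` takes the mapping (or an arbitrary object) directly, so keys never need to be valid keyword arguments and static type checkers do not have to reason about `**`-unpacking an untyped dict. A tiny illustration with a made-up model:

```python
# Model(**data) vs Model.model_validate(data) in Pydantic v2.
# KnowledgeConfigExample is a made-up model, not Dify's KnowledgeConfig.
from pydantic import BaseModel


class KnowledgeConfigExample(BaseModel):
    name: str
    indexing_technique: str = "economy"


data = {"name": "docs", "indexing_technique": "high_quality"}

a = KnowledgeConfigExample(**data)               # keyword unpacking
b = KnowledgeConfigExample.model_validate(data)  # validate the mapping directly
assert a == b
```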
+from models import Account from services.account_service import TenantService diff --git a/api/controllers/service_api/app/annotation.py b/api/controllers/service_api/app/annotation.py index ad1bdc7334..0521f1537c 100644 --- a/api/controllers/service_api/app/annotation.py +++ b/api/controllers/service_api/app/annotation.py @@ -10,7 +10,7 @@ from controllers.service_api.wraps import validate_app_token from extensions.ext_redis import redis_client from fields.annotation_fields import annotation_fields, build_annotation_model from libs.login import current_user -from models.account import Account +from models import Account from models.model import App from services.annotation_service import AppAnnotationService diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 6a70345f7c..92bbb76f0f 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -1,10 +1,10 @@ -from typing import Literal +from typing import Any, Literal, cast from flask import request from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, NotFound -import services.dataset_service +import services from controllers.service_api import service_api_ns from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError from controllers.service_api.wraps import ( @@ -17,6 +17,7 @@ from core.provider_manager import ProviderManager from fields.dataset_fields import dataset_detail_fields from fields.tag_fields import build_dataset_tag_fields from libs.login import current_user +from libs.validators import validate_description_length from models.account import Account from models.dataset import Dataset, DatasetPermissionEnum from models.provider_ids import ModelProviderID @@ -31,12 +32,6 @@ def _validate_name(name): return name -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - # Define parsers for dataset operations dataset_create_parser = reqparse.RequestParser() dataset_create_parser.add_argument( @@ -48,7 +43,7 @@ dataset_create_parser.add_argument( ) dataset_create_parser.add_argument( "description", - type=_validate_description_length, + type=validate_description_length, nullable=True, required=False, default="", @@ -101,7 +96,7 @@ dataset_update_parser.add_argument( type=_validate_name, ) dataset_update_parser.add_argument( - "description", location="json", store_missing=False, type=_validate_description_length + "description", location="json", store_missing=False, type=validate_description_length ) dataset_update_parser.add_argument( "indexing_technique", @@ -254,19 +249,21 @@ class DatasetListApi(DatasetApiResource): """Resource for creating datasets.""" args = dataset_create_parser.parse_args() - if args.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") - ) + embedding_model_provider = args.get("embedding_model_provider") + embedding_model = args.get("embedding_model") + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model) + + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and 
args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) try: @@ -283,7 +280,7 @@ class DatasetListApi(DatasetApiResource): external_knowledge_id=args["external_knowledge_id"], embedding_model_provider=args["embedding_model_provider"], embedding_model_name=args["embedding_model"], - retrieval_model=RetrievalModel(**args["retrieval_model"]) + retrieval_model=RetrievalModel.model_validate(args["retrieval_model"]) if args["retrieval_model"] is not None else None, ) @@ -317,7 +314,7 @@ class DatasetApi(DatasetApiResource): DatasetService.check_dataset_permission(dataset, current_user) except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - data = marshal(dataset, dataset_detail_fields) + data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) # check embedding setting provider_manager = ProviderManager() assert isinstance(current_user, Account) @@ -331,8 +328,8 @@ class DatasetApi(DatasetApiResource): for embedding_model in embedding_models: model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}") - if data["indexing_technique"] == "high_quality": - item_model = f"{data['embedding_model']}:{data['embedding_model_provider']}" + if data.get("indexing_technique") == "high_quality": + item_model = f"{data.get('embedding_model')}:{data.get('embedding_model_provider')}" if item_model in model_names: data["embedding_available"] = True else: @@ -341,7 +338,9 @@ class DatasetApi(DatasetApiResource): data["embedding_available"] = True # force update search method to keyword_search if indexing_technique is economic - data["retrieval_model_dict"]["search_method"] = "keyword_search" + retrieval_model_dict = data.get("retrieval_model_dict") + if retrieval_model_dict: + retrieval_model_dict["search_method"] = "keyword_search" if data.get("permission") == "partial_members": part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str) @@ -372,19 +371,24 @@ class DatasetApi(DatasetApiResource): data = request.get_json() # check embedding model setting - if data.get("indexing_technique") == "high_quality" or data.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - dataset.tenant_id, data.get("embedding_model_provider"), data.get("embedding_model") - ) + embedding_model_provider = data.get("embedding_model_provider") + embedding_model = data.get("embedding_model") + if data.get("indexing_technique") == "high_quality" or embedding_model_provider: + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting( + dataset.tenant_id, embedding_model_provider, embedding_model + ) + + retrieval_model = data.get("retrieval_model") if ( - data.get("retrieval_model") - and data.get("retrieval_model").get("reranking_model") - and data.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and 
retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( dataset.tenant_id, - data.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - data.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator @@ -397,7 +401,7 @@ class DatasetApi(DatasetApiResource): if dataset is None: raise NotFound("Dataset not found.") - result_data = marshal(dataset, dataset_detail_fields) + result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) assert isinstance(current_user, Account) tenant_id = current_user.current_tenant_id @@ -591,9 +595,10 @@ class DatasetTagsApi(DatasetApiResource): args = tag_update_parser.parse_args() args["type"] = "knowledge" - tag = TagService.update_tags(args, args.get("tag_id")) + tag_id = args["tag_id"] + tag = TagService.update_tags(args, tag_id) - binding_count = TagService.get_tag_binding_count(args.get("tag_id")) + binding_count = TagService.get_tag_binding_count(tag_id) response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count} @@ -616,7 +621,7 @@ class DatasetTagsApi(DatasetApiResource): if not current_user.has_edit_permission: raise Forbidden() args = tag_delete_parser.parse_args() - TagService.delete_tag(args.get("tag_id")) + TagService.delete_tag(args["tag_id"]) return 204 diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index e01bc8940c..961a338bc5 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -108,19 +108,21 @@ class DocumentAddByTextApi(DatasetApiResource): if text is None or name is None: raise ValueError("Both 'text' and 'name' must be non-null values.") - if args.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") - ) + embedding_model_provider = args.get("embedding_model_provider") + embedding_model = args.get("embedding_model") + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model) + + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) if not current_user: @@ -134,7 +136,7 @@ class DocumentAddByTextApi(DatasetApiResource): "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}}, } args["data_source"] = data_source - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) # 
validate args DocumentService.document_create_args_validate(knowledge_config) @@ -187,15 +189,16 @@ class DocumentUpdateByTextApi(DatasetApiResource): if not dataset: raise ValueError("Dataset does not exist.") + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) # indexing_technique is already set in dataset since this is an update @@ -218,7 +221,7 @@ class DocumentUpdateByTextApi(DatasetApiResource): args["data_source"] = data_source # validate args args["original_document_id"] = str(document_id) - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) DocumentService.document_create_args_validate(knowledge_config) try: @@ -325,7 +328,7 @@ class DocumentAddByFileApi(DatasetApiResource): } args["data_source"] = data_source # validate args - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) DocumentService.document_create_args_validate(knowledge_config) dataset_process_rule = dataset.latest_process_rule if "process_rule" not in args else None @@ -423,7 +426,7 @@ class DocumentUpdateByFileApi(DatasetApiResource): # validate args args["original_document_id"] = str(document_id) - knowledge_config = KnowledgeConfig(**args) + knowledge_config = KnowledgeConfig.model_validate(args) DocumentService.document_create_args_validate(knowledge_config) try: diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py index c6032048e6..51420fdd5f 100644 --- a/api/controllers/service_api/dataset/metadata.py +++ b/api/controllers/service_api/dataset/metadata.py @@ -51,7 +51,7 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource): def post(self, tenant_id, dataset_id): """Create metadata for a dataset.""" args = metadata_create_parser.parse_args() - metadata_args = MetadataArgs(**args) + metadata_args = MetadataArgs.model_validate(args) dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -106,7 +106,7 @@ class DatasetMetadataServiceApi(DatasetApiResource): raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) + metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args["name"]) return marshal(metadata, dataset_metadata_fields), 200 @service_api_ns.doc("delete_dataset_metadata") @@ -200,7 +200,7 @@ class DocumentMetadataEditServiceApi(DatasetApiResource): DatasetService.check_dataset_permission(dataset, current_user) args = document_metadata_parser.parse_args() - metadata_args = MetadataOperationData(**args) + metadata_args = MetadataOperationData.model_validate(args) MetadataService.update_documents_metadata(dataset, metadata_args) diff --git 
a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py index f05325d711..38891f0180 100644 --- a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py @@ -17,7 +17,7 @@ from core.app.apps.pipeline.pipeline_generator import PipelineGenerator from core.app.entities.app_invoke_entities import InvokeFrom from libs import helper from libs.login import current_user -from models.account import Account +from models import Account from models.dataset import Pipeline from models.engine import db from services.errors.file import FileTooLargeError, UnsupportedFileTypeError @@ -98,7 +98,7 @@ class DatasourceNodeRunApi(DatasetApiResource): parser.add_argument("is_published", type=bool, required=True, location="json") args: ParseResult = parser.parse_args() - datasource_node_run_api_entity: DatasourceNodeRunApiEntity = DatasourceNodeRunApiEntity(**args) + datasource_node_run_api_entity = DatasourceNodeRunApiEntity.model_validate(args) assert isinstance(current_user, Account) rag_pipeline_service: RagPipelineService = RagPipelineService() pipeline: Pipeline = rag_pipeline_service.get_pipeline(tenant_id=tenant_id, dataset_id=dataset_id) diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index a22155b07a..acbbf4531b 100644 --- a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -1,5 +1,4 @@ from flask import request -from flask_login import current_user from flask_restx import marshal, reqparse from werkzeug.exceptions import NotFound @@ -16,6 +15,7 @@ from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType from extensions.ext_database import db from fields.segment_fields import child_chunk_fields, segment_fields +from libs.login import current_account_with_tenant from models.dataset import Dataset from services.dataset_service import DatasetService, DocumentService, SegmentService from services.entities.knowledge_entities.knowledge_entities import SegmentUpdateArgs @@ -66,6 +66,7 @@ class SegmentApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str): + _, current_tenant_id = current_account_with_tenant() """Create single segment.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -84,7 +85,7 @@ class SegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -117,6 +118,7 @@ class SegmentApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str): + _, current_tenant_id = current_account_with_tenant() """Get segments.""" # check dataset page = request.args.get("page", default=1, type=int) @@ -133,7 +135,7 @@ class SegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, 
model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -149,7 +151,7 @@ class SegmentApi(DatasetApiResource): segments, total = SegmentService.get_segments( document_id=document_id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, status_list=args["status"], keyword=args["keyword"], page=page, @@ -184,6 +186,7 @@ class DatasetSegmentApi(DatasetApiResource): ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -195,7 +198,7 @@ class DatasetSegmentApi(DatasetApiResource): if not document: raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") SegmentService.delete_segment(segment, document, dataset) @@ -217,6 +220,7 @@ class DatasetSegmentApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -232,7 +236,7 @@ class DatasetSegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -244,7 +248,7 @@ class DatasetSegmentApi(DatasetApiResource): except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -252,7 +256,7 @@ class DatasetSegmentApi(DatasetApiResource): args = segment_update_parser.parse_args() updated_segment = SegmentService.update_segment( - SegmentUpdateArgs(**args["segment"]), segment, document, dataset + SegmentUpdateArgs.model_validate(args["segment"]), segment, document, dataset ) return {"data": marshal(updated_segment, segment_fields), "doc_form": document.doc_form}, 200 @@ -266,6 +270,7 @@ class DatasetSegmentApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -277,7 +282,7 @@ class DatasetSegmentApi(DatasetApiResource): if not document: raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise 
NotFound("Segment not found.") @@ -307,6 +312,7 @@ class ChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() """Create child chunk.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -319,7 +325,7 @@ class ChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -328,7 +334,7 @@ class ChildChunkApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -364,6 +370,7 @@ class ChildChunkApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() """Get child chunks.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -376,7 +383,7 @@ class ChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -423,6 +430,7 @@ class DatasetChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, child_chunk_id: str): + _, current_tenant_id = current_account_with_tenant() """Delete child chunk.""" # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -435,7 +443,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -444,9 +452,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # check child chunk - child_chunk = SegmentService.get_child_chunk_by_id( - child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id - ) + child_chunk = SegmentService.get_child_chunk_by_id(child_chunk_id=child_chunk_id, tenant_id=current_tenant_id) if not child_chunk: raise NotFound("Child chunk not found.") @@ -483,6 +489,7 @@ class DatasetChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, 
child_chunk_id: str): """Update child chunk.""" + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -495,7 +502,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Document not found.") # get segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -504,9 +511,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Segment not found.") # get child chunk - child_chunk = SegmentService.get_child_chunk_by_id( - child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id - ) + child_chunk = SegmentService.get_child_chunk_by_id(child_chunk_id=child_chunk_id, tenant_id=current_tenant_id) if not child_chunk: raise NotFound("Child chunk not found.") diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index ee8e1d105b..638ab528f3 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -17,7 +17,7 @@ from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now from libs.login import current_user -from models.account import Account, Tenant, TenantAccountJoin, TenantStatus +from models import Account, Tenant, TenantAccountJoin, TenantStatus from models.dataset import Dataset, RateLimitLog from models.model import ApiToken, App, DefaultEndUserSessionID, EndUser from services.feature_service import FeatureService @@ -313,7 +313,7 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: str | None = Create or update session terminal based on user ID.
""" if not user_id: - user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID with Session(db.engine, expire_on_commit=False) as session: end_user = ( @@ -332,7 +332,7 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: str | None = tenant_id=app_model.tenant_id, app_id=app_model.id, type="service_api", - is_anonymous=user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID.value, + is_anonymous=user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID, session_id=user_id, ) session.add(end_user) diff --git a/api/controllers/web/forgot_password.py b/api/controllers/web/forgot_password.py index c743d0f52b..cbafd70e99 100644 --- a/api/controllers/web/forgot_password.py +++ b/api/controllers/web/forgot_password.py @@ -20,7 +20,7 @@ from controllers.web import web_ns from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password -from models.account import Account +from models import Account from services.account_service import AccountService diff --git a/api/controllers/web/passport.py b/api/controllers/web/passport.py index 6f7105a724..7190f06426 100644 --- a/api/controllers/web/passport.py +++ b/api/controllers/web/passport.py @@ -126,6 +126,8 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: end_user_id = enterprise_user_decoded.get("end_user_id") session_id = enterprise_user_decoded.get("session_id") user_auth_type = enterprise_user_decoded.get("auth_type") + exchanged_token_expires_unix = enterprise_user_decoded.get("exp") + if not user_auth_type: raise Unauthorized("Missing auth_type in the token.") @@ -169,8 +171,11 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: ) db.session.add(end_user) db.session.commit() - exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES) - exp = int(exp_dt.timestamp()) + + exp = int((datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)).timestamp()) + if exchanged_token_expires_unix: + exp = int(exchanged_token_expires_unix) + payload = { "iss": site.id, "sub": "Web API Passport", diff --git a/api/core/app/app_config/easy_ui_based_app/agent/manager.py b/api/core/app/app_config/easy_ui_based_app/agent/manager.py index eab26e5af9..c1f336fdde 100644 --- a/api/core/app/app_config/easy_ui_based_app/agent/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/agent/manager.py @@ -40,7 +40,7 @@ class AgentConfigManager: "credential_id": tool.get("credential_id", None), } - agent_tools.append(AgentToolEntity(**agent_tool_properties)) + agent_tools.append(AgentToolEntity.model_validate(agent_tool_properties)) if "strategy" in config["agent_mode"] and config["agent_mode"]["strategy"] not in { "react_router", diff --git a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py index 4b824bde76..aacafb2dad 100644 --- a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py @@ -1,4 +1,5 @@ import uuid +from typing import Literal, cast from core.app.app_config.entities import ( DatasetEntity, @@ -74,6 +75,9 @@ class DatasetConfigManager: return None query_variable = config.get("dataset_query_variable") + metadata_model_config_dict = dataset_configs.get("metadata_model_config") + metadata_filtering_conditions_dict = 
dataset_configs.get("metadata_filtering_conditions") + if dataset_configs["retrieval_model"] == "single": return DatasetEntity( dataset_ids=dataset_ids, @@ -82,18 +86,23 @@ class DatasetConfigManager: retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of( dataset_configs["retrieval_model"] ), - metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), - metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) - if dataset_configs.get("metadata_model_config") + metadata_filtering_mode=cast( + Literal["disabled", "automatic", "manual"], + dataset_configs.get("metadata_filtering_mode", "disabled"), + ), + metadata_model_config=ModelConfig(**metadata_model_config_dict) + if isinstance(metadata_model_config_dict, dict) else None, - metadata_filtering_conditions=MetadataFilteringCondition( - **dataset_configs.get("metadata_filtering_conditions", {}) - ) - if dataset_configs.get("metadata_filtering_conditions") + metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict) + if isinstance(metadata_filtering_conditions_dict, dict) else None, ), ) else: + score_threshold_val = dataset_configs.get("score_threshold") + reranking_model_val = dataset_configs.get("reranking_model") + weights_val = dataset_configs.get("weights") + return DatasetEntity( dataset_ids=dataset_ids, retrieve_config=DatasetRetrieveConfigEntity( @@ -101,22 +110,23 @@ class DatasetConfigManager: retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of( dataset_configs["retrieval_model"] ), - top_k=dataset_configs.get("top_k", 4), - score_threshold=dataset_configs.get("score_threshold") - if dataset_configs.get("score_threshold_enabled", False) + top_k=int(dataset_configs.get("top_k", 4)), + score_threshold=float(score_threshold_val) + if dataset_configs.get("score_threshold_enabled", False) and score_threshold_val is not None else None, - reranking_model=dataset_configs.get("reranking_model"), - weights=dataset_configs.get("weights"), - reranking_enabled=dataset_configs.get("reranking_enabled", True), + reranking_model=reranking_model_val if isinstance(reranking_model_val, dict) else None, + weights=weights_val if isinstance(weights_val, dict) else None, + reranking_enabled=bool(dataset_configs.get("reranking_enabled", True)), rerank_mode=dataset_configs.get("reranking_mode", "reranking_model"), - metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), - metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) - if dataset_configs.get("metadata_model_config") + metadata_filtering_mode=cast( + Literal["disabled", "automatic", "manual"], + dataset_configs.get("metadata_filtering_mode", "disabled"), + ), + metadata_model_config=ModelConfig(**metadata_model_config_dict) + if isinstance(metadata_model_config_dict, dict) else None, - metadata_filtering_conditions=MetadataFilteringCondition( - **dataset_configs.get("metadata_filtering_conditions", {}) - ) - if dataset_configs.get("metadata_filtering_conditions") + metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict) + if isinstance(metadata_filtering_conditions_dict, dict) else None, ), ) @@ -134,18 +144,17 @@ class DatasetConfigManager: config = cls.extract_dataset_config_for_legacy_compatibility(tenant_id, app_mode, config) # dataset_configs - if not config.get("dataset_configs"): - config["dataset_configs"] = {"retrieval_model": "single"} + if "dataset_configs" not in config 
or not config.get("dataset_configs"): + config["dataset_configs"] = {} + config["dataset_configs"]["retrieval_model"] = config["dataset_configs"].get("retrieval_model", "single") if not isinstance(config["dataset_configs"], dict): raise ValueError("dataset_configs must be of object type") - if not config["dataset_configs"].get("datasets"): + if "datasets" not in config["dataset_configs"] or not config["dataset_configs"].get("datasets"): config["dataset_configs"]["datasets"] = {"strategy": "router", "datasets": []} - need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get( - "datasets", {} - ).get("datasets") + need_manual_query_datasets = config.get("dataset_configs", {}).get("datasets", {}).get("datasets") if need_manual_query_datasets and app_mode == AppMode.COMPLETION: # Only check when mode is completion @@ -166,8 +175,8 @@ class DatasetConfigManager: :param config: app model config args """ # Extract dataset config for legacy compatibility - if not config.get("agent_mode"): - config["agent_mode"] = {"enabled": False, "tools": []} + if "agent_mode" not in config or not config.get("agent_mode"): + config["agent_mode"] = {} if not isinstance(config["agent_mode"], dict): raise ValueError("agent_mode must be of object type") @@ -180,19 +189,22 @@ class DatasetConfigManager: raise ValueError("enabled in agent_mode must be of boolean type") # tools - if not config["agent_mode"].get("tools"): + if "tools" not in config["agent_mode"] or not config["agent_mode"].get("tools"): config["agent_mode"]["tools"] = [] if not isinstance(config["agent_mode"]["tools"], list): raise ValueError("tools in agent_mode must be a list of objects") # strategy - if not config["agent_mode"].get("strategy"): - config["agent_mode"]["strategy"] = PlanningStrategy.ROUTER.value + if "strategy" not in config["agent_mode"] or not config["agent_mode"].get("strategy"): + config["agent_mode"]["strategy"] = PlanningStrategy.ROUTER has_datasets = False - if config["agent_mode"]["strategy"] in {PlanningStrategy.ROUTER.value, PlanningStrategy.REACT_ROUTER.value}: - for tool in config["agent_mode"]["tools"]: + if config.get("agent_mode", {}).get("strategy") in { + PlanningStrategy.ROUTER, + PlanningStrategy.REACT_ROUTER, + }: + for tool in config.get("agent_mode", {}).get("tools", []): key = list(tool.keys())[0] if key == "dataset": # old style, use tool name as key @@ -217,7 +229,7 @@ class DatasetConfigManager: has_datasets = True - need_manual_query_datasets = has_datasets and config["agent_mode"]["enabled"] + need_manual_query_datasets = has_datasets and config.get("agent_mode", {}).get("enabled") if need_manual_query_datasets and app_mode == AppMode.COMPLETION: # Only check when mode is completion diff --git a/api/core/app/app_config/easy_ui_based_app/model_config/converter.py b/api/core/app/app_config/easy_ui_based_app/model_config/converter.py index 5b5eefe315..b816c8d7d0 100644 --- a/api/core/app/app_config/easy_ui_based_app/model_config/converter.py +++ b/api/core/app/app_config/easy_ui_based_app/model_config/converter.py @@ -68,9 +68,13 @@ class ModelConfigConverter: # get model mode model_mode = model_config.mode if not model_mode: - model_mode = LLMMode.CHAT.value + model_mode = LLMMode.CHAT if model_schema and model_schema.model_properties.get(ModelPropertyKey.MODE): - model_mode = LLMMode(model_schema.model_properties[ModelPropertyKey.MODE]).value + try: + model_mode = LLMMode(model_schema.model_properties[ModelPropertyKey.MODE]) + except ValueError: + # Fall back to CHAT mode if 
the stored value is invalid + model_mode = LLMMode.CHAT if not model_schema: raise ValueError(f"Model {model_name} not exist.") diff --git a/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py b/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py index ec4f6074ab..21614c010c 100644 --- a/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/prompt_template/manager.py @@ -100,7 +100,7 @@ class PromptTemplateConfigManager: if config["model"]["mode"] not in model_mode_vals: raise ValueError(f"model.mode must be in {model_mode_vals} when prompt_type is advanced") - if app_mode == AppMode.CHAT and config["model"]["mode"] == ModelMode.COMPLETION.value: + if app_mode == AppMode.CHAT and config["model"]["mode"] == ModelMode.COMPLETION: user_prefix = config["completion_prompt_config"]["conversation_histories_role"]["user_prefix"] assistant_prefix = config["completion_prompt_config"]["conversation_histories_role"]["assistant_prefix"] @@ -110,7 +110,7 @@ class PromptTemplateConfigManager: if not assistant_prefix: config["completion_prompt_config"]["conversation_histories_role"]["assistant_prefix"] = "Assistant" - if config["model"]["mode"] == ModelMode.CHAT.value: + if config["model"]["mode"] == ModelMode.CHAT: prompt_list = config["chat_prompt_config"]["prompt"] if len(prompt_list) > 10: diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index e021b0aca7..b5af6382e8 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -70,8 +70,7 @@ from core.workflow.system_variable import SystemVariable from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from models import Conversation, EndUser, Message, MessageFile -from models.account import Account +from models import Account, Conversation, EndUser, Message, MessageFile from models.enums import CreatorUserRole from models.workflow import Workflow diff --git a/api/core/app/apps/agent_chat/app_config_manager.py b/api/core/app/apps/agent_chat/app_config_manager.py index 9ce841f432..801619ddbc 100644 --- a/api/core/app/apps/agent_chat/app_config_manager.py +++ b/api/core/app/apps/agent_chat/app_config_manager.py @@ -186,7 +186,7 @@ class AgentChatAppConfigManager(BaseAppConfigManager): raise ValueError("enabled in agent_mode must be of boolean type") if not agent_mode.get("strategy"): - agent_mode["strategy"] = PlanningStrategy.ROUTER.value + agent_mode["strategy"] = PlanningStrategy.ROUTER if agent_mode["strategy"] not in [member.value for member in list(PlanningStrategy.__members__.values())]: raise ValueError("strategy in agent_mode must be in the specified strategy list") diff --git a/api/core/app/apps/agent_chat/app_runner.py b/api/core/app/apps/agent_chat/app_runner.py index 388bed5255..759398b556 100644 --- a/api/core/app/apps/agent_chat/app_runner.py +++ b/api/core/app/apps/agent_chat/app_runner.py @@ -198,9 +198,9 @@ class AgentChatAppRunner(AppRunner): # start agent runner if agent_entity.strategy == AgentEntity.Strategy.CHAIN_OF_THOUGHT: # check LLM mode - if model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT.value: + if model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT: runner_cls = CotChatAgentRunner - elif 
model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.COMPLETION.value: + elif model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.COMPLETION: runner_cls = CotCompletionAgentRunner else: raise ValueError(f"Invalid LLM mode: {model_schema.model_properties.get(ModelPropertyKey.MODE)}") diff --git a/api/core/app/apps/base_app_queue_manager.py b/api/core/app/apps/base_app_queue_manager.py index fdba952eeb..074555e31b 100644 --- a/api/core/app/apps/base_app_queue_manager.py +++ b/api/core/app/apps/base_app_queue_manager.py @@ -1,9 +1,13 @@ +import logging import queue +import threading import time from abc import abstractmethod from enum import IntEnum, auto from typing import Any +from cachetools import TTLCache, cachedmethod +from redis.exceptions import RedisError from sqlalchemy.orm import DeclarativeMeta from configs import dify_config @@ -18,6 +22,8 @@ from core.app.entities.queue_entities import ( ) from extensions.ext_redis import redis_client +logger = logging.getLogger(__name__) + class PublishFrom(IntEnum): APPLICATION_MANAGER = auto() @@ -35,13 +41,14 @@ class AppQueueManager: self.invoke_from = invoke_from # Public accessor for invoke_from user_prefix = "account" if self._invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end-user" - redis_client.setex( - AppQueueManager._generate_task_belong_cache_key(self._task_id), 1800, f"{user_prefix}-{self._user_id}" - ) + self._task_belong_cache_key = AppQueueManager._generate_task_belong_cache_key(self._task_id) + redis_client.setex(self._task_belong_cache_key, 1800, f"{user_prefix}-{self._user_id}") q: queue.Queue[WorkflowQueueMessage | MessageQueueMessage | None] = queue.Queue() self._q = q + self._stopped_cache: TTLCache[tuple, bool] = TTLCache(maxsize=1, ttl=1) + self._cache_lock = threading.Lock() def listen(self): """ @@ -79,9 +86,21 @@ class AppQueueManager: Stop listen to queue :return: """ + self._clear_task_belong_cache() self._q.put(None) - def publish_error(self, e, pub_from: PublishFrom): + def _clear_task_belong_cache(self) -> None: + """ + Remove the task belong cache key once listening is finished. 
+ """ + try: + redis_client.delete(self._task_belong_cache_key) + except RedisError: + logger.exception( + "Failed to clear task belong cache for task %s (key: %s)", self._task_id, self._task_belong_cache_key + ) + + def publish_error(self, e, pub_from: PublishFrom) -> None: """ Publish error :param e: error @@ -142,6 +161,7 @@ class AppQueueManager: stopped_cache_key = cls._generate_stopped_cache_key(task_id) redis_client.setex(stopped_cache_key, 600, 1) + @cachedmethod(lambda self: self._stopped_cache, lock=lambda self: self._cache_lock) def _is_stopped(self) -> bool: """ Check if task is stopped diff --git a/api/core/app/apps/base_app_runner.py b/api/core/app/apps/base_app_runner.py index e7db3bc41b..61ac040c05 100644 --- a/api/core/app/apps/base_app_runner.py +++ b/api/core/app/apps/base_app_runner.py @@ -61,9 +61,6 @@ class AppRunner: if model_context_tokens is None: return -1 - if max_tokens is None: - max_tokens = 0 - prompt_tokens = model_instance.get_llm_num_tokens(prompt_messages) if prompt_tokens + max_tokens > model_context_tokens: diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index 8bd956b314..c1251d2feb 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -23,7 +23,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from extensions.ext_database import db from factories import file_factory -from models.account import Account +from models import Account from models.model import App, EndUser from services.conversation_service import ConversationService diff --git a/api/core/app/apps/completion/generate_response_converter.py b/api/core/app/apps/completion/generate_response_converter.py index d7e9ebdf24..a4f574642d 100644 --- a/api/core/app/apps/completion/generate_response_converter.py +++ b/api/core/app/apps/completion/generate_response_converter.py @@ -112,7 +112,7 @@ class CompletionAppGenerateResponseConverter(AppGenerateResponseConverter): metadata = {} sub_stream_response_dict["metadata"] = cls._get_simple_metadata(metadata) response_chunk.update(sub_stream_response_dict) - if isinstance(sub_stream_response, ErrorStreamResponse): + elif isinstance(sub_stream_response, ErrorStreamResponse): data = cls._error_to_stream_response(sub_stream_response.err) response_chunk.update(data) else: diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index 170c6a274b..7a51b8f3a5 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -207,6 +207,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): from_source=from_source, from_end_user_id=end_user_id, from_account_id=account_id, + app_mode=app_config.app_mode, ) db.session.add(message) diff --git a/api/core/app/apps/pipeline/pipeline_runner.py b/api/core/app/apps/pipeline/pipeline_runner.py index 145f629c4d..a8a7dde2b4 100644 --- a/api/core/app/apps/pipeline/pipeline_runner.py +++ b/api/core/app/apps/pipeline/pipeline_runner.py @@ -116,7 +116,7 @@ class PipelineRunner(WorkflowBasedAppRunner): rag_pipeline_variables = [] if workflow.rag_pipeline_variables: for v in workflow.rag_pipeline_variables: - rag_pipeline_variable = RAGPipelineVariable(**v) + rag_pipeline_variable = RAGPipelineVariable.model_validate(v) if ( rag_pipeline_variable.belong_to_node_id in (self.application_generate_entity.start_node_id, "shared") @@ -229,8 +229,8 @@ class 
PipelineRunner(WorkflowBasedAppRunner): workflow_id=workflow.id, graph_config=graph_config, user_id=self.application_generate_entity.user_id, - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 56b0d91141..ec4dc87643 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -61,7 +61,7 @@ from core.workflow.repositories.workflow_node_execution_repository import Workfl from core.workflow.system_variable import SystemVariable from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager from extensions.ext_database import db -from models.account import Account +from models import Account from models.enums import CreatorUserRole from models.model import EndUser from models.workflow import ( diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 564daba86d..68eb455d26 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -100,8 +100,8 @@ class WorkflowBasedAppRunner: workflow_id=workflow_id, graph_config=graph_config, user_id=user_id, - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) @@ -244,8 +244,8 @@ class WorkflowBasedAppRunner: workflow_id=workflow.id, graph_config=graph_config, user_id="", - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) diff --git a/api/core/app/task_pipeline/message_cycle_manager.py b/api/core/app/task_pipeline/message_cycle_manager.py index 0004fb592e..7a384e5c92 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -107,7 +107,6 @@ class MessageCycleManager: if dify_config.DEBUG: logger.exception("generate conversation name failed, conversation_id: %s", conversation_id) - db.session.merge(conversation) db.session.commit() db.session.close() diff --git a/api/core/datasource/__base/datasource_runtime.py b/api/core/datasource/__base/datasource_runtime.py index b7f280208a..c5d6c1d771 100644 --- a/api/core/datasource/__base/datasource_runtime.py +++ b/api/core/datasource/__base/datasource_runtime.py @@ -1,7 +1,6 @@ from typing import TYPE_CHECKING, Any, Optional -from openai import BaseModel -from pydantic import Field +from pydantic import BaseModel, Field # Import InvokeFrom locally to avoid circular import from core.app.entities.app_invoke_entities import InvokeFrom diff --git a/api/core/datasource/entities/api_entities.py b/api/core/datasource/entities/api_entities.py index cdefcc4506..1179537570 100644 --- a/api/core/datasource/entities/api_entities.py +++ b/api/core/datasource/entities/api_entities.py @@ -49,7 +49,7 @@ class DatasourceProviderApiEntity(BaseModel): for datasource in datasources: if datasource.get("parameters"): for parameter in datasource.get("parameters"): - if parameter.get("type") == DatasourceParameter.DatasourceParameterType.SYSTEM_FILES.value: + if parameter.get("type") == DatasourceParameter.DatasourceParameterType.SYSTEM_FILES: parameter["type"] = "files" # ------------- diff --git 
a/api/core/datasource/entities/common_entities.py b/api/core/datasource/entities/common_entities.py index ac36d83ae3..3c64632dbb 100644 --- a/api/core/datasource/entities/common_entities.py +++ b/api/core/datasource/entities/common_entities.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, model_validator class I18nObject(BaseModel): @@ -11,11 +11,12 @@ class I18nObject(BaseModel): pt_BR: str | None = Field(default=None) ja_JP: str | None = Field(default=None) - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _(self): self.zh_Hans = self.zh_Hans or self.en_US self.pt_BR = self.pt_BR or self.en_US self.ja_JP = self.ja_JP or self.en_US + return self def to_dict(self) -> dict: return {"zh_Hans": self.zh_Hans, "en_US": self.en_US, "pt_BR": self.pt_BR, "ja_JP": self.ja_JP} diff --git a/api/core/datasource/entities/datasource_entities.py b/api/core/datasource/entities/datasource_entities.py index ac4f51ac75..260dcf04f5 100644 --- a/api/core/datasource/entities/datasource_entities.py +++ b/api/core/datasource/entities/datasource_entities.py @@ -1,5 +1,5 @@ import enum -from enum import Enum +from enum import StrEnum from typing import Any from pydantic import BaseModel, Field, ValidationInfo, field_validator @@ -54,16 +54,16 @@ class DatasourceParameter(PluginParameter): removes TOOLS_SELECTOR from PluginParameterType """ - STRING = PluginParameterType.STRING.value - NUMBER = PluginParameterType.NUMBER.value - BOOLEAN = PluginParameterType.BOOLEAN.value - SELECT = PluginParameterType.SELECT.value - SECRET_INPUT = PluginParameterType.SECRET_INPUT.value - FILE = PluginParameterType.FILE.value - FILES = PluginParameterType.FILES.value + STRING = PluginParameterType.STRING + NUMBER = PluginParameterType.NUMBER + BOOLEAN = PluginParameterType.BOOLEAN + SELECT = PluginParameterType.SELECT + SECRET_INPUT = PluginParameterType.SECRET_INPUT + FILE = PluginParameterType.FILE + FILES = PluginParameterType.FILES # deprecated, should not use. 
- SYSTEM_FILES = PluginParameterType.SYSTEM_FILES.value + SYSTEM_FILES = PluginParameterType.SYSTEM_FILES def as_normal_type(self): return as_normal_type(self) @@ -218,7 +218,7 @@ class DatasourceLabel(BaseModel): icon: str = Field(..., description="The icon of the tool") -class DatasourceInvokeFrom(Enum): +class DatasourceInvokeFrom(StrEnum): """ Enum class for datasource invoke """ diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index 111de89178..c4be429219 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -5,7 +5,7 @@ from collections import defaultdict from collections.abc import Iterator, Sequence from json import JSONDecodeError -from pydantic import BaseModel, ConfigDict, Field +from pydantic import BaseModel, ConfigDict, Field, model_validator from sqlalchemy import func, select from sqlalchemy.orm import Session @@ -73,9 +73,8 @@ class ProviderConfiguration(BaseModel): # pydantic configs model_config = ConfigDict(protected_namespaces=()) - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): if self.provider.provider not in original_provider_configurate_methods: original_provider_configurate_methods[self.provider.provider] = [] for configurate_method in self.provider.configurate_methods: @@ -90,6 +89,7 @@ class ProviderConfiguration(BaseModel): and ConfigurateMethod.PREDEFINED_MODEL not in self.provider.configurate_methods ): self.provider.configurate_methods.append(ConfigurateMethod.PREDEFINED_MODEL) + return self def get_current_credentials(self, model_type: ModelType, model: str) -> dict | None: """ @@ -207,7 +207,7 @@ class ProviderConfiguration(BaseModel): """ stmt = select(Provider).where( Provider.tenant_id == self.tenant_id, - Provider.provider_type == ProviderType.CUSTOM.value, + Provider.provider_type == ProviderType.CUSTOM, Provider.provider_name.in_(self._get_provider_names()), ) @@ -458,7 +458,7 @@ class ProviderConfiguration(BaseModel): provider_record = Provider( tenant_id=self.tenant_id, provider_name=self.provider.provider, - provider_type=ProviderType.CUSTOM.value, + provider_type=ProviderType.CUSTOM, is_valid=True, credential_id=new_record.id, ) @@ -472,6 +472,9 @@ class ProviderConfiguration(BaseModel): provider_model_credentials_cache.delete() self.switch_preferred_provider_type(provider_type=ProviderType.CUSTOM, session=session) + else: + # some historical data may have a provider record but not be set as valid + provider_record.is_valid = True session.commit() except Exception: @@ -1145,6 +1148,15 @@ class ProviderConfiguration(BaseModel): raise ValueError("Can't add same credential") provider_model_record.credential_id = credential_record.id provider_model_record.updated_at = naive_utc_now() + + # clear cache + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_model_record.id, + cache_type=ProviderCredentialsCacheType.MODEL, + ) + provider_model_credentials_cache.delete() + session.add(provider_model_record) session.commit() @@ -1178,6 +1190,14 @@ class ProviderConfiguration(BaseModel): session.add(provider_model_record) session.commit() + # clear cache + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_model_record.id, + cache_type=ProviderCredentialsCacheType.MODEL, + ) + provider_model_credentials_cache.delete() + def delete_custom_model(self, model_type: ModelType, 
model: str): """ Delete custom model. @@ -1414,7 +1434,7 @@ class ProviderConfiguration(BaseModel): """ secret_input_form_variables = [] for credential_form_schema in credential_form_schemas: - if credential_form_schema.type.value == FormType.SECRET_INPUT.value: + if credential_form_schema.type == FormType.SECRET_INPUT: secret_input_form_variables.append(credential_form_schema.variable) return secret_input_form_variables diff --git a/api/core/extension/api_based_extension_requestor.py b/api/core/extension/api_based_extension_requestor.py index fab9ae44e9..f9e6099049 100644 --- a/api/core/extension/api_based_extension_requestor.py +++ b/api/core/extension/api_based_extension_requestor.py @@ -1,13 +1,13 @@ from typing import cast -import requests +import httpx from configs import dify_config from models.api_based_extension import APIBasedExtensionPoint class APIBasedExtensionRequestor: - timeout: tuple[int, int] = (5, 60) + timeout: httpx.Timeout = httpx.Timeout(60.0, connect=5.0) """timeout for request connect and read""" def __init__(self, api_endpoint: str, api_key: str): @@ -27,25 +27,23 @@ class APIBasedExtensionRequestor: url = self.api_endpoint try: - # proxy support for security - proxies = None + mounts: dict[str, httpx.BaseTransport] | None = None if dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL: - proxies = { - "http": dify_config.SSRF_PROXY_HTTP_URL, - "https": dify_config.SSRF_PROXY_HTTPS_URL, + mounts = { + "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL), + "https://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTPS_URL), } - response = requests.request( - method="POST", - url=url, - json={"point": point.value, "params": params}, - headers=headers, - timeout=self.timeout, - proxies=proxies, - ) - except requests.Timeout: + with httpx.Client(mounts=mounts, timeout=self.timeout) as client: + response = client.request( + method="POST", + url=url, + json={"point": point.value, "params": params}, + headers=headers, + ) + except httpx.TimeoutException: raise ValueError("request timeout") - except requests.ConnectionError: + except httpx.RequestError: raise ValueError("request connection error") if response.status_code != 200: diff --git a/api/core/helper/code_executor/code_executor.py b/api/core/helper/code_executor/code_executor.py index 0c1d03dc13..f92278f9e2 100644 --- a/api/core/helper/code_executor/code_executor.py +++ b/api/core/helper/code_executor/code_executor.py @@ -131,7 +131,7 @@ class CodeExecutor: if (code := response_data.get("code")) != 0: raise CodeExecutionError(f"Got error code: {code}. 
Got error msg: {response_data.get('message')}") - response_code = CodeExecutionResponse(**response_data) + response_code = CodeExecutionResponse.model_validate(response_data) if response_code.data.error: raise CodeExecutionError(response_code.data.error) diff --git a/api/core/helper/marketplace.py b/api/core/helper/marketplace.py index 10f304c087..bddb864a95 100644 --- a/api/core/helper/marketplace.py +++ b/api/core/helper/marketplace.py @@ -26,7 +26,7 @@ def batch_fetch_plugin_manifests(plugin_ids: list[str]) -> Sequence[MarketplaceP response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version}) response.raise_for_status() - return [MarketplacePluginDeclaration(**plugin) for plugin in response.json()["data"]["plugins"]] + return [MarketplacePluginDeclaration.model_validate(plugin) for plugin in response.json()["data"]["plugins"]] def batch_fetch_plugin_manifests_ignore_deserialization_error( @@ -41,7 +41,7 @@ def batch_fetch_plugin_manifests_ignore_deserialization_error( result: list[MarketplacePluginDeclaration] = [] for plugin in response.json()["data"]["plugins"]: try: - result.append(MarketplacePluginDeclaration(**plugin)) + result.append(MarketplacePluginDeclaration.model_validate(plugin)) except Exception: pass diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index ee37024260..7822ed4268 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -20,7 +20,7 @@ from core.rag.cleaner.clean_processor import CleanProcessor from core.rag.datasource.keyword.keyword_factory import Keyword from core.rag.docstore.dataset_docstore import DatasetDocumentStore from core.rag.extractor.entity.datasource_type import DatasourceType -from core.rag.extractor.entity.extract_setting import ExtractSetting +from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.index_processor.index_processor_factory import IndexProcessorFactory @@ -343,7 +343,7 @@ class IndexingRunner: if file_detail: extract_setting = ExtractSetting( - datasource_type=DatasourceType.FILE.value, + datasource_type=DatasourceType.FILE, upload_file=file_detail, document_model=dataset_document.doc_form, ) @@ -356,15 +356,17 @@ class IndexingRunner: ): raise ValueError("no notion import info found") extract_setting = ExtractSetting( - datasource_type=DatasourceType.NOTION.value, - notion_info={ - "credential_id": data_source_info["credential_id"], - "notion_workspace_id": data_source_info["notion_workspace_id"], - "notion_obj_id": data_source_info["notion_page_id"], - "notion_page_type": data_source_info["type"], - "document": dataset_document, - "tenant_id": dataset_document.tenant_id, - }, + datasource_type=DatasourceType.NOTION, + notion_info=NotionInfo.model_validate( + { + "credential_id": data_source_info["credential_id"], + "notion_workspace_id": data_source_info["notion_workspace_id"], + "notion_obj_id": data_source_info["notion_page_id"], + "notion_page_type": data_source_info["type"], + "document": dataset_document, + "tenant_id": dataset_document.tenant_id, + } + ), document_model=dataset_document.doc_form, ) text_docs = index_processor.extract(extract_setting, process_rule_mode=process_rule["mode"]) @@ -377,15 +379,17 @@ class IndexingRunner: ): raise ValueError("no website import info found") extract_setting = ExtractSetting( - 
datasource_type=DatasourceType.WEBSITE.value, - website_info={ - "provider": data_source_info["provider"], - "job_id": data_source_info["job_id"], - "tenant_id": dataset_document.tenant_id, - "url": data_source_info["url"], - "mode": data_source_info["mode"], - "only_main_content": data_source_info["only_main_content"], - }, + datasource_type=DatasourceType.WEBSITE, + website_info=WebsiteInfo.model_validate( + { + "provider": data_source_info["provider"], + "job_id": data_source_info["job_id"], + "tenant_id": dataset_document.tenant_id, + "url": data_source_info["url"], + "mode": data_source_info["mode"], + "only_main_content": data_source_info["only_main_content"], + } + ), document_model=dataset_document.doc_form, ) text_docs = index_processor.extract(extract_setting, process_rule_mode=process_rule["mode"]) diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index e07d0ec14e..e64ac25ab1 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -28,7 +28,6 @@ from core.ops.ops_trace_manager import TraceQueueManager, TraceTask from core.ops.utils import measure_time from core.prompt.utils.prompt_template_parser import PromptTemplateParser from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey -from core.workflow.node_events import AgentLogEvent from extensions.ext_database import db from extensions.ext_storage import storage from models import App, Message, WorkflowNodeExecutionModel @@ -462,19 +461,18 @@ class LLMGenerator: ) def agent_log_of(node_execution: WorkflowNodeExecutionModel) -> Sequence: - raw_agent_log = node_execution.execution_metadata_dict.get(WorkflowNodeExecutionMetadataKey.AGENT_LOG) + raw_agent_log = node_execution.execution_metadata_dict.get(WorkflowNodeExecutionMetadataKey.AGENT_LOG, []) if not raw_agent_log: return [] - parsed: Sequence[AgentLogEvent] = json.loads(raw_agent_log) - def dict_of_event(event: AgentLogEvent): - return { - "status": event.status, - "error": event.error, - "data": event.data, + return [ + { + "status": event["status"], + "error": event["error"], + "data": event["data"], } - - return [dict_of_event(event) for event in parsed] + for event in raw_agent_log + ] inputs = last_run.load_full_inputs(session, storage) last_run_dict = { diff --git a/api/core/llm_generator/output_parser/structured_output.py b/api/core/llm_generator/output_parser/structured_output.py index 1e302b7668..686529c3ca 100644 --- a/api/core/llm_generator/output_parser/structured_output.py +++ b/api/core/llm_generator/output_parser/structured_output.py @@ -224,8 +224,8 @@ def _handle_native_json_schema( # Set appropriate response format if required by the model for rule in rules: - if rule.name == "response_format" and ResponseFormat.JSON_SCHEMA.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON_SCHEMA.value + if rule.name == "response_format" and ResponseFormat.JSON_SCHEMA in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON_SCHEMA return model_parameters @@ -239,10 +239,10 @@ def _set_response_format(model_parameters: dict, rules: list): """ for rule in rules: if rule.name == "response_format": - if ResponseFormat.JSON.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON.value - elif ResponseFormat.JSON_OBJECT.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON_OBJECT.value + if ResponseFormat.JSON in rule.options: + 
model_parameters["response_format"] = ResponseFormat.JSON + elif ResponseFormat.JSON_OBJECT in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON_OBJECT def _handle_prompt_based_schema( diff --git a/api/core/mcp/session/client_session.py b/api/core/mcp/session/client_session.py index 5817416ba4..fa1d309134 100644 --- a/api/core/mcp/session/client_session.py +++ b/api/core/mcp/session/client_session.py @@ -294,7 +294,7 @@ class ClientSession( method="completion/complete", params=types.CompleteRequestParams( ref=ref, - argument=types.CompletionArgument(**argument), + argument=types.CompletionArgument.model_validate(argument), ), ) ), diff --git a/api/core/model_runtime/entities/common_entities.py b/api/core/model_runtime/entities/common_entities.py index c7353de5af..b673efae22 100644 --- a/api/core/model_runtime/entities/common_entities.py +++ b/api/core/model_runtime/entities/common_entities.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel +from pydantic import BaseModel, model_validator class I18nObject(BaseModel): @@ -9,7 +9,8 @@ class I18nObject(BaseModel): zh_Hans: str | None = None en_US: str - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _(self): if not self.zh_Hans: self.zh_Hans = self.en_US + return self diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py index 9235c881e0..89dae2dbff 100644 --- a/api/core/model_runtime/entities/message_entities.py +++ b/api/core/model_runtime/entities/message_entities.py @@ -74,7 +74,7 @@ class TextPromptMessageContent(PromptMessageContent): Model class for text prompt message content. """ - type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT + type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT # type: ignore data: str @@ -95,11 +95,11 @@ class MultiModalPromptMessageContent(PromptMessageContent): class VideoPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO + type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO # type: ignore class AudioPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO + type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO # type: ignore class ImagePromptMessageContent(MultiModalPromptMessageContent): @@ -111,12 +111,12 @@ class ImagePromptMessageContent(MultiModalPromptMessageContent): LOW = auto() HIGH = auto() - type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE + type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE # type: ignore detail: DETAIL = DETAIL.LOW class DocumentPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT + type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT # type: ignore PromptMessageContentUnionTypes = Annotated[ diff --git a/api/core/model_runtime/entities/provider_entities.py b/api/core/model_runtime/entities/provider_entities.py index 2ccc9e0eae..0508116962 100644 --- a/api/core/model_runtime/entities/provider_entities.py +++ b/api/core/model_runtime/entities/provider_entities.py @@ -1,13 +1,13 @@ from collections.abc import Sequence -from enum import Enum, StrEnum, auto +from enum import StrEnum, auto -from pydantic import 
BaseModel, ConfigDict, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.model_entities import AIModelEntity, ModelType -class ConfigurateMethod(Enum): +class ConfigurateMethod(StrEnum): """ Enum class for configurate method of provider model. """ @@ -46,10 +46,11 @@ class FormOption(BaseModel): value: str show_on: list[FormShowOnObject] = [] - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _(self): if not self.label: self.label = I18nObject(en_US=self.value) + return self class CredentialFormSchema(BaseModel): diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py index 23d36c03af..3967acf07b 100644 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py +++ b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py @@ -15,7 +15,7 @@ class GPT2Tokenizer: use gpt2 tokenizer to get num tokens """ _tokenizer = GPT2Tokenizer.get_encoder() - tokens = _tokenizer.encode(text) + tokens = _tokenizer.encode(text) # type: ignore return len(tokens) @staticmethod diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index e070c17abd..e1afc41bee 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -269,17 +269,17 @@ class ModelProviderFactory: } if model_type == ModelType.LLM: - return LargeLanguageModel(**init_params) # type: ignore + return LargeLanguageModel.model_validate(init_params) elif model_type == ModelType.TEXT_EMBEDDING: - return TextEmbeddingModel(**init_params) # type: ignore + return TextEmbeddingModel.model_validate(init_params) elif model_type == ModelType.RERANK: - return RerankModel(**init_params) # type: ignore + return RerankModel.model_validate(init_params) elif model_type == ModelType.SPEECH2TEXT: - return Speech2TextModel(**init_params) # type: ignore + return Speech2TextModel.model_validate(init_params) elif model_type == ModelType.MODERATION: - return ModerationModel(**init_params) # type: ignore + return ModerationModel.model_validate(init_params) elif model_type == ModelType.TTS: - return TTSModel(**init_params) # type: ignore + return TTSModel.model_validate(init_params) def get_provider_icon(self, provider: str, icon_type: str, lang: str) -> tuple[bytes, str]: """ diff --git a/api/core/model_runtime/utils/encoders.py b/api/core/model_runtime/utils/encoders.py index c758eaf49f..c85152463e 100644 --- a/api/core/model_runtime/utils/encoders.py +++ b/api/core/model_runtime/utils/encoders.py @@ -196,15 +196,15 @@ def jsonable_encoder( return encoder(obj) try: - data = dict(obj) + data = dict(obj) # type: ignore except Exception as e: errors: list[Exception] = [] errors.append(e) try: - data = vars(obj) + data = vars(obj) # type: ignore except Exception as e: errors.append(e) - raise ValueError(errors) from e + raise ValueError(str(errors)) from e return jsonable_encoder( data, by_alias=by_alias, diff --git a/api/core/moderation/api/api.py b/api/core/moderation/api/api.py index 573f4ec2a7..2d72b17a04 100644 --- a/api/core/moderation/api/api.py +++ b/api/core/moderation/api/api.py @@ -51,7 +51,7 @@ class ApiModeration(Moderation): params = 
ModerationInputParams(app_id=self.app_id, inputs=inputs, query=query) result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_INPUT, params.model_dump()) - return ModerationInputsResult(**result) + return ModerationInputsResult.model_validate(result) return ModerationInputsResult( flagged=flagged, action=ModerationAction.DIRECT_OUTPUT, preset_response=preset_response @@ -67,7 +67,7 @@ class ApiModeration(Moderation): params = ModerationOutputParams(app_id=self.app_id, text=text) result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_OUTPUT, params.model_dump()) - return ModerationOutputsResult(**result) + return ModerationOutputsResult.model_validate(result) return ModerationOutputsResult( flagged=flagged, action=ModerationAction.DIRECT_OUTPUT, preset_response=preset_response diff --git a/api/core/ops/aliyun_trace/aliyun_trace.py b/api/core/ops/aliyun_trace/aliyun_trace.py index c0727326ce..a7d8576d8d 100644 --- a/api/core/ops/aliyun_trace/aliyun_trace.py +++ b/api/core/ops/aliyun_trace/aliyun_trace.py @@ -14,12 +14,12 @@ from core.ops.aliyun_trace.data_exporter.traceclient import ( from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData, TraceMetadata from core.ops.aliyun_trace.entities.semconv import ( GEN_AI_COMPLETION, - GEN_AI_MODEL_NAME, + GEN_AI_INPUT_MESSAGE, + GEN_AI_OUTPUT_MESSAGE, GEN_AI_PROMPT, - GEN_AI_PROMPT_TEMPLATE_TEMPLATE, - GEN_AI_PROMPT_TEMPLATE_VARIABLE, + GEN_AI_PROVIDER_NAME, + GEN_AI_REQUEST_MODEL, GEN_AI_RESPONSE_FINISH_REASON, - GEN_AI_SYSTEM, GEN_AI_USAGE_INPUT_TOKENS, GEN_AI_USAGE_OUTPUT_TOKENS, GEN_AI_USAGE_TOTAL_TOKENS, @@ -35,6 +35,9 @@ from core.ops.aliyun_trace.utils import ( create_links_from_trace_id, create_status_from_error, extract_retrieval_documents, + format_input_messages, + format_output_messages, + format_retrieval_documents, get_user_id_from_message_data, get_workflow_node_status, serialize_json_data, @@ -151,10 +154,6 @@ class AliyunDataTrace(BaseTraceInstance): ) self.trace_client.add_span(message_span) - app_model_config = getattr(message_data, "app_model_config", {}) - pre_prompt = getattr(app_model_config, "pre_prompt", "") - inputs_data = getattr(message_data, "inputs", {}) - llm_span = SpanData( trace_id=trace_metadata.trace_id, parent_span_id=message_span_id, @@ -170,13 +169,11 @@ class AliyunDataTrace(BaseTraceInstance): inputs=inputs_json, outputs=outputs_str, ), - GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", - GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", + GEN_AI_REQUEST_MODEL: trace_info.metadata.get("ls_model_name") or "", + GEN_AI_PROVIDER_NAME: trace_info.metadata.get("ls_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(trace_info.message_tokens), GEN_AI_USAGE_OUTPUT_TOKENS: str(trace_info.answer_tokens), GEN_AI_USAGE_TOTAL_TOKENS: str(trace_info.total_tokens), - GEN_AI_PROMPT_TEMPLATE_VARIABLE: serialize_json_data(inputs_data), - GEN_AI_PROMPT_TEMPLATE_TEMPLATE: pre_prompt, GEN_AI_PROMPT: inputs_json, GEN_AI_COMPLETION: outputs_str, }, @@ -364,6 +361,10 @@ class AliyunDataTrace(BaseTraceInstance): input_value = str(node_execution.inputs.get("query", "")) if node_execution.inputs else "" output_value = serialize_json_data(node_execution.outputs.get("result", [])) if node_execution.outputs else "" + retrieval_documents = node_execution.outputs.get("result", []) if node_execution.outputs else [] + semantic_retrieval_documents = format_retrieval_documents(retrieval_documents) + semantic_retrieval_documents_json = 
serialize_json_data(semantic_retrieval_documents) + return SpanData( trace_id=trace_metadata.trace_id, parent_span_id=trace_metadata.workflow_span_id, @@ -380,7 +381,7 @@ class AliyunDataTrace(BaseTraceInstance): outputs=output_value, ), RETRIEVAL_QUERY: input_value, - RETRIEVAL_DOCUMENT: output_value, + RETRIEVAL_DOCUMENT: semantic_retrieval_documents_json, }, status=get_workflow_node_status(node_execution), links=trace_metadata.links, @@ -396,6 +397,9 @@ class AliyunDataTrace(BaseTraceInstance): prompts_json = serialize_json_data(process_data.get("prompts", [])) text_output = str(outputs.get("text", "")) + gen_ai_input_message = format_input_messages(process_data) + gen_ai_output_message = format_output_messages(outputs) + return SpanData( trace_id=trace_metadata.trace_id, parent_span_id=trace_metadata.workflow_span_id, @@ -411,14 +415,16 @@ class AliyunDataTrace(BaseTraceInstance): inputs=prompts_json, outputs=text_output, ), - GEN_AI_MODEL_NAME: process_data.get("model_name") or "", - GEN_AI_SYSTEM: process_data.get("model_provider") or "", + GEN_AI_REQUEST_MODEL: process_data.get("model_name") or "", + GEN_AI_PROVIDER_NAME: process_data.get("model_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)), GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)), GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)), GEN_AI_PROMPT: prompts_json, GEN_AI_COMPLETION: text_output, GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason") or "", + GEN_AI_INPUT_MESSAGE: gen_ai_input_message, + GEN_AI_OUTPUT_MESSAGE: gen_ai_output_message, }, status=get_workflow_node_status(node_execution), links=trace_metadata.links, @@ -502,8 +508,8 @@ class AliyunDataTrace(BaseTraceInstance): inputs=inputs_json, outputs=suggested_question_json, ), - GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", - GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", + GEN_AI_REQUEST_MODEL: trace_info.metadata.get("ls_model_name") or "", + GEN_AI_PROVIDER_NAME: trace_info.metadata.get("ls_provider") or "", GEN_AI_PROMPT: inputs_json, GEN_AI_COMPLETION: suggested_question_json, }, diff --git a/api/core/ops/aliyun_trace/data_exporter/traceclient.py b/api/core/ops/aliyun_trace/data_exporter/traceclient.py index f54405b5de..5aa9fb6689 100644 --- a/api/core/ops/aliyun_trace/data_exporter/traceclient.py +++ b/api/core/ops/aliyun_trace/data_exporter/traceclient.py @@ -7,7 +7,7 @@ import uuid from collections import deque from collections.abc import Sequence from datetime import datetime -from typing import Final +from typing import Final, cast from urllib.parse import urljoin import httpx @@ -199,7 +199,7 @@ def convert_to_trace_id(uuid_v4: str | None) -> int: raise ValueError("UUID cannot be None") try: uuid_obj = uuid.UUID(uuid_v4) - return uuid_obj.int + return cast(int, uuid_obj.int) except ValueError as e: raise ValueError(f"Invalid UUID input: {uuid_v4}") from e diff --git a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py index 0ee71fc23f..20ff2d0875 100644 --- a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py +++ b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py @@ -3,7 +3,8 @@ from dataclasses import dataclass from typing import Any from opentelemetry import trace as trace_api -from opentelemetry.sdk.trace import Event, Status, StatusCode +from opentelemetry.sdk.trace import Event +from opentelemetry.trace import Status, StatusCode from pydantic import 
BaseModel, Field diff --git a/api/core/ops/aliyun_trace/entities/semconv.py b/api/core/ops/aliyun_trace/entities/semconv.py index 7a22db21e2..c823fcab8a 100644 --- a/api/core/ops/aliyun_trace/entities/semconv.py +++ b/api/core/ops/aliyun_trace/entities/semconv.py @@ -17,17 +17,18 @@ RETRIEVAL_QUERY: Final[str] = "retrieval.query" RETRIEVAL_DOCUMENT: Final[str] = "retrieval.document" # LLM attributes -GEN_AI_MODEL_NAME: Final[str] = "gen_ai.model_name" -GEN_AI_SYSTEM: Final[str] = "gen_ai.system" +GEN_AI_REQUEST_MODEL: Final[str] = "gen_ai.request.model" +GEN_AI_PROVIDER_NAME: Final[str] = "gen_ai.provider.name" GEN_AI_USAGE_INPUT_TOKENS: Final[str] = "gen_ai.usage.input_tokens" GEN_AI_USAGE_OUTPUT_TOKENS: Final[str] = "gen_ai.usage.output_tokens" GEN_AI_USAGE_TOTAL_TOKENS: Final[str] = "gen_ai.usage.total_tokens" -GEN_AI_PROMPT_TEMPLATE_TEMPLATE: Final[str] = "gen_ai.prompt_template.template" -GEN_AI_PROMPT_TEMPLATE_VARIABLE: Final[str] = "gen_ai.prompt_template.variable" GEN_AI_PROMPT: Final[str] = "gen_ai.prompt" GEN_AI_COMPLETION: Final[str] = "gen_ai.completion" GEN_AI_RESPONSE_FINISH_REASON: Final[str] = "gen_ai.response.finish_reason" +GEN_AI_INPUT_MESSAGE: Final[str] = "gen_ai.input.messages" +GEN_AI_OUTPUT_MESSAGE: Final[str] = "gen_ai.output.messages" + # Tool attributes TOOL_NAME: Final[str] = "tool.name" TOOL_DESCRIPTION: Final[str] = "tool.description" diff --git a/api/core/ops/aliyun_trace/utils.py b/api/core/ops/aliyun_trace/utils.py index 2ec9e75dcd..7f68889e92 100644 --- a/api/core/ops/aliyun_trace/utils.py +++ b/api/core/ops/aliyun_trace/utils.py @@ -1,4 +1,5 @@ import json +from collections.abc import Mapping from typing import Any from opentelemetry.trace import Link, Status, StatusCode @@ -93,3 +94,97 @@ def create_common_span_attributes( INPUT_VALUE: inputs, OUTPUT_VALUE: outputs, } + + +def format_retrieval_documents(retrieval_documents: list) -> list: + try: + if not isinstance(retrieval_documents, list): + return [] + + semantic_documents = [] + for doc in retrieval_documents: + if not isinstance(doc, dict): + continue + + metadata = doc.get("metadata", {}) + content = doc.get("content", "") + title = doc.get("title", "") + score = metadata.get("score", 0.0) + document_id = metadata.get("document_id", "") + + semantic_metadata = {} + if title: + semantic_metadata["title"] = title + if metadata.get("source"): + semantic_metadata["source"] = metadata["source"] + elif metadata.get("_source"): + semantic_metadata["source"] = metadata["_source"] + if metadata.get("doc_metadata"): + doc_metadata = metadata["doc_metadata"] + if isinstance(doc_metadata, dict): + semantic_metadata.update(doc_metadata) + + semantic_doc = { + "document": {"content": content, "metadata": semantic_metadata, "score": score, "id": document_id} + } + semantic_documents.append(semantic_doc) + + return semantic_documents + except Exception: + return [] + + +def format_input_messages(process_data: Mapping[str, Any]) -> str: + try: + if not isinstance(process_data, dict): + return serialize_json_data([]) + + prompts = process_data.get("prompts", []) + if not prompts: + return serialize_json_data([]) + + valid_roles = {"system", "user", "assistant", "tool"} + input_messages = [] + for prompt in prompts: + if not isinstance(prompt, dict): + continue + + role = prompt.get("role", "") + text = prompt.get("text", "") + + if not role or role not in valid_roles: + continue + + if text: + message = {"role": role, "parts": [{"type": "text", "content": text}]} + input_messages.append(message) + + return 
serialize_json_data(input_messages) + except Exception: + return serialize_json_data([]) + + +def format_output_messages(outputs: Mapping[str, Any]) -> str: + try: + if not isinstance(outputs, dict): + return serialize_json_data([]) + + text = outputs.get("text", "") + finish_reason = outputs.get("finish_reason", "") + + if not text: + return serialize_json_data([]) + + valid_finish_reasons = {"stop", "length", "content_filter", "tool_call", "error"} + if finish_reason not in valid_finish_reasons: + finish_reason = "stop" + + output_message = { + "role": "assistant", + "parts": [{"type": "text", "content": text}], + "finish_reason": finish_reason, + } + + return serialize_json_data([output_message]) + except Exception: + return serialize_json_data([]) diff --git a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py index 1497bc1863..03d2d75372 100644 --- a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py +++ b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py @@ -213,9 +213,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance): node_metadata.update(json.loads(node_execution.execution_metadata)) # Determine the correct span kind based on node type - span_kind = OpenInferenceSpanKindValues.CHAIN.value + span_kind = OpenInferenceSpanKindValues.CHAIN if node_execution.node_type == "llm": - span_kind = OpenInferenceSpanKindValues.LLM.value + span_kind = OpenInferenceSpanKindValues.LLM provider = process_data.get("model_provider") model = process_data.get("model_name") if provider: @@ -230,18 +230,18 @@ class ArizePhoenixDataTrace(BaseTraceInstance): node_metadata["prompt_tokens"] = usage_data.get("prompt_tokens", 0) node_metadata["completion_tokens"] = usage_data.get("completion_tokens", 0) elif node_execution.node_type == "dataset_retrieval": - span_kind = OpenInferenceSpanKindValues.RETRIEVER.value + span_kind = OpenInferenceSpanKindValues.RETRIEVER elif node_execution.node_type == "tool": - span_kind = OpenInferenceSpanKindValues.TOOL.value + span_kind = OpenInferenceSpanKindValues.TOOL else: - span_kind = OpenInferenceSpanKindValues.CHAIN.value + span_kind = OpenInferenceSpanKindValues.CHAIN node_span = self.tracer.start_span( name=node_execution.node_type, attributes={ SpanAttributes.INPUT_VALUE: node_execution.inputs or "{}", SpanAttributes.OUTPUT_VALUE: node_execution.outputs or "{}", - SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind, + SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind.value, SpanAttributes.METADATA: json.dumps(node_metadata, ensure_ascii=False), SpanAttributes.SESSION_ID: trace_info.conversation_id or "", }, diff --git a/api/core/ops/entities/config_entity.py b/api/core/ops/entities/config_entity.py index 4ba6eb0780..f9b8d41e0a 100644 --- a/api/core/ops/entities/config_entity.py +++ b/api/core/ops/entities/config_entity.py @@ -13,6 +13,7 @@ class TracingProviderEnum(StrEnum): OPIK = "opik" WEAVE = "weave" ALIYUN = "aliyun" + TENCENT = "tencent" class BaseTracingConfig(BaseModel): @@ -195,5 +196,32 @@ class AliyunConfig(BaseTracingConfig): return validate_url_with_path(v, "https://tracing-analysis-dc-hz.aliyuncs.com") +class TencentConfig(BaseTracingConfig): + """ + Tencent APM tracing config + """ + + token: str + endpoint: str + service_name: str + + @field_validator("token") + @classmethod + def token_validator(cls, v, info: ValidationInfo): + if not v or v.strip() == "": + raise ValueError("Token cannot be empty") + return v + + @field_validator("endpoint") + @classmethod + def 
endpoint_validator(cls, v, info: ValidationInfo): + return cls.validate_endpoint_url(v, "https://apm.tencentcloudapi.com") + + @field_validator("service_name") + @classmethod + def service_name_validator(cls, v, info: ValidationInfo): + return cls.validate_project_field(v, "dify_app") + + OPS_FILE_PATH = "ops_trace/" OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE" diff --git a/api/core/ops/entities/trace_entity.py b/api/core/ops/entities/trace_entity.py index b8a25c5d7d..5b81c09a2d 100644 --- a/api/core/ops/entities/trace_entity.py +++ b/api/core/ops/entities/trace_entity.py @@ -90,6 +90,7 @@ class SuggestedQuestionTraceInfo(BaseTraceInfo): class DatasetRetrievalTraceInfo(BaseTraceInfo): documents: Any = None + error: str | None = None class ToolTraceInfo(BaseTraceInfo): diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index 931bed78d4..92e6b8ea60 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -73,7 +73,7 @@ class LangFuseDataTrace(BaseTraceInstance): if trace_info.message_id: trace_id = trace_info.trace_id or trace_info.message_id - name = TraceTaskName.MESSAGE_TRACE.value + name = TraceTaskName.MESSAGE_TRACE trace_data = LangfuseTrace( id=trace_id, user_id=user_id, @@ -88,7 +88,7 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_trace(langfuse_trace_data=trace_data) workflow_span_data = LangfuseSpan( id=trace_info.workflow_run_id, - name=TraceTaskName.WORKFLOW_TRACE.value, + name=TraceTaskName.WORKFLOW_TRACE, input=dict(trace_info.workflow_run_inputs), output=dict(trace_info.workflow_run_outputs), trace_id=trace_id, @@ -103,7 +103,7 @@ class LangFuseDataTrace(BaseTraceInstance): trace_data = LangfuseTrace( id=trace_id, user_id=user_id, - name=TraceTaskName.WORKFLOW_TRACE.value, + name=TraceTaskName.WORKFLOW_TRACE, input=dict(trace_info.workflow_run_inputs), output=dict(trace_info.workflow_run_outputs), metadata=metadata, @@ -253,7 +253,7 @@ class LangFuseDataTrace(BaseTraceInstance): trace_data = LangfuseTrace( id=trace_id, user_id=user_id, - name=TraceTaskName.MESSAGE_TRACE.value, + name=TraceTaskName.MESSAGE_TRACE, input={ "message": trace_info.inputs, "files": file_list, @@ -303,7 +303,7 @@ class LangFuseDataTrace(BaseTraceInstance): if trace_info.message_data is None: return span_data = LangfuseSpan( - name=TraceTaskName.MODERATION_TRACE.value, + name=TraceTaskName.MODERATION_TRACE, input=trace_info.inputs, output={ "action": trace_info.action, @@ -331,7 +331,7 @@ class LangFuseDataTrace(BaseTraceInstance): ) generation_data = LangfuseGeneration( - name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value, + name=TraceTaskName.SUGGESTED_QUESTION_TRACE, input=trace_info.inputs, output=str(trace_info.suggested_question), trace_id=trace_info.trace_id or trace_info.message_id, @@ -349,7 +349,7 @@ class LangFuseDataTrace(BaseTraceInstance): if trace_info.message_data is None: return dataset_retrieval_span_data = LangfuseSpan( - name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value, + name=TraceTaskName.DATASET_RETRIEVAL_TRACE, input=trace_info.inputs, output={"documents": trace_info.documents}, trace_id=trace_info.trace_id or trace_info.message_id, @@ -377,7 +377,7 @@ class LangFuseDataTrace(BaseTraceInstance): def generate_name_trace(self, trace_info: GenerateNameTraceInfo): name_generation_trace_data = LangfuseTrace( - name=TraceTaskName.GENERATE_NAME_TRACE.value, + name=TraceTaskName.GENERATE_NAME_TRACE, input=trace_info.inputs, output=trace_info.outputs, 
user_id=trace_info.tenant_id, @@ -388,7 +388,7 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_trace(langfuse_trace_data=name_generation_trace_data) name_generation_span_data = LangfuseSpan( - name=TraceTaskName.GENERATE_NAME_TRACE.value, + name=TraceTaskName.GENERATE_NAME_TRACE, input=trace_info.inputs, output=trace_info.outputs, trace_id=trace_info.conversation_id, diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py index 24a43e1cd8..8b8117b24c 100644 --- a/api/core/ops/langsmith_trace/langsmith_trace.py +++ b/api/core/ops/langsmith_trace/langsmith_trace.py @@ -81,7 +81,7 @@ class LangSmithDataTrace(BaseTraceInstance): if trace_info.message_id: message_run = LangSmithRunModel( id=trace_info.message_id, - name=TraceTaskName.MESSAGE_TRACE.value, + name=TraceTaskName.MESSAGE_TRACE, inputs=dict(trace_info.workflow_run_inputs), outputs=dict(trace_info.workflow_run_outputs), run_type=LangSmithRunType.chain, @@ -110,7 +110,7 @@ class LangSmithDataTrace(BaseTraceInstance): file_list=trace_info.file_list, total_tokens=trace_info.total_tokens, id=trace_info.workflow_run_id, - name=TraceTaskName.WORKFLOW_TRACE.value, + name=TraceTaskName.WORKFLOW_TRACE, inputs=dict(trace_info.workflow_run_inputs), run_type=LangSmithRunType.tool, start_time=trace_info.workflow_data.created_at, @@ -271,7 +271,7 @@ class LangSmithDataTrace(BaseTraceInstance): output_tokens=trace_info.answer_tokens, total_tokens=trace_info.total_tokens, id=message_id, - name=TraceTaskName.MESSAGE_TRACE.value, + name=TraceTaskName.MESSAGE_TRACE, inputs=trace_info.inputs, run_type=LangSmithRunType.chain, start_time=trace_info.start_time, @@ -327,7 +327,7 @@ class LangSmithDataTrace(BaseTraceInstance): if trace_info.message_data is None: return langsmith_run = LangSmithRunModel( - name=TraceTaskName.MODERATION_TRACE.value, + name=TraceTaskName.MODERATION_TRACE, inputs=trace_info.inputs, outputs={ "action": trace_info.action, @@ -362,7 +362,7 @@ class LangSmithDataTrace(BaseTraceInstance): if message_data is None: return suggested_question_run = LangSmithRunModel( - name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value, + name=TraceTaskName.SUGGESTED_QUESTION_TRACE, inputs=trace_info.inputs, outputs=trace_info.suggested_question, run_type=LangSmithRunType.tool, @@ -391,7 +391,7 @@ class LangSmithDataTrace(BaseTraceInstance): if trace_info.message_data is None: return dataset_retrieval_run = LangSmithRunModel( - name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value, + name=TraceTaskName.DATASET_RETRIEVAL_TRACE, inputs=trace_info.inputs, outputs={"documents": trace_info.documents}, run_type=LangSmithRunType.retriever, @@ -447,7 +447,7 @@ class LangSmithDataTrace(BaseTraceInstance): def generate_name_trace(self, trace_info: GenerateNameTraceInfo): name_run = LangSmithRunModel( - name=TraceTaskName.GENERATE_NAME_TRACE.value, + name=TraceTaskName.GENERATE_NAME_TRACE, inputs=trace_info.inputs, outputs=trace_info.outputs, run_type=LangSmithRunType.tool, diff --git a/api/core/ops/opik_trace/opik_trace.py b/api/core/ops/opik_trace/opik_trace.py index 8fa92f9fcd..8050c59db9 100644 --- a/api/core/ops/opik_trace/opik_trace.py +++ b/api/core/ops/opik_trace/opik_trace.py @@ -108,7 +108,7 @@ class OpikDataTrace(BaseTraceInstance): trace_data = { "id": opik_trace_id, - "name": TraceTaskName.MESSAGE_TRACE.value, + "name": TraceTaskName.MESSAGE_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": workflow_metadata, @@ -125,7 +125,7 @@ class 
OpikDataTrace(BaseTraceInstance): "id": root_span_id, "parent_span_id": None, "trace_id": opik_trace_id, - "name": TraceTaskName.WORKFLOW_TRACE.value, + "name": TraceTaskName.WORKFLOW_TRACE, "input": wrap_dict("input", trace_info.workflow_run_inputs), "output": wrap_dict("output", trace_info.workflow_run_outputs), "start_time": trace_info.start_time, @@ -138,7 +138,7 @@ class OpikDataTrace(BaseTraceInstance): else: trace_data = { "id": opik_trace_id, - "name": TraceTaskName.MESSAGE_TRACE.value, + "name": TraceTaskName.MESSAGE_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": workflow_metadata, @@ -290,7 +290,7 @@ class OpikDataTrace(BaseTraceInstance): trace_data = { "id": prepare_opik_uuid(trace_info.start_time, dify_trace_id), - "name": TraceTaskName.MESSAGE_TRACE.value, + "name": TraceTaskName.MESSAGE_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": wrap_metadata(metadata), @@ -329,7 +329,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.MODERATION_TRACE.value, + "name": TraceTaskName.MODERATION_TRACE, "type": "tool", "start_time": start_time, "end_time": trace_info.end_time or trace_info.message_data.updated_at, @@ -355,7 +355,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.SUGGESTED_QUESTION_TRACE.value, + "name": TraceTaskName.SUGGESTED_QUESTION_TRACE, "type": "tool", "start_time": start_time, "end_time": trace_info.end_time or message_data.updated_at, @@ -375,7 +375,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.DATASET_RETRIEVAL_TRACE.value, + "name": TraceTaskName.DATASET_RETRIEVAL_TRACE, "type": "tool", "start_time": start_time, "end_time": trace_info.end_time or trace_info.message_data.updated_at, @@ -405,7 +405,7 @@ class OpikDataTrace(BaseTraceInstance): def generate_name_trace(self, trace_info: GenerateNameTraceInfo): trace_data = { "id": prepare_opik_uuid(trace_info.start_time, trace_info.trace_id or trace_info.message_id), - "name": TraceTaskName.GENERATE_NAME_TRACE.value, + "name": TraceTaskName.GENERATE_NAME_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": wrap_metadata(trace_info.metadata), @@ -420,7 +420,7 @@ class OpikDataTrace(BaseTraceInstance): span_data = { "trace_id": trace.id, - "name": TraceTaskName.GENERATE_NAME_TRACE.value, + "name": TraceTaskName.GENERATE_NAME_TRACE, "start_time": trace_info.start_time, "end_time": trace_info.end_time, "metadata": wrap_metadata(trace_info.metadata), diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 0679b27271..7db9b076d2 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -120,6 +120,17 @@ class OpsTraceProviderConfigMap(collections.UserDict[str, dict[str, Any]]): "trace_instance": AliyunDataTrace, } + case TracingProviderEnum.TENCENT: + from core.ops.entities.config_entity import TencentConfig + from core.ops.tencent_trace.tencent_trace import TencentDataTrace + + return { + "config_class": TencentConfig, + "secret_keys": ["token"], + "other_keys": ["endpoint", "service_name"], + "trace_instance": TencentDataTrace, + } + case _: raise KeyError(f"Unsupported tracing 
provider: {provider}") @@ -155,7 +166,10 @@ class OpsTraceManager: if key in tracing_config: if "*" in tracing_config[key]: # If the key contains '*', retain the original value from the current config - new_config[key] = current_trace_config.get(key, tracing_config[key]) + if current_trace_config: + new_config[key] = current_trace_config.get(key, tracing_config[key]) + else: + new_config[key] = tracing_config[key] else: # Otherwise, encrypt the key new_config[key] = encrypt_token(tenant_id, tracing_config[key]) @@ -720,6 +734,7 @@ class TraceTask: end_time=timer.get("end"), metadata=metadata, message_data=message_data.to_dict(), + error=kwargs.get("error"), ) return dataset_retrieval_trace_info @@ -886,6 +901,7 @@ class TraceQueueManager: continue file_id = uuid4().hex trace_info = task.execute() + task_data = TaskData( app_id=task.app_id, trace_info_type=type(trace_info).__name__, @@ -897,4 +913,4 @@ class TraceQueueManager: "file_id": file_id, "app_id": task.app_id, } - process_trace_tasks.delay(file_info) + process_trace_tasks.delay(file_info) # type: ignore diff --git a/docker/volumes/sandbox/dependencies/python-requirements.txt b/api/core/ops/tencent_trace/__init__.py similarity index 100% rename from docker/volumes/sandbox/dependencies/python-requirements.txt rename to api/core/ops/tencent_trace/__init__.py diff --git a/api/core/ops/tencent_trace/client.py b/api/core/ops/tencent_trace/client.py new file mode 100644 index 0000000000..270732aa02 --- /dev/null +++ b/api/core/ops/tencent_trace/client.py @@ -0,0 +1,337 @@ +""" +Tencent APM Trace Client - handles network operations, metrics, and API communication +""" + +from __future__ import annotations + +import importlib +import logging +import os +import socket +from typing import TYPE_CHECKING +from urllib.parse import urlparse + +if TYPE_CHECKING: + from opentelemetry.metrics import Meter + from opentelemetry.metrics._internal.instrument import Histogram + from opentelemetry.sdk.metrics.export import MetricReader + +from opentelemetry import trace as trace_api +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.semconv.resource import ResourceAttributes +from opentelemetry.trace import SpanKind +from opentelemetry.util.types import AttributeValue + +from configs import dify_config + +from .entities.tencent_semconv import LLM_OPERATION_DURATION +from .entities.tencent_trace_entity import SpanData + +logger = logging.getLogger(__name__) + + +class TencentTraceClient: + """Tencent APM trace client using OpenTelemetry OTLP exporter""" + + def __init__( + self, + service_name: str, + endpoint: str, + token: str, + max_queue_size: int = 1000, + schedule_delay_sec: int = 5, + max_export_batch_size: int = 50, + metrics_export_interval_sec: int = 10, + ): + self.endpoint = endpoint + self.token = token + self.service_name = service_name + self.metrics_export_interval_sec = metrics_export_interval_sec + + self.resource = Resource( + attributes={ + ResourceAttributes.SERVICE_NAME: service_name, + ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}", + ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}", + ResourceAttributes.HOST_NAME: socket.gethostname(), + } + ) + # Prepare gRPC endpoint/metadata + grpc_endpoint, insecure, _, _ = 
self._resolve_grpc_target(endpoint) + + headers = (("authorization", f"Bearer {token}"),) + + self.exporter = OTLPSpanExporter( + endpoint=grpc_endpoint, + headers=headers, + insecure=insecure, + timeout=30, + ) + + self.tracer_provider = TracerProvider(resource=self.resource) + self.span_processor = BatchSpanProcessor( + span_exporter=self.exporter, + max_queue_size=max_queue_size, + schedule_delay_millis=schedule_delay_sec * 1000, + max_export_batch_size=max_export_batch_size, + ) + self.tracer_provider.add_span_processor(self.span_processor) + + self.tracer = self.tracer_provider.get_tracer("dify.tencent_apm") + + # Store span contexts for parent-child relationships + self.span_contexts: dict[int, trace_api.SpanContext] = {} + + self.meter: Meter | None = None + self.hist_llm_duration: Histogram | None = None + self.metric_reader: MetricReader | None = None + + # Metrics exporter and instruments + try: + from opentelemetry import metrics + from opentelemetry.sdk.metrics import Histogram, MeterProvider + from opentelemetry.sdk.metrics.export import AggregationTemporality, PeriodicExportingMetricReader + + protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "").strip().lower() + use_http_protobuf = protocol in {"http/protobuf", "http-protobuf"} + use_http_json = protocol in {"http/json", "http-json"} + + # Set preferred temporality for histograms to DELTA + preferred_temporality: dict[type, AggregationTemporality] = {Histogram: AggregationTemporality.DELTA} + + def _create_metric_exporter(exporter_cls, **kwargs): + """Create metric exporter with preferred_temporality support""" + try: + return exporter_cls(**kwargs, preferred_temporality=preferred_temporality) + except Exception: + return exporter_cls(**kwargs) + + metric_reader = None + if use_http_json: + exporter_cls = None + for mod_path in ( + "opentelemetry.exporter.otlp.http.json.metric_exporter", + "opentelemetry.exporter.otlp.json.metric_exporter", + ): + try: + mod = importlib.import_module(mod_path) + exporter_cls = getattr(mod, "OTLPMetricExporter", None) + if exporter_cls: + break + except Exception: + continue + if exporter_cls is not None: + metric_exporter = _create_metric_exporter( + exporter_cls, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + else: + from opentelemetry.exporter.otlp.proto.http.metric_exporter import ( + OTLPMetricExporter as HttpMetricExporter, + ) + + metric_exporter = _create_metric_exporter( + HttpMetricExporter, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + + elif use_http_protobuf: + from opentelemetry.exporter.otlp.proto.http.metric_exporter import ( + OTLPMetricExporter as HttpMetricExporter, + ) + + metric_exporter = _create_metric_exporter( + HttpMetricExporter, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + else: + from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( + OTLPMetricExporter as GrpcMetricExporter, + ) + + m_grpc_endpoint, m_insecure, _, _ = self._resolve_grpc_target(endpoint) + + metric_exporter = _create_metric_exporter( + GrpcMetricExporter, + endpoint=m_grpc_endpoint, + headers={"authorization": f"Bearer {token}"}, + insecure=m_insecure, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, 
export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + + if metric_reader is not None: + provider = MeterProvider(resource=self.resource, metric_readers=[metric_reader]) + metrics.set_meter_provider(provider) + self.meter = metrics.get_meter("dify-sdk", dify_config.project.version) + self.hist_llm_duration = self.meter.create_histogram( + name=LLM_OPERATION_DURATION, + unit="s", + description="LLM operation duration (seconds)", + ) + self.metric_reader = metric_reader + else: + self.meter = None + self.hist_llm_duration = None + self.metric_reader = None + except Exception: + logger.exception("[Tencent APM] Metrics initialization failed; metrics disabled") + self.meter = None + self.hist_llm_duration = None + self.metric_reader = None + + def add_span(self, span_data: SpanData) -> None: + """Create and export span using OpenTelemetry Tracer API""" + try: + self._create_and_export_span(span_data) + logger.debug("[Tencent APM] Created span: %s", span_data.name) + + except Exception: + logger.exception("[Tencent APM] Failed to create span: %s", span_data.name) + + # Metrics recording API + def record_llm_duration(self, latency_seconds: float, attributes: dict[str, str] | None = None) -> None: + """Record LLM operation duration histogram in seconds.""" + try: + if not hasattr(self, "hist_llm_duration") or self.hist_llm_duration is None: + return + attrs: dict[str, str] = {} + if attributes: + for k, v in attributes.items(): + attrs[k] = str(v) if not isinstance(v, (str, int, float, bool)) else v # type: ignore[assignment] + self.hist_llm_duration.record(latency_seconds, attrs) # type: ignore[attr-defined] + except Exception: + logger.debug("[Tencent APM] Failed to record LLM duration", exc_info=True) + + def _create_and_export_span(self, span_data: SpanData) -> None: + """Create span using OpenTelemetry Tracer API""" + try: + parent_context = None + if span_data.parent_span_id and span_data.parent_span_id in self.span_contexts: + parent_context = trace_api.set_span_in_context( + trace_api.NonRecordingSpan(self.span_contexts[span_data.parent_span_id]) + ) + + span = self.tracer.start_span( + name=span_data.name, + context=parent_context, + kind=SpanKind.INTERNAL, + start_time=span_data.start_time, + ) + self.span_contexts[span_data.span_id] = span.get_span_context() + + if span_data.attributes: + attributes: dict[str, AttributeValue] = {} + for key, value in span_data.attributes.items(): + if isinstance(value, (int, float, bool)): + attributes[key] = value + else: + attributes[key] = str(value) + span.set_attributes(attributes) + + if span_data.events: + for event in span_data.events: + span.add_event(event.name, event.attributes, event.timestamp) + + if span_data.status: + span.set_status(span_data.status) + + # Manually end span; do not use context manager to avoid double-end warnings + span.end(end_time=span_data.end_time) + + except Exception: + logger.exception("[Tencent APM] Error creating span: %s", span_data.name) + + def api_check(self) -> bool: + """Check API connectivity using socket connection test for gRPC endpoints""" + try: + # Resolve gRPC target consistently with exporters + _, _, host, port = self._resolve_grpc_target(self.endpoint) + + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.settimeout(5) + result = sock.connect_ex((host, port)) + sock.close() + + if result == 0: + logger.info("[Tencent APM] Endpoint %s:%s is accessible", host, port) + return True + else: + logger.warning("[Tencent APM] Endpoint %s:%s is not accessible", host, port) + if 
host in ["127.0.0.1", "localhost"]: + logger.info("[Tencent APM] Development environment detected, allowing config save") + return True + return False + + except Exception: + logger.exception("[Tencent APM] API check failed") + if "127.0.0.1" in self.endpoint or "localhost" in self.endpoint: + return True + return False + + def get_project_url(self) -> str: + """Get project console URL""" + return "https://console.cloud.tencent.com/apm" + + def shutdown(self) -> None: + """Shutdown the client and export remaining spans""" + try: + if self.span_processor: + logger.info("[Tencent APM] Flushing remaining spans before shutdown") + _ = self.span_processor.force_flush() + self.span_processor.shutdown() + + if self.tracer_provider: + self.tracer_provider.shutdown() + if self.metric_reader is not None: + try: + self.metric_reader.shutdown() # type: ignore[attr-defined] + except Exception: + pass + + except Exception: + logger.exception("[Tencent APM] Error during client shutdown") + + @staticmethod + def _resolve_grpc_target(endpoint: str, default_port: int = 4317) -> tuple[str, bool, str, int]: + """Normalize endpoint to gRPC target and security flag. + + Returns: + (grpc_endpoint, insecure, host, port) + """ + try: + if endpoint.startswith(("http://", "https://")): + parsed = urlparse(endpoint) + host = parsed.hostname or "localhost" + port = parsed.port or default_port + insecure = parsed.scheme == "http" + return f"{host}:{port}", insecure, host, port + + host = endpoint + port = default_port + if ":" in endpoint: + parts = endpoint.rsplit(":", 1) + host = parts[0] or "localhost" + try: + port = int(parts[1]) + except Exception: + port = default_port + + insecure = ("localhost" in host) or ("127.0.0.1" in host) + return f"{host}:{port}", insecure, host, port + except Exception: + host, port = "localhost", default_port + return f"{host}:{port}", True, host, port diff --git a/api/core/ops/tencent_trace/entities/__init__.py b/api/core/ops/tencent_trace/entities/__init__.py new file mode 100644 index 0000000000..b1602628ed --- /dev/null +++ b/api/core/ops/tencent_trace/entities/__init__.py @@ -0,0 +1 @@ +# Tencent trace entities module diff --git a/api/core/ops/tencent_trace/entities/tencent_semconv.py b/api/core/ops/tencent_trace/entities/tencent_semconv.py new file mode 100644 index 0000000000..5ea6eeacef --- /dev/null +++ b/api/core/ops/tencent_trace/entities/tencent_semconv.py @@ -0,0 +1,73 @@ +from enum import Enum + +# public +GEN_AI_SESSION_ID = "gen_ai.session.id" + +GEN_AI_USER_ID = "gen_ai.user.id" + +GEN_AI_USER_NAME = "gen_ai.user.name" + +GEN_AI_SPAN_KIND = "gen_ai.span.kind" + +GEN_AI_FRAMEWORK = "gen_ai.framework" + +GEN_AI_IS_ENTRY = "gen_ai.is_entry" # mark to count the LLM-related traces + +# Chain +INPUT_VALUE = "gen_ai.entity.input" + +OUTPUT_VALUE = "gen_ai.entity.output" + + +# Retriever +RETRIEVAL_QUERY = "retrieval.query" + +RETRIEVAL_DOCUMENT = "retrieval.document" + + +# GENERATION +GEN_AI_MODEL_NAME = "gen_ai.response.model" + +GEN_AI_PROVIDER = "gen_ai.provider.name" + + +GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens" + +GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens" + +GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens" + +GEN_AI_PROMPT_TEMPLATE_TEMPLATE = "gen_ai.prompt_template.template" + +GEN_AI_PROMPT_TEMPLATE_VARIABLE = "gen_ai.prompt_template.variable" + +GEN_AI_PROMPT = "gen_ai.prompt" + +GEN_AI_COMPLETION = "gen_ai.completion" + +GEN_AI_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason" + +# Tool +TOOL_NAME = "tool.name" + 
+TOOL_DESCRIPTION = "tool.description" + +TOOL_PARAMETERS = "tool.parameters" + +# Instrumentation Library +INSTRUMENTATION_NAME = "dify-sdk" +INSTRUMENTATION_VERSION = "0.1.0" +INSTRUMENTATION_LANGUAGE = "python" + + +# Metrics +LLM_OPERATION_DURATION = "gen_ai.client.operation.duration" + + +class GenAISpanKind(Enum): + WORKFLOW = "WORKFLOW" # OpenLLMetry + RETRIEVER = "RETRIEVER" # RAG + GENERATION = "GENERATION" # Langfuse + TOOL = "TOOL" # OpenLLMetry + AGENT = "AGENT" # OpenLLMetry + TASK = "TASK" # OpenLLMetry diff --git a/api/core/ops/tencent_trace/entities/tencent_trace_entity.py b/api/core/ops/tencent_trace/entities/tencent_trace_entity.py new file mode 100644 index 0000000000..428850f109 --- /dev/null +++ b/api/core/ops/tencent_trace/entities/tencent_trace_entity.py @@ -0,0 +1,21 @@ +from collections.abc import Sequence + +from opentelemetry import trace as trace_api +from opentelemetry.sdk.trace import Event +from opentelemetry.trace import Status, StatusCode +from pydantic import BaseModel, Field + + +class SpanData(BaseModel): + model_config = {"arbitrary_types_allowed": True} + + trace_id: int = Field(..., description="The unique identifier for the trace.") + parent_span_id: int | None = Field(None, description="The ID of the parent span, if any.") + span_id: int = Field(..., description="The unique identifier for this span.") + name: str = Field(..., description="The name of the span.") + attributes: dict[str, str] = Field(default_factory=dict, description="Attributes associated with the span.") + events: Sequence[Event] = Field(default_factory=list, description="Events recorded in the span.") + links: Sequence[trace_api.Link] = Field(default_factory=list, description="Links to other spans.") + status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.") + start_time: int = Field(..., description="The start time of the span in nanoseconds.") + end_time: int = Field(..., description="The end time of the span in nanoseconds.") diff --git a/api/core/ops/tencent_trace/span_builder.py b/api/core/ops/tencent_trace/span_builder.py new file mode 100644 index 0000000000..5ba592290d --- /dev/null +++ b/api/core/ops/tencent_trace/span_builder.py @@ -0,0 +1,372 @@ +""" +Tencent APM Span Builder - handles all span construction logic +""" + +import json +import logging +from datetime import datetime + +from opentelemetry.trace import Status, StatusCode + +from core.ops.entities.trace_entity import ( + DatasetRetrievalTraceInfo, + MessageTraceInfo, + ToolTraceInfo, + WorkflowTraceInfo, +) +from core.ops.tencent_trace.entities.tencent_semconv import ( + GEN_AI_COMPLETION, + GEN_AI_FRAMEWORK, + GEN_AI_IS_ENTRY, + GEN_AI_MODEL_NAME, + GEN_AI_PROMPT, + GEN_AI_PROVIDER, + GEN_AI_RESPONSE_FINISH_REASON, + GEN_AI_SESSION_ID, + GEN_AI_SPAN_KIND, + GEN_AI_USAGE_INPUT_TOKENS, + GEN_AI_USAGE_OUTPUT_TOKENS, + GEN_AI_USAGE_TOTAL_TOKENS, + GEN_AI_USER_ID, + INPUT_VALUE, + OUTPUT_VALUE, + RETRIEVAL_DOCUMENT, + RETRIEVAL_QUERY, + TOOL_DESCRIPTION, + TOOL_NAME, + TOOL_PARAMETERS, + GenAISpanKind, +) +from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData +from core.ops.tencent_trace.utils import TencentTraceUtils +from core.rag.models.document import Document +from core.workflow.entities.workflow_node_execution import ( + WorkflowNodeExecution, + WorkflowNodeExecutionMetadataKey, + WorkflowNodeExecutionStatus, +) + +logger = logging.getLogger(__name__) + + +class TencentSpanBuilder: + """Builder class for constructing different types of spans""" + + 
@staticmethod + def _get_time_nanoseconds(time_value: datetime | None) -> int: + """Convert datetime to nanoseconds for span creation.""" + return TencentTraceUtils.convert_datetime_to_nanoseconds(time_value) + + @staticmethod + def build_workflow_spans( + trace_info: WorkflowTraceInfo, trace_id: int, user_id: str, links: list | None = None + ) -> list[SpanData]: + """Build workflow-related spans""" + spans = [] + links = links or [] + + message_span_id = None + workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow") + + if hasattr(trace_info, "metadata") and trace_info.metadata.get("conversation_id"): + message_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "message") + + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + if message_span_id: + message_span = TencentSpanBuilder._build_message_span( + trace_info, trace_id, message_span_id, user_id, status, links + ) + spans.append(message_span) + + workflow_span = TencentSpanBuilder._build_workflow_span( + trace_info, trace_id, workflow_span_id, message_span_id, user_id, status, links + ) + spans.append(workflow_span) + + return spans + + @staticmethod + def _build_message_span( + trace_info: WorkflowTraceInfo, trace_id: int, message_span_id: int, user_id: str, status: Status, links: list + ) -> SpanData: + """Build message span for chatflow""" + return SpanData( + trace_id=trace_id, + parent_span_id=None, + span_id=message_span_id, + name="message", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_IS_ENTRY: "true", + INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query", ""), + OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), + }, + status=status, + links=links, + ) + + @staticmethod + def _build_workflow_span( + trace_info: WorkflowTraceInfo, + trace_id: int, + workflow_span_id: int, + message_span_id: int | None, + user_id: str, + status: Status, + links: list, + ) -> SpanData: + """Build workflow span""" + attributes = { + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), + } + + if message_span_id is None: + attributes[GEN_AI_IS_ENTRY] = "true" + + return SpanData( + trace_id=trace_id, + parent_span_id=message_span_id, + span_id=workflow_span_id, + name="workflow", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes=attributes, + status=status, + links=links, + ) + + @staticmethod + def build_workflow_llm_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build LLM span for workflow nodes.""" + process_data = node_execution.process_data or {} + outputs = node_execution.outputs or {} + usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {}) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + 
span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name="GENERATION", + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.GENERATION.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_MODEL_NAME: process_data.get("model_name", ""), + GEN_AI_PROVIDER: process_data.get("model_provider", ""), + GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)), + GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)), + GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)), + GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False), + GEN_AI_COMPLETION: str(outputs.get("text", "")), + GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason", ""), + INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False), + OUTPUT_VALUE: str(outputs.get("text", "")), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_message_span( + trace_info: MessageTraceInfo, trace_id: int, user_id: str, links: list | None = None + ) -> SpanData: + """Build message span.""" + links = links or [] + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + return SpanData( + trace_id=trace_id, + parent_span_id=None, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message"), + name="message", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_IS_ENTRY: "true", + INPUT_VALUE: str(trace_info.inputs or ""), + OUTPUT_VALUE: str(trace_info.outputs or ""), + }, + status=status, + links=links, + ) + + @staticmethod + def build_tool_span(trace_info: ToolTraceInfo, trace_id: int, parent_span_id: int) -> SpanData: + """Build tool span.""" + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + return SpanData( + trace_id=trace_id, + parent_span_id=parent_span_id, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "tool"), + name=trace_info.tool_name, + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, + GEN_AI_FRAMEWORK: "dify", + TOOL_NAME: trace_info.tool_name, + TOOL_DESCRIPTION: "", + TOOL_PARAMETERS: json.dumps(trace_info.tool_parameters, ensure_ascii=False), + INPUT_VALUE: json.dumps(trace_info.tool_inputs, ensure_ascii=False), + OUTPUT_VALUE: str(trace_info.tool_outputs), + }, + status=status, + ) + + @staticmethod + def build_retrieval_span(trace_info: DatasetRetrievalTraceInfo, trace_id: int, parent_span_id: int) -> SpanData: + """Build dataset retrieval span.""" + status = Status(StatusCode.OK) + if getattr(trace_info, "error", None): + status = Status(StatusCode.ERROR, trace_info.error) # type: ignore[arg-type] + + documents_data = TencentSpanBuilder._extract_retrieval_documents(trace_info.documents) + + return SpanData( + trace_id=trace_id, + 
parent_span_id=parent_span_id, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "retrieval"), + name="retrieval", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, + GEN_AI_FRAMEWORK: "dify", + RETRIEVAL_QUERY: str(trace_info.inputs or ""), + RETRIEVAL_DOCUMENT: json.dumps(documents_data, ensure_ascii=False), + INPUT_VALUE: str(trace_info.inputs or ""), + OUTPUT_VALUE: json.dumps(documents_data, ensure_ascii=False), + }, + status=status, + ) + + @staticmethod + def _get_workflow_node_status(node_execution: WorkflowNodeExecution) -> Status: + """Get workflow node execution status.""" + if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED: + return Status(StatusCode.OK) + elif node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]: + return Status(StatusCode.ERROR, str(node_execution.error)) + return Status(StatusCode.UNSET) + + @staticmethod + def build_workflow_retrieval_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build knowledge retrieval span for workflow nodes.""" + input_value = "" + if node_execution.inputs: + input_value = str(node_execution.inputs.get("query", "")) + output_value = "" + if node_execution.outputs: + output_value = json.dumps(node_execution.outputs.get("result", []), ensure_ascii=False) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, + GEN_AI_FRAMEWORK: "dify", + RETRIEVAL_QUERY: input_value, + RETRIEVAL_DOCUMENT: output_value, + INPUT_VALUE: input_value, + OUTPUT_VALUE: output_value, + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_workflow_tool_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build tool span for workflow nodes.""" + tool_des = {} + if node_execution.metadata: + tool_des = node_execution.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {}) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, + GEN_AI_FRAMEWORK: "dify", + TOOL_NAME: node_execution.title, + TOOL_DESCRIPTION: json.dumps(tool_des, ensure_ascii=False), + TOOL_PARAMETERS: json.dumps(node_execution.inputs or {}, ensure_ascii=False), + INPUT_VALUE: json.dumps(node_execution.inputs or {}, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_workflow_task_span( + trace_id: int, 
workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build generic task span for workflow nodes.""" + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value, + GEN_AI_FRAMEWORK: "dify", + INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def _extract_retrieval_documents(documents: list[Document]): + """Extract documents data for retrieval tracing.""" + documents_data = [] + for document in documents: + document_data = { + "content": document.page_content, + "metadata": { + "dataset_id": document.metadata.get("dataset_id"), + "doc_id": document.metadata.get("doc_id"), + "document_id": document.metadata.get("document_id"), + }, + "score": document.metadata.get("score"), + } + documents_data.append(document_data) + return documents_data diff --git a/api/core/ops/tencent_trace/tencent_trace.py b/api/core/ops/tencent_trace/tencent_trace.py new file mode 100644 index 0000000000..5ef1c61b24 --- /dev/null +++ b/api/core/ops/tencent_trace/tencent_trace.py @@ -0,0 +1,317 @@ +""" +Tencent APM tracing implementation with separated concerns +""" + +import logging + +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker + +from core.ops.base_trace_instance import BaseTraceInstance +from core.ops.entities.config_entity import TencentConfig +from core.ops.entities.trace_entity import ( + BaseTraceInfo, + DatasetRetrievalTraceInfo, + GenerateNameTraceInfo, + MessageTraceInfo, + ModerationTraceInfo, + SuggestedQuestionTraceInfo, + ToolTraceInfo, + WorkflowTraceInfo, +) +from core.ops.tencent_trace.client import TencentTraceClient +from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData +from core.ops.tencent_trace.span_builder import TencentSpanBuilder +from core.ops.tencent_trace.utils import TencentTraceUtils +from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository +from core.workflow.entities.workflow_node_execution import ( + WorkflowNodeExecution, +) +from core.workflow.nodes import NodeType +from extensions.ext_database import db +from models import Account, App, TenantAccountJoin, WorkflowNodeExecutionTriggeredFrom + +logger = logging.getLogger(__name__) + + +class TencentDataTrace(BaseTraceInstance): + """ + Tencent APM trace implementation with single responsibility principle. + Acts as a coordinator that delegates specific tasks to specialized classes. 
+ """ + + def __init__(self, tencent_config: TencentConfig): + super().__init__(tencent_config) + self.trace_client = TencentTraceClient( + service_name=tencent_config.service_name, + endpoint=tencent_config.endpoint, + token=tencent_config.token, + metrics_export_interval_sec=5, + ) + + def trace(self, trace_info: BaseTraceInfo) -> None: + """Main tracing entry point - coordinates different trace types.""" + if isinstance(trace_info, WorkflowTraceInfo): + self.workflow_trace(trace_info) + elif isinstance(trace_info, MessageTraceInfo): + self.message_trace(trace_info) + elif isinstance(trace_info, ModerationTraceInfo): + pass + elif isinstance(trace_info, SuggestedQuestionTraceInfo): + self.suggested_question_trace(trace_info) + elif isinstance(trace_info, DatasetRetrievalTraceInfo): + self.dataset_retrieval_trace(trace_info) + elif isinstance(trace_info, ToolTraceInfo): + self.tool_trace(trace_info) + elif isinstance(trace_info, GenerateNameTraceInfo): + pass + + def api_check(self) -> bool: + return self.trace_client.api_check() + + def get_project_url(self) -> str: + return self.trace_client.get_project_url() + + def workflow_trace(self, trace_info: WorkflowTraceInfo) -> None: + """Handle workflow tracing by coordinating data retrieval and span construction.""" + try: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.workflow_run_id) + + links = [] + if trace_info.trace_id: + links.append(TencentTraceUtils.create_link(trace_info.trace_id)) + + user_id = self._get_user_id(trace_info) + + workflow_spans = TencentSpanBuilder.build_workflow_spans(trace_info, trace_id, str(user_id), links) + + for span in workflow_spans: + self.trace_client.add_span(span) + + self._process_workflow_nodes(trace_info, trace_id) + + except Exception: + logger.exception("[Tencent APM] Failed to process workflow trace") + + def message_trace(self, trace_info: MessageTraceInfo) -> None: + """Handle message tracing.""" + try: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.message_id) + user_id = self._get_user_id(trace_info) + + links = [] + if trace_info.trace_id: + links.append(TencentTraceUtils.create_link(trace_info.trace_id)) + + message_span = TencentSpanBuilder.build_message_span(trace_info, trace_id, str(user_id), links) + + self.trace_client.add_span(message_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process message trace") + + def tool_trace(self, trace_info: ToolTraceInfo) -> None: + """Handle tool tracing.""" + try: + parent_span_id = None + trace_root_id = None + + if trace_info.message_id: + parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message") + trace_root_id = trace_info.message_id + + if parent_span_id and trace_root_id: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id) + + tool_span = TencentSpanBuilder.build_tool_span(trace_info, trace_id, parent_span_id) + + self.trace_client.add_span(tool_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process tool trace") + + def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo) -> None: + """Handle dataset retrieval tracing.""" + try: + parent_span_id = None + trace_root_id = None + + if trace_info.message_id: + parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message") + trace_root_id = trace_info.message_id + + if parent_span_id and trace_root_id: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id) + + retrieval_span = 
TencentSpanBuilder.build_retrieval_span(trace_info, trace_id, parent_span_id) + + self.trace_client.add_span(retrieval_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process dataset retrieval trace") + + def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo) -> None: + """Handle suggested question tracing""" + try: + logger.info("[Tencent APM] Processing suggested question trace") + + except Exception: + logger.exception("[Tencent APM] Failed to process suggested question trace") + + def _process_workflow_nodes(self, trace_info: WorkflowTraceInfo, trace_id: int) -> None: + """Process workflow node executions.""" + try: + workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow") + + node_executions = self._get_workflow_node_executions(trace_info) + + for node_execution in node_executions: + try: + node_span = self._build_workflow_node_span(node_execution, trace_id, trace_info, workflow_span_id) + if node_span: + self.trace_client.add_span(node_span) + + if node_execution.node_type == NodeType.LLM: + self._record_llm_metrics(node_execution) + except Exception: + logger.exception("[Tencent APM] Failed to process node execution: %s", node_execution.id) + + except Exception: + logger.exception("[Tencent APM] Failed to process workflow nodes") + + def _build_workflow_node_span( + self, node_execution: WorkflowNodeExecution, trace_id: int, trace_info: WorkflowTraceInfo, workflow_span_id: int + ) -> SpanData | None: + """Build span for different node types""" + try: + if node_execution.node_type == NodeType.LLM: + return TencentSpanBuilder.build_workflow_llm_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + elif node_execution.node_type == NodeType.KNOWLEDGE_RETRIEVAL: + return TencentSpanBuilder.build_workflow_retrieval_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + elif node_execution.node_type == NodeType.TOOL: + return TencentSpanBuilder.build_workflow_tool_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + else: + # Handle all other node types as generic tasks + return TencentSpanBuilder.build_workflow_task_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + except Exception: + logger.debug( + "[Tencent APM] Error building span for node %s: %s", + node_execution.id, + node_execution.node_type, + exc_info=True, + ) + return None + + def _get_workflow_node_executions(self, trace_info: WorkflowTraceInfo) -> list[WorkflowNodeExecution]: + """Retrieve workflow node executions from database.""" + try: + session_maker = sessionmaker(bind=db.engine) + + with Session(db.engine, expire_on_commit=False) as session: + app_id = trace_info.metadata.get("app_id") + if not app_id: + raise ValueError("No app_id found in trace_info metadata") + + app_stmt = select(App).where(App.id == app_id) + app = session.scalar(app_stmt) + if not app: + raise ValueError(f"App with id {app_id} not found") + + if not app.created_by: + raise ValueError(f"App with id {app_id} has no creator") + + account_stmt = select(Account).where(Account.id == app.created_by) + service_account = session.scalar(account_stmt) + if not service_account: + raise ValueError(f"Creator account not found for app {app_id}") + + current_tenant = ( + session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first() + ) + if not current_tenant: + raise ValueError(f"Current tenant not found for account {service_account.id}") + + 
service_account.set_tenant_id(current_tenant.tenant_id) + + repository = SQLAlchemyWorkflowNodeExecutionRepository( + session_factory=session_maker, + user=service_account, + app_id=trace_info.metadata.get("app_id"), + triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN, + ) + + executions = repository.get_by_workflow_run(workflow_run_id=trace_info.workflow_run_id) + return list(executions) + + except Exception: + logger.exception("[Tencent APM] Failed to get workflow node executions") + return [] + + def _get_user_id(self, trace_info: BaseTraceInfo) -> str: + """Get user ID from trace info.""" + try: + tenant_id = None + user_id = None + + if isinstance(trace_info, (WorkflowTraceInfo, GenerateNameTraceInfo)): + tenant_id = trace_info.tenant_id + + if hasattr(trace_info, "metadata") and trace_info.metadata: + user_id = trace_info.metadata.get("user_id") + + if user_id and tenant_id: + stmt = ( + select(Account.name) + .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) + .where(Account.id == user_id, TenantAccountJoin.tenant_id == tenant_id) + ) + + session_maker = sessionmaker(bind=db.engine) + with session_maker() as session: + account_name = session.scalar(stmt) + return account_name or str(user_id) + elif user_id: + return str(user_id) + + return "anonymous" + + except Exception: + logger.exception("[Tencent APM] Failed to get user ID") + return "unknown" + + def _record_llm_metrics(self, node_execution: WorkflowNodeExecution) -> None: + """Record LLM performance metrics""" + try: + if not hasattr(self.trace_client, "record_llm_duration"): + return + + process_data = node_execution.process_data or {} + usage = process_data.get("usage", {}) + latency_s = float(usage.get("latency", 0.0)) + + if latency_s > 0: + attributes = { + "provider": process_data.get("model_provider", ""), + "model": process_data.get("model_name", ""), + "span_kind": "GENERATION", + } + self.trace_client.record_llm_duration(latency_s, attributes) + + except Exception: + logger.debug("[Tencent APM] Failed to record LLM metrics") + + def __del__(self): + """Ensure proper cleanup on garbage collection.""" + try: + if hasattr(self, "trace_client"): + self.trace_client.shutdown() + except Exception: + pass diff --git a/api/core/ops/tencent_trace/utils.py b/api/core/ops/tencent_trace/utils.py new file mode 100644 index 0000000000..96087951ab --- /dev/null +++ b/api/core/ops/tencent_trace/utils.py @@ -0,0 +1,65 @@ +""" +Utility functions for Tencent APM tracing +""" + +import hashlib +import random +import uuid +from datetime import datetime +from typing import cast + +from opentelemetry.trace import Link, SpanContext, TraceFlags + + +class TencentTraceUtils: + """Utility class for common tracing operations.""" + + INVALID_SPAN_ID = 0x0000000000000000 + INVALID_TRACE_ID = 0x00000000000000000000000000000000 + + @staticmethod + def convert_to_trace_id(uuid_v4: str | None) -> int: + try: + uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4() + except Exception as e: + raise ValueError(f"Invalid UUID input: {e}") + return cast(int, uuid_obj.int) + + @staticmethod + def convert_to_span_id(uuid_v4: str | None, span_type: str) -> int: + try: + uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4() + except Exception as e: + raise ValueError(f"Invalid UUID input: {e}") + combined_key = f"{uuid_obj.hex}-{span_type}" + hash_bytes = hashlib.sha256(combined_key.encode("utf-8")).digest() + return int.from_bytes(hash_bytes[:8], byteorder="big", signed=False) + + @staticmethod + def 
generate_span_id() -> int: + span_id = random.getrandbits(64) + while span_id == TencentTraceUtils.INVALID_SPAN_ID: + span_id = random.getrandbits(64) + return span_id + + @staticmethod + def convert_datetime_to_nanoseconds(start_time: datetime | None) -> int: + if start_time is None: + start_time = datetime.now() + timestamp_in_seconds = start_time.timestamp() + return int(timestamp_in_seconds * 1e9) + + @staticmethod + def create_link(trace_id_str: str) -> Link: + try: + trace_id = int(trace_id_str, 16) if len(trace_id_str) == 32 else cast(int, uuid.UUID(trace_id_str).int) + except (ValueError, TypeError): + trace_id = cast(int, uuid.uuid4().int) + + span_context = SpanContext( + trace_id=trace_id, + span_id=TencentTraceUtils.INVALID_SPAN_ID, + is_remote=False, + trace_flags=TraceFlags(TraceFlags.SAMPLED), + ) + return Link(span_context) diff --git a/api/core/ops/weave_trace/weave_trace.py b/api/core/ops/weave_trace/weave_trace.py index 339694cf07..9b3d7a8192 100644 --- a/api/core/ops/weave_trace/weave_trace.py +++ b/api/core/ops/weave_trace/weave_trace.py @@ -62,7 +62,8 @@ class WeaveDataTrace(BaseTraceInstance): self, ): try: - project_url = f"https://wandb.ai/{self.weave_client._project_id()}" + project_identifier = f"{self.entity}/{self.project_name}" if self.entity else self.project_name + project_url = f"https://wandb.ai/{project_identifier}" return project_url except Exception as e: logger.debug("Weave get run url failed: %s", str(e)) @@ -103,7 +104,7 @@ class WeaveDataTrace(BaseTraceInstance): message_run = WeaveTraceModel( id=trace_info.message_id, - op=str(TraceTaskName.MESSAGE_TRACE.value), + op=str(TraceTaskName.MESSAGE_TRACE), inputs=dict(trace_info.workflow_run_inputs), outputs=dict(trace_info.workflow_run_outputs), total_tokens=trace_info.total_tokens, @@ -125,7 +126,7 @@ class WeaveDataTrace(BaseTraceInstance): file_list=trace_info.file_list, total_tokens=trace_info.total_tokens, id=trace_info.workflow_run_id, - op=str(TraceTaskName.WORKFLOW_TRACE.value), + op=str(TraceTaskName.WORKFLOW_TRACE), inputs=dict(trace_info.workflow_run_inputs), outputs=dict(trace_info.workflow_run_outputs), attributes=workflow_attributes, @@ -252,7 +253,7 @@ class WeaveDataTrace(BaseTraceInstance): message_run = WeaveTraceModel( id=trace_id, - op=str(TraceTaskName.MESSAGE_TRACE.value), + op=str(TraceTaskName.MESSAGE_TRACE), input_tokens=trace_info.message_tokens, output_tokens=trace_info.answer_tokens, total_tokens=trace_info.total_tokens, @@ -299,7 +300,7 @@ class WeaveDataTrace(BaseTraceInstance): moderation_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.MODERATION_TRACE.value), + op=str(TraceTaskName.MODERATION_TRACE), inputs=trace_info.inputs, outputs={ "action": trace_info.action, @@ -329,7 +330,7 @@ class WeaveDataTrace(BaseTraceInstance): suggested_question_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.SUGGESTED_QUESTION_TRACE.value), + op=str(TraceTaskName.SUGGESTED_QUESTION_TRACE), inputs=trace_info.inputs, outputs=trace_info.suggested_question, attributes=attributes, @@ -354,7 +355,7 @@ class WeaveDataTrace(BaseTraceInstance): dataset_retrieval_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.DATASET_RETRIEVAL_TRACE.value), + op=str(TraceTaskName.DATASET_RETRIEVAL_TRACE), inputs=trace_info.inputs, outputs={"documents": trace_info.documents}, attributes=attributes, @@ -396,7 +397,7 @@ class WeaveDataTrace(BaseTraceInstance): name_run = WeaveTraceModel( id=str(uuid.uuid4()), - op=str(TraceTaskName.GENERATE_NAME_TRACE.value), 
+ op=str(TraceTaskName.GENERATE_NAME_TRACE), inputs=trace_info.inputs, outputs=trace_info.outputs, attributes=attributes, @@ -424,7 +425,23 @@ class WeaveDataTrace(BaseTraceInstance): raise ValueError(f"Weave API check failed: {str(e)}") def start_call(self, run_data: WeaveTraceModel, parent_run_id: str | None = None): - call = self.weave_client.create_call(op=run_data.op, inputs=run_data.inputs, attributes=run_data.attributes) + inputs = run_data.inputs + if inputs is None: + inputs = {} + elif not isinstance(inputs, dict): + inputs = {"inputs": str(inputs)} + + attributes = run_data.attributes + if attributes is None: + attributes = {} + elif not isinstance(attributes, dict): + attributes = {"attributes": str(attributes)} + + call = self.weave_client.create_call( + op=run_data.op, + inputs=inputs, + attributes=attributes, + ) self.calls[run_data.id] = call if parent_run_id: self.calls[run_data.id].parent_id = parent_run_id @@ -432,6 +449,7 @@ class WeaveDataTrace(BaseTraceInstance): def finish_call(self, run_data: WeaveTraceModel): call = self.calls.get(run_data.id) if call: - self.weave_client.finish_call(call=call, output=run_data.outputs, exception=run_data.exception) + exception = Exception(run_data.exception) if run_data.exception else None + self.weave_client.finish_call(call=call, output=run_data.outputs, exception=exception) else: raise ValueError(f"Call with id {run_data.id} not found") diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py index 8b08b09eb9..32ac132e1e 100644 --- a/api/core/plugin/backwards_invocation/app.py +++ b/api/core/plugin/backwards_invocation/app.py @@ -14,7 +14,7 @@ from core.app.apps.workflow.app_generator import WorkflowAppGenerator from core.app.entities.app_invoke_entities import InvokeFrom from core.plugin.backwards_invocation.base import BaseBackwardsInvocation from extensions.ext_database import db -from models.account import Account +from models import Account from models.model import App, AppMode, EndUser diff --git a/api/core/plugin/backwards_invocation/node.py b/api/core/plugin/backwards_invocation/node.py index 1d6d21cff7..9fbcbf55b4 100644 --- a/api/core/plugin/backwards_invocation/node.py +++ b/api/core/plugin/backwards_invocation/node.py @@ -52,7 +52,7 @@ class PluginNodeBackwardsInvocation(BaseBackwardsInvocation): instruction=instruction, # instruct with variables are not supported ) node_data_dict = node_data.model_dump() - node_data_dict["type"] = NodeType.PARAMETER_EXTRACTOR.value + node_data_dict["type"] = NodeType.PARAMETER_EXTRACTOR execution = workflow_service.run_free_workflow_node( node_data_dict, tenant_id=tenant_id, diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 10f37f75f8..d5df85730b 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -83,16 +83,16 @@ class RequestInvokeLLM(BaseRequestInvokeModel): raise ValueError("prompt_messages must be a list") for i in range(len(v)): - if v[i]["role"] == PromptMessageRole.USER.value: - v[i] = UserPromptMessage(**v[i]) - elif v[i]["role"] == PromptMessageRole.ASSISTANT.value: - v[i] = AssistantPromptMessage(**v[i]) - elif v[i]["role"] == PromptMessageRole.SYSTEM.value: - v[i] = SystemPromptMessage(**v[i]) - elif v[i]["role"] == PromptMessageRole.TOOL.value: - v[i] = ToolPromptMessage(**v[i]) + if v[i]["role"] == PromptMessageRole.USER: + v[i] = UserPromptMessage.model_validate(v[i]) + elif v[i]["role"] == PromptMessageRole.ASSISTANT: + v[i] = 
AssistantPromptMessage.model_validate(v[i]) + elif v[i]["role"] == PromptMessageRole.SYSTEM: + v[i] = SystemPromptMessage.model_validate(v[i]) + elif v[i]["role"] == PromptMessageRole.TOOL: + v[i] = ToolPromptMessage.model_validate(v[i]) else: - v[i] = PromptMessage(**v[i]) + v[i] = PromptMessage.model_validate(v[i]) return v diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py index 8e3df4da2c..5095b46432 100644 --- a/api/core/plugin/impl/base.py +++ b/api/core/plugin/impl/base.py @@ -2,11 +2,10 @@ import inspect import json import logging from collections.abc import Callable, Generator -from typing import TypeVar +from typing import Any, TypeVar, cast -import requests +import httpx from pydantic import BaseModel -from requests.exceptions import HTTPError from yarl import URL from configs import dify_config @@ -32,6 +31,17 @@ from core.plugin.impl.exc import ( ) plugin_daemon_inner_api_baseurl = URL(str(dify_config.PLUGIN_DAEMON_URL)) +_plugin_daemon_timeout_config = cast( + float | httpx.Timeout | None, + getattr(dify_config, "PLUGIN_DAEMON_TIMEOUT", 300.0), +) +plugin_daemon_request_timeout: httpx.Timeout | None +if _plugin_daemon_timeout_config is None: + plugin_daemon_request_timeout = None +elif isinstance(_plugin_daemon_timeout_config, httpx.Timeout): + plugin_daemon_request_timeout = _plugin_daemon_timeout_config +else: + plugin_daemon_request_timeout = httpx.Timeout(_plugin_daemon_timeout_config) T = TypeVar("T", bound=(BaseModel | dict | list | bool | str)) @@ -47,29 +57,57 @@ class BasePluginClient: data: bytes | dict | str | None = None, params: dict | None = None, files: dict | None = None, - stream: bool = False, - ) -> requests.Response: + ) -> httpx.Response: """ Make a request to the plugin daemon inner API. """ - url = plugin_daemon_inner_api_baseurl / path - headers = headers or {} - headers["X-Api-Key"] = dify_config.PLUGIN_DAEMON_KEY - headers["Accept-Encoding"] = "gzip, deflate, br" + url, headers, prepared_data, params, files = self._prepare_request(path, headers, data, params, files) - if headers.get("Content-Type") == "application/json" and isinstance(data, dict): - data = json.dumps(data) + request_kwargs: dict[str, Any] = { + "method": method, + "url": url, + "headers": headers, + "params": params, + "files": files, + "timeout": plugin_daemon_request_timeout, + } + if isinstance(prepared_data, dict): + request_kwargs["data"] = prepared_data + elif prepared_data is not None: + request_kwargs["content"] = prepared_data try: - response = requests.request( - method=method, url=str(url), headers=headers, data=data, params=params, stream=stream, files=files - ) - except requests.ConnectionError: + response = httpx.request(**request_kwargs) + except httpx.RequestError: logger.exception("Request to Plugin Daemon Service failed") raise PluginDaemonInnerError(code=-500, message="Request to Plugin Daemon Service failed") return response + def _prepare_request( + self, + path: str, + headers: dict | None, + data: bytes | dict | str | None, + params: dict | None, + files: dict | None, + ) -> tuple[str, dict, bytes | dict | str | None, dict | None, dict | None]: + url = plugin_daemon_inner_api_baseurl / path + prepared_headers = dict(headers or {}) + prepared_headers["X-Api-Key"] = dify_config.PLUGIN_DAEMON_KEY + prepared_headers.setdefault("Accept-Encoding", "gzip, deflate, br") + + prepared_data: bytes | dict | str | None = ( + data if isinstance(data, (bytes, str, dict)) or data is None else None + ) + if isinstance(data, dict): + if 
prepared_headers.get("Content-Type") == "application/json": + prepared_data = json.dumps(data) + else: + prepared_data = data + + return str(url), prepared_headers, prepared_data, params, files + def _stream_request( self, method: str, @@ -78,23 +116,45 @@ class BasePluginClient: headers: dict | None = None, data: bytes | dict | None = None, files: dict | None = None, - ) -> Generator[bytes, None, None]: + ) -> Generator[str, None, None]: """ Make a stream request to the plugin daemon inner API """ - response = self._request(method, path, headers, data, params, files, stream=True) - for line in response.iter_lines(chunk_size=1024 * 8): - line = line.decode("utf-8").strip() - if line.startswith("data:"): - line = line[5:].strip() - if line: - yield line + url, headers, prepared_data, params, files = self._prepare_request(path, headers, data, params, files) + + stream_kwargs: dict[str, Any] = { + "method": method, + "url": url, + "headers": headers, + "params": params, + "files": files, + "timeout": plugin_daemon_request_timeout, + } + if isinstance(prepared_data, dict): + stream_kwargs["data"] = prepared_data + elif prepared_data is not None: + stream_kwargs["content"] = prepared_data + + try: + with httpx.stream(**stream_kwargs) as response: + for raw_line in response.iter_lines(): + if raw_line is None: + continue + line = raw_line.decode("utf-8") if isinstance(raw_line, bytes) else raw_line + line = line.strip() + if line.startswith("data:"): + line = line[5:].strip() + if line: + yield line + except httpx.RequestError: + logger.exception("Stream request to Plugin Daemon Service failed") + raise PluginDaemonInnerError(code=-500, message="Request to Plugin Daemon Service failed") def _stream_request_with_model( self, method: str, path: str, - type: type[T], + type_: type[T], headers: dict | None = None, data: bytes | dict | None = None, params: dict | None = None, @@ -104,13 +164,13 @@ class BasePluginClient: Make a stream request to the plugin daemon inner API and yield the response as a model. """ for line in self._stream_request(method, path, params, headers, data, files): - yield type(**json.loads(line)) # type: ignore + yield type_(**json.loads(line)) # type: ignore def _request_with_model( self, method: str, path: str, - type: type[T], + type_: type[T], headers: dict | None = None, data: bytes | None = None, params: dict | None = None, @@ -120,13 +180,13 @@ class BasePluginClient: Make a request to the plugin daemon inner API and return the response as a model. 
""" response = self._request(method, path, headers, data, params, files) - return type(**response.json()) # type: ignore + return type_(**response.json()) # type: ignore def _request_with_plugin_daemon_response( self, method: str, path: str, - type: type[T], + type_: type[T], headers: dict | None = None, data: bytes | dict | None = None, params: dict | None = None, @@ -139,23 +199,23 @@ class BasePluginClient: try: response = self._request(method, path, headers, data, params, files) response.raise_for_status() - except HTTPError as e: - msg = f"Failed to request plugin daemon, status: {e.response.status_code}, url: {path}" - logger.exception(msg) + except httpx.HTTPStatusError as e: + logger.exception("Failed to request plugin daemon, status: %s, url: %s", e.response.status_code, path) raise e except Exception as e: msg = f"Failed to request plugin daemon, url: {path}" - logger.exception(msg) + logger.exception("Failed to request plugin daemon, url: %s", path) raise ValueError(msg) from e try: json_response = response.json() if transformer: json_response = transformer(json_response) - rep = PluginDaemonBasicResponse[type](**json_response) # type: ignore + # https://stackoverflow.com/questions/59634937/variable-foo-class-is-not-valid-as-type-but-why + rep = PluginDaemonBasicResponse[type_].model_validate(json_response) # type: ignore except Exception: msg = ( - f"Failed to parse response from plugin daemon to PluginDaemonBasicResponse [{str(type.__name__)}]," + f"Failed to parse response from plugin daemon to PluginDaemonBasicResponse [{str(type_.__name__)}]," f" url: {path}" ) logger.exception(msg) @@ -163,7 +223,7 @@ class BasePluginClient: if rep.code != 0: try: - error = PluginDaemonError(**json.loads(rep.message)) + error = PluginDaemonError.model_validate(json.loads(rep.message)) except Exception: raise ValueError(f"{rep.message}, code: {rep.code}") @@ -178,7 +238,7 @@ class BasePluginClient: self, method: str, path: str, - type: type[T], + type_: type[T], headers: dict | None = None, data: bytes | dict | None = None, params: dict | None = None, @@ -189,7 +249,7 @@ class BasePluginClient: """ for line in self._stream_request(method, path, params, headers, data, files): try: - rep = PluginDaemonBasicResponse[type].model_validate_json(line) # type: ignore + rep = PluginDaemonBasicResponse[type_].model_validate_json(line) # type: ignore except (ValueError, TypeError): # TODO modify this when line_data has code and message try: @@ -204,11 +264,11 @@ class BasePluginClient: if rep.code != 0: if rep.code == -500: try: - error = PluginDaemonError(**json.loads(rep.message)) + error = PluginDaemonError.model_validate(json.loads(rep.message)) except Exception: raise PluginDaemonInnerError(code=rep.code, message=rep.message) - logger.error("Error in stream reponse for plugin %s", rep.__dict__) + logger.error("Error in stream response for plugin %s", rep.__dict__) self._handle_plugin_daemon_error(error.error_type, error.message) raise ValueError(f"plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: diff --git a/api/core/plugin/impl/datasource.py b/api/core/plugin/impl/datasource.py index 84087f8104..ce1ef71494 100644 --- a/api/core/plugin/impl/datasource.py +++ b/api/core/plugin/impl/datasource.py @@ -46,7 +46,9 @@ class PluginDatasourceManager(BasePluginClient): params={"page": 1, "page_size": 256}, transformer=transformer, ) - local_file_datasource_provider = PluginDatasourceProviderEntity(**self._get_local_file_datasource_provider()) + local_file_datasource_provider = 
PluginDatasourceProviderEntity.model_validate( + self._get_local_file_datasource_provider() + ) for provider in response: ToolTransformService.repack_provider(tenant_id=tenant_id, provider=provider) @@ -104,7 +106,7 @@ class PluginDatasourceManager(BasePluginClient): Fetch datasource provider for the given tenant and plugin. """ if provider_id == "langgenius/file/file": - return PluginDatasourceProviderEntity(**self._get_local_file_datasource_provider()) + return PluginDatasourceProviderEntity.model_validate(self._get_local_file_datasource_provider()) tool_provider_id = DatasourceProviderID(provider_id) diff --git a/api/core/plugin/impl/model.py b/api/core/plugin/impl/model.py index 153da142f4..5dfc3c212e 100644 --- a/api/core/plugin/impl/model.py +++ b/api/core/plugin/impl/model.py @@ -162,7 +162,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/llm/invoke", - type=LLMResultChunk, + type_=LLMResultChunk, data=jsonable_encoder( { "user_id": user_id, @@ -208,7 +208,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/llm/num_tokens", - type=PluginLLMNumTokensResponse, + type_=PluginLLMNumTokensResponse, data=jsonable_encoder( { "user_id": user_id, @@ -250,7 +250,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/text_embedding/invoke", - type=TextEmbeddingResult, + type_=TextEmbeddingResult, data=jsonable_encoder( { "user_id": user_id, @@ -291,7 +291,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/text_embedding/num_tokens", - type=PluginTextEmbeddingNumTokensResponse, + type_=PluginTextEmbeddingNumTokensResponse, data=jsonable_encoder( { "user_id": user_id, @@ -334,7 +334,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/rerank/invoke", - type=RerankResult, + type_=RerankResult, data=jsonable_encoder( { "user_id": user_id, @@ -378,7 +378,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/tts/invoke", - type=PluginStringResultResponse, + type_=PluginStringResultResponse, data=jsonable_encoder( { "user_id": user_id, @@ -422,7 +422,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/tts/model/voices", - type=PluginVoicesResponse, + type_=PluginVoicesResponse, data=jsonable_encoder( { "user_id": user_id, @@ -466,7 +466,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/speech2text/invoke", - type=PluginStringResultResponse, + type_=PluginStringResultResponse, data=jsonable_encoder( { "user_id": user_id, @@ -506,7 +506,7 @@ class PluginModelClient(BasePluginClient): response = self._request_with_plugin_daemon_response_stream( method="POST", path=f"plugin/{tenant_id}/dispatch/moderation/invoke", - type=PluginBasicBooleanResponse, + type_=PluginBasicBooleanResponse, data=jsonable_encoder( { "user_id": user_id, diff --git 
a/api/core/plugin/utils/chunk_merger.py b/api/core/plugin/utils/chunk_merger.py index e30076f9d3..28cb70f96a 100644 --- a/api/core/plugin/utils/chunk_merger.py +++ b/api/core/plugin/utils/chunk_merger.py @@ -1,6 +1,6 @@ from collections.abc import Generator from dataclasses import dataclass, field -from typing import TypeVar, Union, cast +from typing import TypeVar, Union from core.agent.entities import AgentInvokeMessage from core.tools.entities.tool_entities import ToolInvokeMessage @@ -87,7 +87,8 @@ def merge_blob_chunks( ), meta=resp.meta, ) - yield cast(MessageType, merged_message) + assert isinstance(merged_message, (ToolInvokeMessage, AgentInvokeMessage)) + yield merged_message # type: ignore # Clean up the buffer del files[chunk_id] else: diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index 499d39bd5d..6cf6620d8d 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -610,7 +610,7 @@ class ProviderManager: provider_quota_to_provider_record_dict = {} for provider_record in provider_records: - if provider_record.provider_type != ProviderType.SYSTEM.value: + if provider_record.provider_type != ProviderType.SYSTEM: continue provider_quota_to_provider_record_dict[ProviderQuotaType.value_of(provider_record.quota_type)] = ( @@ -627,8 +627,8 @@ class ProviderManager: tenant_id=tenant_id, # TODO: Use provider name with prefix after the data migration. provider_name=ModelProviderID(provider_name).provider_name, - provider_type=ProviderType.SYSTEM.value, - quota_type=ProviderQuotaType.TRIAL.value, + provider_type=ProviderType.SYSTEM, + quota_type=ProviderQuotaType.TRIAL, quota_limit=quota.quota_limit, # type: ignore quota_used=0, is_valid=True, @@ -641,8 +641,8 @@ class ProviderManager: stmt = select(Provider).where( Provider.tenant_id == tenant_id, Provider.provider_name == ModelProviderID(provider_name).provider_name, - Provider.provider_type == ProviderType.SYSTEM.value, - Provider.quota_type == ProviderQuotaType.TRIAL.value, + Provider.provider_type == ProviderType.SYSTEM, + Provider.quota_type == ProviderQuotaType.TRIAL, ) existed_provider_record = db.session.scalar(stmt) if not existed_provider_record: @@ -702,7 +702,7 @@ class ProviderManager: """Get custom provider configuration.""" # Find custom provider record (non-system) custom_provider_record = next( - (record for record in provider_records if record.provider_type != ProviderType.SYSTEM.value), None + (record for record in provider_records if record.provider_type != ProviderType.SYSTEM), None ) if not custom_provider_record: @@ -905,7 +905,7 @@ class ProviderManager: # Convert provider_records to dict quota_type_to_provider_records_dict: dict[ProviderQuotaType, Provider] = {} for provider_record in provider_records: - if provider_record.provider_type != ProviderType.SYSTEM.value: + if provider_record.provider_type != ProviderType.SYSTEM: continue quota_type_to_provider_records_dict[ProviderQuotaType.value_of(provider_record.quota_type)] = ( @@ -1046,7 +1046,7 @@ class ProviderManager: """ secret_input_form_variables = [] for credential_form_schema in credential_form_schemas: - if credential_form_schema.type.value == FormType.SECRET_INPUT.value: + if credential_form_schema.type == FormType.SECRET_INPUT: secret_input_form_variables.append(credential_form_schema.variable) return secret_input_form_variables diff --git a/api/core/rag/data_post_processor/data_post_processor.py b/api/core/rag/data_post_processor/data_post_processor.py index 696e3e967f..cc946a72c3 100644 --- 
a/api/core/rag/data_post_processor/data_post_processor.py +++ b/api/core/rag/data_post_processor/data_post_processor.py @@ -46,7 +46,7 @@ class DataPostProcessor: reranking_model: dict | None = None, weights: dict | None = None, ) -> BaseRerankRunner | None: - if reranking_mode == RerankMode.WEIGHTED_SCORE.value and weights: + if reranking_mode == RerankMode.WEIGHTED_SCORE and weights: runner = RerankRunnerFactory.create_rerank_runner( runner_type=reranking_mode, tenant_id=tenant_id, @@ -62,7 +62,7 @@ class DataPostProcessor: ), ) return runner - elif reranking_mode == RerankMode.RERANKING_MODEL.value: + elif reranking_mode == RerankMode.RERANKING_MODEL: rerank_model_instance = self._get_rerank_model_instance(tenant_id, reranking_model) if rerank_model_instance is None: return None diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index 429744c0de..2290de19bc 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -21,7 +21,7 @@ from models.dataset import Document as DatasetDocument from services.external_knowledge_service import ExternalDatasetService default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 4, @@ -34,7 +34,7 @@ class RetrievalService: @classmethod def retrieve( cls, - retrieval_method: str, + retrieval_method: RetrievalMethod, dataset_id: str, query: str, top_k: int, @@ -56,7 +56,7 @@ class RetrievalService: # Optimize multithreading with thread pools with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor: # type: ignore futures = [] - if retrieval_method == "keyword_search": + if retrieval_method == RetrievalMethod.KEYWORD_SEARCH: futures.append( executor.submit( cls.keyword_search, @@ -106,7 +106,9 @@ class RetrievalService: if exceptions: raise ValueError(";\n".join(exceptions)) - if retrieval_method == RetrievalMethod.HYBRID_SEARCH.value: + # Deduplicate documents for hybrid search to avoid duplicate chunks + if retrieval_method == RetrievalMethod.HYBRID_SEARCH: + all_documents = cls._deduplicate_documents(all_documents) data_post_processor = DataPostProcessor( str(dataset.tenant_id), reranking_mode, reranking_model, weights, False ) @@ -132,7 +134,7 @@ class RetrievalService: if not dataset: return [] metadata_condition = ( - MetadataCondition(**metadata_filtering_conditions) if metadata_filtering_conditions else None + MetadataCondition.model_validate(metadata_filtering_conditions) if metadata_filtering_conditions else None ) all_documents = ExternalDatasetService.fetch_external_knowledge_retrieval( dataset.tenant_id, @@ -143,6 +145,40 @@ class RetrievalService: ) return all_documents + @classmethod + def _deduplicate_documents(cls, documents: list[Document]) -> list[Document]: + """Deduplicate documents based on doc_id to avoid duplicate chunks in hybrid search.""" + if not documents: + return documents + + unique_documents = [] + seen_doc_ids = set() + + for document in documents: + # For dify provider documents, use doc_id for deduplication + if document.provider == "dify" and document.metadata is not None and "doc_id" in document.metadata: + doc_id = document.metadata["doc_id"] + if doc_id not in seen_doc_ids: + seen_doc_ids.add(doc_id) + unique_documents.append(document) + # If duplicate, keep the one with higher score + elif 
"score" in document.metadata: + # Find existing document with same doc_id and compare scores + for i, existing_doc in enumerate(unique_documents): + if ( + existing_doc.metadata + and existing_doc.metadata.get("doc_id") == doc_id + and existing_doc.metadata.get("score", 0) < document.metadata.get("score", 0) + ): + unique_documents[i] = document + break + else: + # For non-dify documents, use content-based deduplication + if document not in unique_documents: + unique_documents.append(document) + + return unique_documents + @classmethod def _get_dataset(cls, dataset_id: str) -> Dataset | None: with Session(db.engine) as session: @@ -184,7 +220,7 @@ class RetrievalService: score_threshold: float | None, reranking_model: dict | None, all_documents: list, - retrieval_method: str, + retrieval_method: RetrievalMethod, exceptions: list, document_ids_filter: list[str] | None = None, ): @@ -209,10 +245,10 @@ class RetrievalService: reranking_model and reranking_model.get("reranking_model_name") and reranking_model.get("reranking_provider_name") - and retrieval_method == RetrievalMethod.SEMANTIC_SEARCH.value + and retrieval_method == RetrievalMethod.SEMANTIC_SEARCH ): data_post_processor = DataPostProcessor( - str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL.value), reranking_model, None, False + str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL), reranking_model, None, False ) all_documents.extend( data_post_processor.invoke( @@ -257,10 +293,10 @@ class RetrievalService: reranking_model and reranking_model.get("reranking_model_name") and reranking_model.get("reranking_provider_name") - and retrieval_method == RetrievalMethod.FULL_TEXT_SEARCH.value + and retrieval_method == RetrievalMethod.FULL_TEXT_SEARCH ): data_post_processor = DataPostProcessor( - str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL.value), reranking_model, None, False + str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL), reranking_model, None, False ) all_documents.extend( data_post_processor.invoke( diff --git a/web/app/components/app/configuration/base/icons/citation.tsx b/api/core/rag/datasource/vdb/alibabacloud_mysql/__init__.py similarity index 100% rename from web/app/components/app/configuration/base/icons/citation.tsx rename to api/core/rag/datasource/vdb/alibabacloud_mysql/__init__.py diff --git a/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py b/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py new file mode 100644 index 0000000000..fdb5ffebfc --- /dev/null +++ b/api/core/rag/datasource/vdb/alibabacloud_mysql/alibabacloud_mysql_vector.py @@ -0,0 +1,388 @@ +import hashlib +import json +import logging +import uuid +from contextlib import contextmanager +from typing import Any, Literal, cast + +import mysql.connector +from mysql.connector import Error as MySQLError +from pydantic import BaseModel, model_validator + +from configs import dify_config +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.embedding.embedding_base import Embeddings +from core.rag.models.document import Document +from extensions.ext_redis import redis_client +from models.dataset import Dataset + +logger = logging.getLogger(__name__) + + +class AlibabaCloudMySQLVectorConfig(BaseModel): + host: str + port: int + user: str + password: str + database: str + max_connection: int + charset: str = "utf8mb4" + distance_function: 
Literal["cosine", "euclidean"] = "cosine" + hnsw_m: int = 6 + + @model_validator(mode="before") + @classmethod + def validate_config(cls, values: dict): + if not values.get("host"): + raise ValueError("config ALIBABACLOUD_MYSQL_HOST is required") + if not values.get("port"): + raise ValueError("config ALIBABACLOUD_MYSQL_PORT is required") + if not values.get("user"): + raise ValueError("config ALIBABACLOUD_MYSQL_USER is required") + if values.get("password") is None: + raise ValueError("config ALIBABACLOUD_MYSQL_PASSWORD is required") + if not values.get("database"): + raise ValueError("config ALIBABACLOUD_MYSQL_DATABASE is required") + if not values.get("max_connection"): + raise ValueError("config ALIBABACLOUD_MYSQL_MAX_CONNECTION is required") + return values + + +SQL_CREATE_TABLE = """ +CREATE TABLE IF NOT EXISTS {table_name} ( + id VARCHAR(36) PRIMARY KEY, + text LONGTEXT NOT NULL, + meta JSON NOT NULL, + embedding VECTOR({dimension}) NOT NULL, + VECTOR INDEX (embedding) M={hnsw_m} DISTANCE={distance_function} +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; +""" + +SQL_CREATE_META_INDEX = """ +CREATE INDEX idx_{index_hash}_meta ON {table_name} + ((CAST(JSON_UNQUOTE(JSON_EXTRACT(meta, '$.document_id')) AS CHAR(36)))); +""" + +SQL_CREATE_FULLTEXT_INDEX = """ +CREATE FULLTEXT INDEX idx_{index_hash}_text ON {table_name} (text) WITH PARSER ngram; +""" + + +class AlibabaCloudMySQLVector(BaseVector): + def __init__(self, collection_name: str, config: AlibabaCloudMySQLVectorConfig): + super().__init__(collection_name) + self.pool = self._create_connection_pool(config) + self.table_name = collection_name.lower() + self.index_hash = hashlib.md5(self.table_name.encode()).hexdigest()[:8] + self.distance_function = config.distance_function.lower() + self.hnsw_m = config.hnsw_m + self._check_vector_support() + + def get_type(self) -> str: + return VectorType.ALIBABACLOUD_MYSQL + + def _create_connection_pool(self, config: AlibabaCloudMySQLVectorConfig): + # Create connection pool using mysql-connector-python pooling + pool_config: dict[str, Any] = { + "host": config.host, + "port": config.port, + "user": config.user, + "password": config.password, + "database": config.database, + "charset": config.charset, + "autocommit": True, + "pool_name": f"pool_{self.collection_name}", + "pool_size": config.max_connection, + "pool_reset_session": True, + } + return mysql.connector.pooling.MySQLConnectionPool(**pool_config) + + def _check_vector_support(self): + """Check if the MySQL server supports vector operations.""" + try: + with self._get_cursor() as cur: + # Check MySQL version and vector support + cur.execute("SELECT VERSION()") + version = cur.fetchone()["VERSION()"] + logger.debug("Connected to MySQL version: %s", version) + # Try to execute a simple vector function to verify support + cur.execute("SELECT VEC_FromText('[1,2,3]') IS NOT NULL as vector_support") + result = cur.fetchone() + if not result or not result.get("vector_support"): + raise ValueError( + "RDS MySQL Vector functions are not available." + " Please ensure you're using RDS MySQL 8.0.36+ with Vector support." + ) + + except MySQLError as e: + if "FUNCTION" in str(e) and "VEC_FromText" in str(e): + raise ValueError( + "RDS MySQL Vector functions are not available." + " Please ensure you're using RDS MySQL 8.0.36+ with Vector support." 
+ ) from e + raise e + + @contextmanager + def _get_cursor(self): + conn = self.pool.get_connection() + cur = conn.cursor(dictionary=True) + try: + yield cur + finally: + cur.close() + conn.close() + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + dimension = len(embeddings[0]) + self._create_collection(dimension) + return self.add_texts(texts, embeddings) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + values = [] + pks = [] + for i, doc in enumerate(documents): + if doc.metadata is not None: + doc_id = doc.metadata.get("doc_id", str(uuid.uuid4())) + pks.append(doc_id) + # Convert embedding list to Aliyun MySQL vector format + vector_str = "[" + ",".join(map(str, embeddings[i])) + "]" + values.append( + ( + doc_id, + doc.page_content, + json.dumps(doc.metadata), + vector_str, + ) + ) + + with self._get_cursor() as cur: + insert_sql = ( + f"INSERT INTO {self.table_name} (id, text, meta, embedding) VALUES (%s, %s, %s, VEC_FromText(%s))" + ) + cur.executemany(insert_sql, values) + return pks + + def text_exists(self, id: str) -> bool: + with self._get_cursor() as cur: + cur.execute(f"SELECT id FROM {self.table_name} WHERE id = %s", (id,)) + return cur.fetchone() is not None + + def get_by_ids(self, ids: list[str]) -> list[Document]: + if not ids: + return [] + + with self._get_cursor() as cur: + placeholders = ",".join(["%s"] * len(ids)) + cur.execute(f"SELECT meta, text FROM {self.table_name} WHERE id IN ({placeholders})", ids) + docs = [] + for record in cur: + metadata = record["meta"] + if isinstance(metadata, str): + metadata = json.loads(metadata) + docs.append(Document(page_content=record["text"], metadata=metadata)) + return docs + + def delete_by_ids(self, ids: list[str]): + # Avoiding crashes caused by performing delete operations on empty lists + if not ids: + return + + with self._get_cursor() as cur: + try: + placeholders = ",".join(["%s"] * len(ids)) + cur.execute(f"DELETE FROM {self.table_name} WHERE id IN ({placeholders})", ids) + except MySQLError as e: + if e.errno == 1146: # Table doesn't exist + logger.warning("Table %s not found, skipping delete operation.", self.table_name) + return + else: + raise e + + def delete_by_metadata_field(self, key: str, value: str): + with self._get_cursor() as cur: + cur.execute( + f"DELETE FROM {self.table_name} WHERE JSON_UNQUOTE(JSON_EXTRACT(meta, %s)) = %s", (f"$.{key}", value) + ) + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + """ + Search the nearest neighbors to a vector using RDS MySQL vector distance functions. + + :param query_vector: The input vector to search for similar items. + :return: List of Documents that are nearest to the query vector. 
+ """ + top_k = kwargs.get("top_k", 4) + if not isinstance(top_k, int) or top_k <= 0: + raise ValueError("top_k must be a positive integer") + + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + params = [] + + if document_ids_filter: + placeholders = ",".join(["%s"] * len(document_ids_filter)) + where_clause = f" WHERE JSON_UNQUOTE(JSON_EXTRACT(meta, '$.document_id')) IN ({placeholders}) " + params.extend(document_ids_filter) + + # Convert query vector to RDS MySQL vector format + query_vector_str = "[" + ",".join(map(str, query_vector)) + "]" + + # Use RSD MySQL's native vector distance functions + with self._get_cursor() as cur: + # Choose distance function based on configuration + distance_func = "VEC_DISTANCE_COSINE" if self.distance_function == "cosine" else "VEC_DISTANCE_EUCLIDEAN" + + # Note: RDS MySQL optimizer will use vector index when ORDER BY + LIMIT are present + # Use column alias in ORDER BY to avoid calculating distance twice + sql = f""" + SELECT meta, text, + {distance_func}(embedding, VEC_FromText(%s)) AS distance + FROM {self.table_name} + {where_clause} + ORDER BY distance + LIMIT %s + """ + query_params = [query_vector_str] + params + [top_k] + + cur.execute(sql, query_params) + + docs = [] + score_threshold = float(kwargs.get("score_threshold") or 0.0) + + for record in cur: + try: + distance = float(record["distance"]) + # Convert distance to similarity score + if self.distance_function == "cosine": + # For cosine distance: similarity = 1 - distance + similarity = 1.0 - distance + else: + # For euclidean distance: use inverse relationship + # similarity = 1 / (1 + distance) + similarity = 1.0 / (1.0 + distance) + + metadata = record["meta"] + if isinstance(metadata, str): + metadata = json.loads(metadata) + metadata["score"] = similarity + metadata["distance"] = distance + + if similarity >= score_threshold: + docs.append(Document(page_content=record["text"], metadata=metadata)) + except (ValueError, json.JSONDecodeError) as e: + logger.warning("Error processing search result: %s", e) + continue + + return docs + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + top_k = kwargs.get("top_k", 5) + if not isinstance(top_k, int) or top_k <= 0: + raise ValueError("top_k must be a positive integer") + + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + params = [] + + if document_ids_filter: + placeholders = ",".join(["%s"] * len(document_ids_filter)) + where_clause = f" AND JSON_UNQUOTE(JSON_EXTRACT(meta, '$.document_id')) IN ({placeholders}) " + params.extend(document_ids_filter) + + with self._get_cursor() as cur: + # Build query parameters: query (twice for MATCH clauses), document_ids_filter (if any), top_k + query_params = [query, query] + params + [top_k] + cur.execute( + f"""SELECT meta, text, + MATCH(text) AGAINST(%s IN NATURAL LANGUAGE MODE) AS score + FROM {self.table_name} + WHERE MATCH(text) AGAINST(%s IN NATURAL LANGUAGE MODE) + {where_clause} + ORDER BY score DESC + LIMIT %s""", + query_params, + ) + docs = [] + for record in cur: + metadata = record["meta"] + if isinstance(metadata, str): + metadata = json.loads(metadata) + metadata["score"] = float(record["score"]) + docs.append(Document(page_content=record["text"], metadata=metadata)) + return docs + + def delete(self): + with self._get_cursor() as cur: + cur.execute(f"DROP TABLE IF EXISTS {self.table_name}") + + def _create_collection(self, dimension: int): + collection_exist_cache_key = 
f"vector_indexing_{self._collection_name}" + lock_name = f"{collection_exist_cache_key}_lock" + with redis_client.lock(lock_name, timeout=20): + if redis_client.get(collection_exist_cache_key): + return + + with self._get_cursor() as cur: + # Create table with vector column and vector index + cur.execute( + SQL_CREATE_TABLE.format( + table_name=self.table_name, + dimension=dimension, + distance_function=self.distance_function, + hnsw_m=self.hnsw_m, + ) + ) + # Create metadata index (check if exists first) + try: + cur.execute(SQL_CREATE_META_INDEX.format(table_name=self.table_name, index_hash=self.index_hash)) + except MySQLError as e: + if e.errno != 1061: # Duplicate key name + logger.warning("Could not create meta index: %s", e) + + # Create full-text index for text search + try: + cur.execute( + SQL_CREATE_FULLTEXT_INDEX.format(table_name=self.table_name, index_hash=self.index_hash) + ) + except MySQLError as e: + if e.errno != 1061: # Duplicate key name + logger.warning("Could not create fulltext index: %s", e) + + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + +class AlibabaCloudMySQLVectorFactory(AbstractVectorFactory): + def _validate_distance_function(self, distance_function: str) -> Literal["cosine", "euclidean"]: + """Validate and return the distance function as a proper Literal type.""" + if distance_function not in ["cosine", "euclidean"]: + raise ValueError(f"Invalid distance function: {distance_function}. Must be 'cosine' or 'euclidean'") + return cast(Literal["cosine", "euclidean"], distance_function) + + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> AlibabaCloudMySQLVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + dataset.index_struct = json.dumps( + self.gen_index_struct_dict(VectorType.ALIBABACLOUD_MYSQL, collection_name) + ) + return AlibabaCloudMySQLVector( + collection_name=collection_name, + config=AlibabaCloudMySQLVectorConfig( + host=dify_config.ALIBABACLOUD_MYSQL_HOST or "localhost", + port=dify_config.ALIBABACLOUD_MYSQL_PORT, + user=dify_config.ALIBABACLOUD_MYSQL_USER or "root", + password=dify_config.ALIBABACLOUD_MYSQL_PASSWORD or "", + database=dify_config.ALIBABACLOUD_MYSQL_DATABASE or "dify", + max_connection=dify_config.ALIBABACLOUD_MYSQL_MAX_CONNECTION, + charset=dify_config.ALIBABACLOUD_MYSQL_CHARSET or "utf8mb4", + distance_function=self._validate_distance_function( + dify_config.ALIBABACLOUD_MYSQL_DISTANCE_FUNCTION or "cosine" + ), + hnsw_m=dify_config.ALIBABACLOUD_MYSQL_HNSW_M or 6, + ), + ) diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py index e55e5f3101..a306f9ba0c 100644 --- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py +++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py @@ -488,9 +488,9 @@ class ClickzettaVector(BaseVector): create_table_sql = f""" CREATE TABLE IF NOT EXISTS {self._config.schema_name}.{self._table_name} ( id STRING NOT NULL COMMENT 'Unique document identifier', - {Field.CONTENT_KEY.value} STRING NOT NULL COMMENT 'Document text content for search and retrieval', - {Field.METADATA_KEY.value} JSON COMMENT 'Document metadata including source, type, and other attributes', - {Field.VECTOR.value} VECTOR(FLOAT, {dimension}) NOT NULL COMMENT + {Field.CONTENT_KEY} STRING 
NOT NULL COMMENT 'Document text content for search and retrieval', + {Field.METADATA_KEY} JSON COMMENT 'Document metadata including source, type, and other attributes', + {Field.VECTOR} VECTOR(FLOAT, {dimension}) NOT NULL COMMENT 'High-dimensional embedding vector for semantic similarity search', PRIMARY KEY (id) ) COMMENT 'Dify RAG knowledge base vector storage table for document embeddings and content' @@ -519,15 +519,15 @@ class ClickzettaVector(BaseVector): existing_indexes = cursor.fetchall() for idx in existing_indexes: # Check if vector index already exists on the embedding column - if Field.VECTOR.value in str(idx).lower(): - logger.info("Vector index already exists on column %s", Field.VECTOR.value) + if Field.VECTOR in str(idx).lower(): + logger.info("Vector index already exists on column %s", Field.VECTOR) return except (RuntimeError, ValueError) as e: logger.warning("Failed to check existing indexes: %s", e) index_sql = f""" CREATE VECTOR INDEX IF NOT EXISTS {index_name} - ON TABLE {self._config.schema_name}.{self._table_name}({Field.VECTOR.value}) + ON TABLE {self._config.schema_name}.{self._table_name}({Field.VECTOR}) PROPERTIES ( "distance.function" = "{self._config.vector_distance_function}", "scalar.type" = "f32", @@ -560,17 +560,17 @@ class ClickzettaVector(BaseVector): # More precise check: look for inverted index specifically on the content column if ( "inverted" in idx_str - and Field.CONTENT_KEY.value.lower() in idx_str + and Field.CONTENT_KEY.lower() in idx_str and (index_name.lower() in idx_str or f"idx_{self._table_name}_text" in idx_str) ): - logger.info("Inverted index already exists on column %s: %s", Field.CONTENT_KEY.value, idx) + logger.info("Inverted index already exists on column %s: %s", Field.CONTENT_KEY, idx) return except (RuntimeError, ValueError) as e: logger.warning("Failed to check existing indexes: %s", e) index_sql = f""" CREATE INVERTED INDEX IF NOT EXISTS {index_name} - ON TABLE {self._config.schema_name}.{self._table_name} ({Field.CONTENT_KEY.value}) + ON TABLE {self._config.schema_name}.{self._table_name} ({Field.CONTENT_KEY}) PROPERTIES ( "analyzer" = "{self._config.analyzer_type}", "mode" = "{self._config.analyzer_mode}" @@ -588,13 +588,13 @@ class ClickzettaVector(BaseVector): or "with the same type" in error_msg or "cannot create inverted index" in error_msg ) and "already has index" in error_msg: - logger.info("Inverted index already exists on column %s", Field.CONTENT_KEY.value) + logger.info("Inverted index already exists on column %s", Field.CONTENT_KEY) # Try to get the existing index name for logging try: cursor.execute(f"SHOW INDEX FROM {self._config.schema_name}.{self._table_name}") existing_indexes = cursor.fetchall() for idx in existing_indexes: - if "inverted" in str(idx).lower() and Field.CONTENT_KEY.value.lower() in str(idx).lower(): + if "inverted" in str(idx).lower() and Field.CONTENT_KEY.lower() in str(idx).lower(): logger.info("Found existing inverted index: %s", idx) break except (RuntimeError, ValueError): @@ -669,7 +669,7 @@ class ClickzettaVector(BaseVector): # Use parameterized INSERT with executemany for better performance and security # Cast JSON and VECTOR in SQL, pass raw data as parameters - columns = f"id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value}, {Field.VECTOR.value}" + columns = f"id, {Field.CONTENT_KEY}, {Field.METADATA_KEY}, {Field.VECTOR}" insert_sql = ( f"INSERT INTO {self._config.schema_name}.{self._table_name} ({columns}) " f"VALUES (?, ?, CAST(? AS JSON), CAST(? 
AS VECTOR({vector_dimension})))" @@ -767,7 +767,7 @@ class ClickzettaVector(BaseVector): # Use json_extract_string function for ClickZetta compatibility sql = ( f"DELETE FROM {self._config.schema_name}.{self._table_name} " - f"WHERE json_extract_string({Field.METADATA_KEY.value}, '$.{key}') = ?" + f"WHERE json_extract_string({Field.METADATA_KEY}, '$.{key}') = ?" ) cursor.execute(sql, binding_params=[value]) @@ -795,9 +795,7 @@ class ClickzettaVector(BaseVector): safe_doc_ids = [str(id).replace("'", "''") for id in document_ids_filter] doc_ids_str = ",".join(f"'{id}'" for id in safe_doc_ids) # Use json_extract_string function for ClickZetta compatibility - filter_clauses.append( - f"json_extract_string({Field.METADATA_KEY.value}, '$.document_id') IN ({doc_ids_str})" - ) + filter_clauses.append(f"json_extract_string({Field.METADATA_KEY}, '$.document_id') IN ({doc_ids_str})") # No need for dataset_id filter since each dataset has its own table @@ -808,23 +806,21 @@ class ClickzettaVector(BaseVector): distance_func = "COSINE_DISTANCE" if score_threshold > 0: query_vector_str = f"CAST('[{self._format_vector_simple(query_vector)}]' AS VECTOR({vector_dimension}))" - filter_clauses.append( - f"{distance_func}({Field.VECTOR.value}, {query_vector_str}) < {2 - score_threshold}" - ) + filter_clauses.append(f"{distance_func}({Field.VECTOR}, {query_vector_str}) < {2 - score_threshold}") else: # For L2 distance, smaller is better distance_func = "L2_DISTANCE" if score_threshold > 0: query_vector_str = f"CAST('[{self._format_vector_simple(query_vector)}]' AS VECTOR({vector_dimension}))" - filter_clauses.append(f"{distance_func}({Field.VECTOR.value}, {query_vector_str}) < {score_threshold}") + filter_clauses.append(f"{distance_func}({Field.VECTOR}, {query_vector_str}) < {score_threshold}") where_clause = " AND ".join(filter_clauses) if filter_clauses else "1=1" # Execute vector search query query_vector_str = f"CAST('[{self._format_vector_simple(query_vector)}]' AS VECTOR({vector_dimension}))" search_sql = f""" - SELECT id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value}, - {distance_func}({Field.VECTOR.value}, {query_vector_str}) AS distance + SELECT id, {Field.CONTENT_KEY}, {Field.METADATA_KEY}, + {distance_func}({Field.VECTOR}, {query_vector_str}) AS distance FROM {self._config.schema_name}.{self._table_name} WHERE {where_clause} ORDER BY distance @@ -887,9 +883,7 @@ class ClickzettaVector(BaseVector): safe_doc_ids = [str(id).replace("'", "''") for id in document_ids_filter] doc_ids_str = ",".join(f"'{id}'" for id in safe_doc_ids) # Use json_extract_string function for ClickZetta compatibility - filter_clauses.append( - f"json_extract_string({Field.METADATA_KEY.value}, '$.document_id') IN ({doc_ids_str})" - ) + filter_clauses.append(f"json_extract_string({Field.METADATA_KEY}, '$.document_id') IN ({doc_ids_str})") # No need for dataset_id filter since each dataset has its own table @@ -897,13 +891,13 @@ class ClickzettaVector(BaseVector): # match_all requires all terms to be present # Use simple quote escaping for MATCH_ALL since it needs to be in the WHERE clause escaped_query = query.replace("'", "''") - filter_clauses.append(f"MATCH_ALL({Field.CONTENT_KEY.value}, '{escaped_query}')") + filter_clauses.append(f"MATCH_ALL({Field.CONTENT_KEY}, '{escaped_query}')") where_clause = " AND ".join(filter_clauses) # Execute full-text search query search_sql = f""" - SELECT id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value} + SELECT id, {Field.CONTENT_KEY}, {Field.METADATA_KEY} FROM 
{self._config.schema_name}.{self._table_name} WHERE {where_clause} LIMIT {top_k} @@ -986,19 +980,17 @@ class ClickzettaVector(BaseVector): safe_doc_ids = [str(id).replace("'", "''") for id in document_ids_filter] doc_ids_str = ",".join(f"'{id}'" for id in safe_doc_ids) # Use json_extract_string function for ClickZetta compatibility - filter_clauses.append( - f"json_extract_string({Field.METADATA_KEY.value}, '$.document_id') IN ({doc_ids_str})" - ) + filter_clauses.append(f"json_extract_string({Field.METADATA_KEY}, '$.document_id') IN ({doc_ids_str})") # No need for dataset_id filter since each dataset has its own table # Use simple quote escaping for LIKE clause escaped_query = query.replace("'", "''") - filter_clauses.append(f"{Field.CONTENT_KEY.value} LIKE '%{escaped_query}%'") + filter_clauses.append(f"{Field.CONTENT_KEY} LIKE '%{escaped_query}%'") where_clause = " AND ".join(filter_clauses) search_sql = f""" - SELECT id, {Field.CONTENT_KEY.value}, {Field.METADATA_KEY.value} + SELECT id, {Field.CONTENT_KEY}, {Field.METADATA_KEY} FROM {self._config.schema_name}.{self._table_name} WHERE {where_clause} LIMIT {top_k} diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py index 7b00928b7b..1e7fe52666 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py @@ -57,18 +57,18 @@ class ElasticSearchJaVector(ElasticSearchVector): } mappings = { "properties": { - Field.CONTENT_KEY.value: { + Field.CONTENT_KEY: { "type": "text", "analyzer": "ja_analyzer", "search_analyzer": "ja_analyzer", }, - Field.VECTOR.value: { # Make sure the dimension is correct here + Field.VECTOR: { # Make sure the dimension is correct here "type": "dense_vector", "dims": dim, "index": True, "similarity": "cosine", }, - Field.METADATA_KEY.value: { + Field.METADATA_KEY: { "type": "object", "properties": { "doc_id": {"type": "keyword"} # Map doc_id to keyword type diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py index 2c147fa7ca..0ff8c915e6 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py @@ -4,7 +4,7 @@ import math from typing import Any, cast from urllib.parse import urlparse -import requests +from elasticsearch import ConnectionError as ElasticsearchConnectionError from elasticsearch import Elasticsearch from flask import current_app from packaging.version import parse as parse_version @@ -138,7 +138,7 @@ class ElasticSearchVector(BaseVector): if not client.ping(): raise ConnectionError("Failed to connect to Elasticsearch") - except requests.ConnectionError as e: + except ElasticsearchConnectionError as e: raise ConnectionError(f"Vector database connection error: {str(e)}") except Exception as e: raise ConnectionError(f"Elasticsearch client initialization failed: {str(e)}") @@ -163,9 +163,9 @@ class ElasticSearchVector(BaseVector): index=self._collection_name, id=uuids[i], document={ - Field.CONTENT_KEY.value: documents[i].page_content, - Field.VECTOR.value: embeddings[i] or None, - Field.METADATA_KEY.value: documents[i].metadata or {}, + Field.CONTENT_KEY: documents[i].page_content, + Field.VECTOR: embeddings[i] or None, + Field.METADATA_KEY: documents[i].metadata or {}, }, ) 
self._client.indices.refresh(index=self._collection_name) @@ -193,7 +193,7 @@ class ElasticSearchVector(BaseVector): def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: top_k = kwargs.get("top_k", 4) num_candidates = math.ceil(top_k * 1.5) - knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": top_k, "num_candidates": num_candidates} + knn = {"field": Field.VECTOR, "query_vector": query_vector, "k": top_k, "num_candidates": num_candidates} document_ids_filter = kwargs.get("document_ids_filter") if document_ids_filter: knn["filter"] = {"terms": {"metadata.document_id": document_ids_filter}} @@ -205,9 +205,9 @@ class ElasticSearchVector(BaseVector): docs_and_scores.append( ( Document( - page_content=hit["_source"][Field.CONTENT_KEY.value], - vector=hit["_source"][Field.VECTOR.value], - metadata=hit["_source"][Field.METADATA_KEY.value], + page_content=hit["_source"][Field.CONTENT_KEY], + vector=hit["_source"][Field.VECTOR], + metadata=hit["_source"][Field.METADATA_KEY], ), hit["_score"], ) @@ -224,13 +224,13 @@ class ElasticSearchVector(BaseVector): return docs def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: - query_str: dict[str, Any] = {"match": {Field.CONTENT_KEY.value: query}} + query_str: dict[str, Any] = {"match": {Field.CONTENT_KEY: query}} document_ids_filter = kwargs.get("document_ids_filter") if document_ids_filter: query_str = { "bool": { - "must": {"match": {Field.CONTENT_KEY.value: query}}, + "must": {"match": {Field.CONTENT_KEY: query}}, "filter": {"terms": {"metadata.document_id": document_ids_filter}}, } } @@ -240,9 +240,9 @@ class ElasticSearchVector(BaseVector): for hit in results["hits"]["hits"]: docs.append( Document( - page_content=hit["_source"][Field.CONTENT_KEY.value], - vector=hit["_source"][Field.VECTOR.value], - metadata=hit["_source"][Field.METADATA_KEY.value], + page_content=hit["_source"][Field.CONTENT_KEY], + vector=hit["_source"][Field.VECTOR], + metadata=hit["_source"][Field.METADATA_KEY], ) ) @@ -270,14 +270,14 @@ class ElasticSearchVector(BaseVector): dim = len(embeddings[0]) mappings = { "properties": { - Field.CONTENT_KEY.value: {"type": "text"}, - Field.VECTOR.value: { # Make sure the dimension is correct here + Field.CONTENT_KEY: {"type": "text"}, + Field.VECTOR: { # Make sure the dimension is correct here "type": "dense_vector", "dims": dim, "index": True, "similarity": "cosine", }, - Field.METADATA_KEY.value: { + Field.METADATA_KEY: { "type": "object", "properties": { "doc_id": {"type": "keyword"}, # Map doc_id to keyword type diff --git a/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py b/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py index cfee090768..c7b6593a8f 100644 --- a/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py +++ b/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py @@ -67,9 +67,9 @@ class HuaweiCloudVector(BaseVector): index=self._collection_name, id=uuids[i], document={ - Field.CONTENT_KEY.value: documents[i].page_content, - Field.VECTOR.value: embeddings[i] or None, - Field.METADATA_KEY.value: documents[i].metadata or {}, + Field.CONTENT_KEY: documents[i].page_content, + Field.VECTOR: embeddings[i] or None, + Field.METADATA_KEY: documents[i].metadata or {}, }, ) self._client.indices.refresh(index=self._collection_name) @@ -101,7 +101,7 @@ class HuaweiCloudVector(BaseVector): "size": top_k, "query": { "vector": { - Field.VECTOR.value: { + Field.VECTOR: { "vector": query_vector, "topk": top_k, } @@ -116,9 +116,9 @@ class 
HuaweiCloudVector(BaseVector): docs_and_scores.append( ( Document( - page_content=hit["_source"][Field.CONTENT_KEY.value], - vector=hit["_source"][Field.VECTOR.value], - metadata=hit["_source"][Field.METADATA_KEY.value], + page_content=hit["_source"][Field.CONTENT_KEY], + vector=hit["_source"][Field.VECTOR], + metadata=hit["_source"][Field.METADATA_KEY], ), hit["_score"], ) @@ -135,15 +135,15 @@ class HuaweiCloudVector(BaseVector): return docs def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: - query_str = {"match": {Field.CONTENT_KEY.value: query}} + query_str = {"match": {Field.CONTENT_KEY: query}} results = self._client.search(index=self._collection_name, query=query_str, size=kwargs.get("top_k", 4)) docs = [] for hit in results["hits"]["hits"]: docs.append( Document( - page_content=hit["_source"][Field.CONTENT_KEY.value], - vector=hit["_source"][Field.VECTOR.value], - metadata=hit["_source"][Field.METADATA_KEY.value], + page_content=hit["_source"][Field.CONTENT_KEY], + vector=hit["_source"][Field.VECTOR], + metadata=hit["_source"][Field.METADATA_KEY], ) ) @@ -171,8 +171,8 @@ class HuaweiCloudVector(BaseVector): dim = len(embeddings[0]) mappings = { "properties": { - Field.CONTENT_KEY.value: {"type": "text"}, - Field.VECTOR.value: { # Make sure the dimension is correct here + Field.CONTENT_KEY: {"type": "text"}, + Field.VECTOR: { # Make sure the dimension is correct here "type": "vector", "dimension": dim, "indexing": True, @@ -181,7 +181,7 @@ class HuaweiCloudVector(BaseVector): "neighbors": 32, "efc": 128, }, - Field.METADATA_KEY.value: { + Field.METADATA_KEY: { "type": "object", "properties": { "doc_id": {"type": "keyword"} # Map doc_id to keyword type diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py index 8824e1c67b..bfcb620618 100644 --- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py +++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py @@ -125,9 +125,9 @@ class LindormVectorStore(BaseVector): } } action_values: dict[str, Any] = { - Field.CONTENT_KEY.value: documents[i].page_content, - Field.VECTOR.value: embeddings[i], - Field.METADATA_KEY.value: documents[i].metadata, + Field.CONTENT_KEY: documents[i].page_content, + Field.VECTOR: embeddings[i], + Field.METADATA_KEY: documents[i].metadata, } if self._using_ugc: action_header["index"]["routing"] = self._routing @@ -149,7 +149,7 @@ class LindormVectorStore(BaseVector): def get_ids_by_metadata_field(self, key: str, value: str): query: dict[str, Any] = { - "query": {"bool": {"must": [{"term": {f"{Field.METADATA_KEY.value}.{key}.keyword": value}}]}} + "query": {"bool": {"must": [{"term": {f"{Field.METADATA_KEY}.{key}.keyword": value}}]}} } if self._using_ugc: query["query"]["bool"]["must"].append({"term": {f"{ROUTING_FIELD}.keyword": self._routing}}) @@ -252,14 +252,14 @@ class LindormVectorStore(BaseVector): search_query: dict[str, Any] = { "size": top_k, "_source": True, - "query": {"knn": {Field.VECTOR.value: {"vector": query_vector, "k": top_k}}}, + "query": {"knn": {Field.VECTOR: {"vector": query_vector, "k": top_k}}}, } final_ext: dict[str, Any] = {"lvector": {}} if filters is not None and len(filters) > 0: # when using filter, transform filter from List[Dict] to Dict as valid format filter_dict = {"bool": {"must": filters}} if len(filters) > 1 else filters[0] - search_query["query"]["knn"][Field.VECTOR.value]["filter"] = filter_dict # filter should be Dict + search_query["query"]["knn"][Field.VECTOR]["filter"] = 
filter_dict # filter should be Dict final_ext["lvector"]["filter_type"] = "pre_filter" if final_ext != {"lvector": {}}: @@ -279,9 +279,9 @@ class LindormVectorStore(BaseVector): docs_and_scores.append( ( Document( - page_content=hit["_source"][Field.CONTENT_KEY.value], - vector=hit["_source"][Field.VECTOR.value], - metadata=hit["_source"][Field.METADATA_KEY.value], + page_content=hit["_source"][Field.CONTENT_KEY], + vector=hit["_source"][Field.VECTOR], + metadata=hit["_source"][Field.METADATA_KEY], ), hit["_score"], ) @@ -318,9 +318,9 @@ class LindormVectorStore(BaseVector): docs = [] for hit in response["hits"]["hits"]: - metadata = hit["_source"].get(Field.METADATA_KEY.value) - vector = hit["_source"].get(Field.VECTOR.value) - page_content = hit["_source"].get(Field.CONTENT_KEY.value) + metadata = hit["_source"].get(Field.METADATA_KEY) + vector = hit["_source"].get(Field.VECTOR) + page_content = hit["_source"].get(Field.CONTENT_KEY) doc = Document(page_content=page_content, vector=vector, metadata=metadata) docs.append(doc) @@ -342,8 +342,8 @@ class LindormVectorStore(BaseVector): "settings": {"index": {"knn": True, "knn_routing": self._using_ugc}}, "mappings": { "properties": { - Field.CONTENT_KEY.value: {"type": "text"}, - Field.VECTOR.value: { + Field.CONTENT_KEY: {"type": "text"}, + Field.VECTOR: { "type": "knn_vector", "dimension": len(embeddings[0]), # Make sure the dimension is correct here "method": { diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py index 5f32feb709..96eb465401 100644 --- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py +++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py @@ -85,7 +85,7 @@ class MilvusVector(BaseVector): collection_info = self._client.describe_collection(self._collection_name) fields = [field["name"] for field in collection_info["fields"]] # Since primary field is auto-id, no need to track it - self._fields = [f for f in fields if f != Field.PRIMARY_KEY.value] + self._fields = [f for f in fields if f != Field.PRIMARY_KEY] def _check_hybrid_search_support(self) -> bool: """ @@ -130,9 +130,9 @@ class MilvusVector(BaseVector): insert_dict = { # Do not need to insert the sparse_vector field separately, as the text_bm25_emb # function will automatically convert the native text into a sparse vector for us. - Field.CONTENT_KEY.value: documents[i].page_content, - Field.VECTOR.value: embeddings[i], - Field.METADATA_KEY.value: documents[i].metadata, + Field.CONTENT_KEY: documents[i].page_content, + Field.VECTOR: embeddings[i], + Field.METADATA_KEY: documents[i].metadata, } insert_dict_list.append(insert_dict) # Total insert count @@ -243,15 +243,15 @@ class MilvusVector(BaseVector): results = self._client.search( collection_name=self._collection_name, data=[query_vector], - anns_field=Field.VECTOR.value, + anns_field=Field.VECTOR, limit=kwargs.get("top_k", 4), - output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value], + output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY], filter=filter, ) return self._process_search_results( results, - output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value], + output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY], score_threshold=float(kwargs.get("score_threshold") or 0.0), ) @@ -264,7 +264,7 @@ class MilvusVector(BaseVector): "Full-text search is disabled: set MILVUS_ENABLE_HYBRID_SEARCH=true (requires Milvus >= 2.5.0)." 
) return [] - if not self.field_exists(Field.SPARSE_VECTOR.value): + if not self.field_exists(Field.SPARSE_VECTOR): logger.warning( "Full-text search unavailable: collection missing 'sparse_vector' field; " "recreate the collection after enabling MILVUS_ENABLE_HYBRID_SEARCH to add BM25 sparse index." @@ -279,15 +279,15 @@ class MilvusVector(BaseVector): results = self._client.search( collection_name=self._collection_name, data=[query], - anns_field=Field.SPARSE_VECTOR.value, + anns_field=Field.SPARSE_VECTOR, limit=kwargs.get("top_k", 4), - output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value], + output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY], filter=filter, ) return self._process_search_results( results, - output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value], + output_fields=[Field.CONTENT_KEY, Field.METADATA_KEY], score_threshold=float(kwargs.get("score_threshold") or 0.0), ) @@ -311,7 +311,7 @@ class MilvusVector(BaseVector): dim = len(embeddings[0]) fields = [] if metadatas: - fields.append(FieldSchema(Field.METADATA_KEY.value, DataType.JSON, max_length=65_535)) + fields.append(FieldSchema(Field.METADATA_KEY, DataType.JSON, max_length=65_535)) # Create the text field, enable_analyzer will be set True to support milvus automatically # transfer text to sparse_vector, reference: https://milvus.io/docs/full-text-search.md @@ -326,15 +326,15 @@ class MilvusVector(BaseVector): ): content_field_kwargs["analyzer_params"] = self._client_config.analyzer_params - fields.append(FieldSchema(Field.CONTENT_KEY.value, DataType.VARCHAR, **content_field_kwargs)) + fields.append(FieldSchema(Field.CONTENT_KEY, DataType.VARCHAR, **content_field_kwargs)) # Create the primary key field - fields.append(FieldSchema(Field.PRIMARY_KEY.value, DataType.INT64, is_primary=True, auto_id=True)) + fields.append(FieldSchema(Field.PRIMARY_KEY, DataType.INT64, is_primary=True, auto_id=True)) # Create the vector field, supports binary or float vectors - fields.append(FieldSchema(Field.VECTOR.value, infer_dtype_bydata(embeddings[0]), dim=dim)) + fields.append(FieldSchema(Field.VECTOR, infer_dtype_bydata(embeddings[0]), dim=dim)) # Create Sparse Vector Index for the collection if self._hybrid_search_enabled: - fields.append(FieldSchema(Field.SPARSE_VECTOR.value, DataType.SPARSE_FLOAT_VECTOR)) + fields.append(FieldSchema(Field.SPARSE_VECTOR, DataType.SPARSE_FLOAT_VECTOR)) schema = CollectionSchema(fields) @@ -342,8 +342,8 @@ class MilvusVector(BaseVector): if self._hybrid_search_enabled: bm25_function = Function( name="text_bm25_emb", - input_field_names=[Field.CONTENT_KEY.value], - output_field_names=[Field.SPARSE_VECTOR.value], + input_field_names=[Field.CONTENT_KEY], + output_field_names=[Field.SPARSE_VECTOR], function_type=FunctionType.BM25, ) schema.add_function(bm25_function) @@ -352,12 +352,12 @@ class MilvusVector(BaseVector): # Create Index params for the collection index_params_obj = IndexParams() - index_params_obj.add_index(field_name=Field.VECTOR.value, **index_params) + index_params_obj.add_index(field_name=Field.VECTOR, **index_params) # Create Sparse Vector Index for the collection if self._hybrid_search_enabled: index_params_obj.add_index( - field_name=Field.SPARSE_VECTOR.value, index_type="AUTOINDEX", metric_type="BM25" + field_name=Field.SPARSE_VECTOR, index_type="AUTOINDEX", metric_type="BM25" ) # Create the collection diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py index 
3eb1df027e..80ffdadd96 100644 --- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py +++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py @@ -1,6 +1,6 @@ import json import logging -from typing import Any, Literal +from typing import Any from uuid import uuid4 from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection, helpers @@ -8,6 +8,7 @@ from opensearchpy.helpers import BulkIndexError from pydantic import BaseModel, model_validator from configs import dify_config +from configs.middleware.vdb.opensearch_config import AuthMethod from core.rag.datasource.vdb.field import Field from core.rag.datasource.vdb.vector_base import BaseVector from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory @@ -25,7 +26,7 @@ class OpenSearchConfig(BaseModel): port: int secure: bool = False # use_ssl verify_certs: bool = True - auth_method: Literal["basic", "aws_managed_iam"] = "basic" + auth_method: AuthMethod = AuthMethod.BASIC user: str | None = None password: str | None = None aws_region: str | None = None @@ -98,9 +99,9 @@ class OpenSearchVector(BaseVector): "_op_type": "index", "_index": self._collection_name.lower(), "_source": { - Field.CONTENT_KEY.value: documents[i].page_content, - Field.VECTOR.value: embeddings[i], # Make sure you pass an array here - Field.METADATA_KEY.value: documents[i].metadata, + Field.CONTENT_KEY: documents[i].page_content, + Field.VECTOR: embeddings[i], # Make sure you pass an array here + Field.METADATA_KEY: documents[i].metadata, }, } # See https://github.com/langchain-ai/langchainjs/issues/4346#issuecomment-1935123377 @@ -116,7 +117,7 @@ class OpenSearchVector(BaseVector): ) def get_ids_by_metadata_field(self, key: str, value: str): - query = {"query": {"term": {f"{Field.METADATA_KEY.value}.{key}": value}}} + query = {"query": {"term": {f"{Field.METADATA_KEY}.{key}": value}}} response = self._client.search(index=self._collection_name.lower(), body=query) if response["hits"]["hits"]: return [hit["_id"] for hit in response["hits"]["hits"]] @@ -180,17 +181,17 @@ class OpenSearchVector(BaseVector): query = { "size": kwargs.get("top_k", 4), - "query": {"knn": {Field.VECTOR.value: {Field.VECTOR.value: query_vector, "k": kwargs.get("top_k", 4)}}}, + "query": {"knn": {Field.VECTOR: {Field.VECTOR: query_vector, "k": kwargs.get("top_k", 4)}}}, } document_ids_filter = kwargs.get("document_ids_filter") if document_ids_filter: query["query"] = { "script_score": { - "query": {"bool": {"filter": [{"terms": {Field.DOCUMENT_ID.value: document_ids_filter}}]}}, + "query": {"bool": {"filter": [{"terms": {Field.DOCUMENT_ID: document_ids_filter}}]}}, "script": { "source": "knn_score", "lang": "knn", - "params": {"field": Field.VECTOR.value, "query_value": query_vector, "space_type": "l2"}, + "params": {"field": Field.VECTOR, "query_value": query_vector, "space_type": "l2"}, }, } } @@ -203,7 +204,7 @@ class OpenSearchVector(BaseVector): docs = [] for hit in response["hits"]["hits"]: - metadata = hit["_source"].get(Field.METADATA_KEY.value, {}) + metadata = hit["_source"].get(Field.METADATA_KEY, {}) # Make sure metadata is a dictionary if metadata is None: @@ -212,7 +213,7 @@ class OpenSearchVector(BaseVector): metadata["score"] = hit["_score"] score_threshold = float(kwargs.get("score_threshold") or 0.0) if hit["_score"] >= score_threshold: - doc = Document(page_content=hit["_source"].get(Field.CONTENT_KEY.value), metadata=metadata) + doc = Document(page_content=hit["_source"].get(Field.CONTENT_KEY), metadata=metadata) 
docs.append(doc) return docs @@ -227,9 +228,9 @@ class OpenSearchVector(BaseVector): docs = [] for hit in response["hits"]["hits"]: - metadata = hit["_source"].get(Field.METADATA_KEY.value) - vector = hit["_source"].get(Field.VECTOR.value) - page_content = hit["_source"].get(Field.CONTENT_KEY.value) + metadata = hit["_source"].get(Field.METADATA_KEY) + vector = hit["_source"].get(Field.VECTOR) + page_content = hit["_source"].get(Field.CONTENT_KEY) doc = Document(page_content=page_content, vector=vector, metadata=metadata) docs.append(doc) @@ -250,8 +251,8 @@ class OpenSearchVector(BaseVector): "settings": {"index": {"knn": True}}, "mappings": { "properties": { - Field.CONTENT_KEY.value: {"type": "text"}, - Field.VECTOR.value: { + Field.CONTENT_KEY: {"type": "text"}, + Field.VECTOR: { "type": "knn_vector", "dimension": len(embeddings[0]), # Make sure the dimension is correct here "method": { @@ -261,7 +262,7 @@ class OpenSearchVector(BaseVector): "parameters": {"ef_construction": 64, "m": 8}, }, }, - Field.METADATA_KEY.value: { + Field.METADATA_KEY: { "type": "object", "properties": { "doc_id": {"type": "keyword"}, # Map doc_id to keyword type @@ -293,7 +294,7 @@ class OpenSearchVectorFactory(AbstractVectorFactory): port=dify_config.OPENSEARCH_PORT, secure=dify_config.OPENSEARCH_SECURE, verify_certs=dify_config.OPENSEARCH_VERIFY_CERTS, - auth_method=dify_config.OPENSEARCH_AUTH_METHOD.value, + auth_method=dify_config.OPENSEARCH_AUTH_METHOD, user=dify_config.OPENSEARCH_USER, password=dify_config.OPENSEARCH_PASSWORD, aws_region=dify_config.OPENSEARCH_AWS_REGION, diff --git a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py index d46f29bd64..f8c62b908a 100644 --- a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py +++ b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py @@ -147,15 +147,13 @@ class QdrantVector(BaseVector): # create group_id payload index self._client.create_payload_index( - collection_name, Field.GROUP_KEY.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.GROUP_KEY, field_schema=PayloadSchemaType.KEYWORD ) # create doc_id payload index - self._client.create_payload_index( - collection_name, Field.DOC_ID.value, field_schema=PayloadSchemaType.KEYWORD - ) + self._client.create_payload_index(collection_name, Field.DOC_ID, field_schema=PayloadSchemaType.KEYWORD) # create document_id payload index self._client.create_payload_index( - collection_name, Field.DOCUMENT_ID.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.DOCUMENT_ID, field_schema=PayloadSchemaType.KEYWORD ) # create full text index text_index_params = TextIndexParams( @@ -165,9 +163,7 @@ class QdrantVector(BaseVector): max_token_len=20, lowercase=True, ) - self._client.create_payload_index( - collection_name, Field.CONTENT_KEY.value, field_schema=text_index_params - ) + self._client.create_payload_index(collection_name, Field.CONTENT_KEY, field_schema=text_index_params) redis_client.set(collection_exist_cache_key, 1, ex=3600) def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): @@ -220,10 +216,10 @@ class QdrantVector(BaseVector): self._build_payloads( batch_texts, batch_metadatas, - Field.CONTENT_KEY.value, - Field.METADATA_KEY.value, + Field.CONTENT_KEY, + Field.METADATA_KEY, group_id or "", # Ensure group_id is never None - Field.GROUP_KEY.value, + Field.GROUP_KEY, ), ) ] @@ -381,12 +377,12 @@ class QdrantVector(BaseVector): for result in results: if result.payload is None: continue 
- metadata = result.payload.get(Field.METADATA_KEY.value) or {} + metadata = result.payload.get(Field.METADATA_KEY) or {} # duplicate check score threshold if result.score >= score_threshold: metadata["score"] = result.score doc = Document( - page_content=result.payload.get(Field.CONTENT_KEY.value, ""), + page_content=result.payload.get(Field.CONTENT_KEY, ""), metadata=metadata, ) docs.append(doc) @@ -433,7 +429,7 @@ class QdrantVector(BaseVector): documents = [] for result in results: if result: - document = self._document_from_scored_point(result, Field.CONTENT_KEY.value, Field.METADATA_KEY.value) + document = self._document_from_scored_point(result, Field.CONTENT_KEY, Field.METADATA_KEY) documents.append(document) return documents diff --git a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py index e91d9bb0d6..f2156afa59 100644 --- a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py +++ b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py @@ -55,7 +55,7 @@ class TableStoreVector(BaseVector): self._normalize_full_text_bm25_score = config.normalize_full_text_bm25_score self._table_name = f"{collection_name}" self._index_name = f"{collection_name}_idx" - self._tags_field = f"{Field.METADATA_KEY.value}_tags" + self._tags_field = f"{Field.METADATA_KEY}_tags" def create_collection(self, embeddings: list[list[float]], **kwargs): dimension = len(embeddings[0]) @@ -64,7 +64,7 @@ class TableStoreVector(BaseVector): def get_by_ids(self, ids: list[str]) -> list[Document]: docs = [] request = BatchGetRowRequest() - columns_to_get = [Field.METADATA_KEY.value, Field.CONTENT_KEY.value] + columns_to_get = [Field.METADATA_KEY, Field.CONTENT_KEY] rows_to_get = [[("id", _id)] for _id in ids] request.add(TableInBatchGetRowItem(self._table_name, rows_to_get, columns_to_get, None, 1)) @@ -73,11 +73,7 @@ class TableStoreVector(BaseVector): for item in table_result: if item.is_ok and item.row: kv = {k: v for k, v, _ in item.row.attribute_columns} - docs.append( - Document( - page_content=kv[Field.CONTENT_KEY.value], metadata=json.loads(kv[Field.METADATA_KEY.value]) - ) - ) + docs.append(Document(page_content=kv[Field.CONTENT_KEY], metadata=json.loads(kv[Field.METADATA_KEY]))) return docs def get_type(self) -> str: @@ -95,9 +91,9 @@ class TableStoreVector(BaseVector): self._write_row( primary_key=uuids[i], attributes={ - Field.CONTENT_KEY.value: documents[i].page_content, - Field.VECTOR.value: embeddings[i], - Field.METADATA_KEY.value: documents[i].metadata, + Field.CONTENT_KEY: documents[i].page_content, + Field.VECTOR: embeddings[i], + Field.METADATA_KEY: documents[i].metadata, }, ) return uuids @@ -180,7 +176,7 @@ class TableStoreVector(BaseVector): field_schemas = [ tablestore.FieldSchema( - Field.CONTENT_KEY.value, + Field.CONTENT_KEY, tablestore.FieldType.TEXT, analyzer=tablestore.AnalyzerType.MAXWORD, index=True, @@ -188,7 +184,7 @@ class TableStoreVector(BaseVector): store=False, ), tablestore.FieldSchema( - Field.VECTOR.value, + Field.VECTOR, tablestore.FieldType.VECTOR, vector_options=tablestore.VectorOptions( data_type=tablestore.VectorDataType.VD_FLOAT_32, @@ -197,7 +193,7 @@ class TableStoreVector(BaseVector): ), ), tablestore.FieldSchema( - Field.METADATA_KEY.value, + Field.METADATA_KEY, tablestore.FieldType.KEYWORD, index=True, store=False, @@ -233,15 +229,15 @@ class TableStoreVector(BaseVector): pk = [("id", primary_key)] tags = [] - for key, value in attributes[Field.METADATA_KEY.value].items(): + for 
key, value in attributes[Field.METADATA_KEY].items(): tags.append(str(key) + "=" + str(value)) attribute_columns = [ - (Field.CONTENT_KEY.value, attributes[Field.CONTENT_KEY.value]), - (Field.VECTOR.value, json.dumps(attributes[Field.VECTOR.value])), + (Field.CONTENT_KEY, attributes[Field.CONTENT_KEY]), + (Field.VECTOR, json.dumps(attributes[Field.VECTOR])), ( - Field.METADATA_KEY.value, - json.dumps(attributes[Field.METADATA_KEY.value]), + Field.METADATA_KEY, + json.dumps(attributes[Field.METADATA_KEY]), ), (self._tags_field, json.dumps(tags)), ] @@ -270,7 +266,7 @@ class TableStoreVector(BaseVector): index_name=self._index_name, search_query=query, columns_to_get=tablestore.ColumnsToGet( - column_names=[Field.PRIMARY_KEY.value], return_type=tablestore.ColumnReturnType.SPECIFIED + column_names=[Field.PRIMARY_KEY], return_type=tablestore.ColumnReturnType.SPECIFIED ), ) @@ -288,7 +284,7 @@ class TableStoreVector(BaseVector): self, query_vector: list[float], document_ids_filter: list[str] | None, top_k: int, score_threshold: float ) -> list[Document]: knn_vector_query = tablestore.KnnVectorQuery( - field_name=Field.VECTOR.value, + field_name=Field.VECTOR, top_k=top_k, float32_query_vector=query_vector, ) @@ -311,8 +307,8 @@ class TableStoreVector(BaseVector): for col in search_hit.row[1]: ots_column_map[col[0]] = col[1] - vector_str = ots_column_map.get(Field.VECTOR.value) - metadata_str = ots_column_map.get(Field.METADATA_KEY.value) + vector_str = ots_column_map.get(Field.VECTOR) + metadata_str = ots_column_map.get(Field.METADATA_KEY) vector = json.loads(vector_str) if vector_str else None metadata = json.loads(metadata_str) if metadata_str else {} @@ -321,7 +317,7 @@ class TableStoreVector(BaseVector): documents.append( Document( - page_content=ots_column_map.get(Field.CONTENT_KEY.value) or "", + page_content=ots_column_map.get(Field.CONTENT_KEY) or "", vector=vector, metadata=metadata, ) @@ -343,7 +339,7 @@ class TableStoreVector(BaseVector): self, query: str, document_ids_filter: list[str] | None, top_k: int, score_threshold: float ) -> list[Document]: bool_query = tablestore.BoolQuery(must_queries=[], filter_queries=[], should_queries=[], must_not_queries=[]) - bool_query.must_queries.append(tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY.value)) + bool_query.must_queries.append(tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY)) if document_ids_filter: bool_query.filter_queries.append(tablestore.TermsQuery(self._tags_field, document_ids_filter)) @@ -374,10 +370,10 @@ class TableStoreVector(BaseVector): for col in search_hit.row[1]: ots_column_map[col[0]] = col[1] - metadata_str = ots_column_map.get(Field.METADATA_KEY.value) + metadata_str = ots_column_map.get(Field.METADATA_KEY) metadata = json.loads(metadata_str) if metadata_str else {} - vector_str = ots_column_map.get(Field.VECTOR.value) + vector_str = ots_column_map.get(Field.VECTOR) vector = json.loads(vector_str) if vector_str else None if score: @@ -385,7 +381,7 @@ class TableStoreVector(BaseVector): documents.append( Document( - page_content=ots_column_map.get(Field.CONTENT_KEY.value) or "", + page_content=ots_column_map.get(Field.CONTENT_KEY) or "", vector=vector, metadata=metadata, ) diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py index f90a311df4..56ffb36a2b 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py +++ 
b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -5,9 +5,10 @@ from collections.abc import Generator, Iterable, Sequence from itertools import islice from typing import TYPE_CHECKING, Any, Union +import httpx import qdrant_client -import requests from flask import current_app +from httpx import DigestAuth from pydantic import BaseModel from qdrant_client.http import models as rest from qdrant_client.http.models import ( @@ -19,7 +20,6 @@ from qdrant_client.http.models import ( TokenizerType, ) from qdrant_client.local.qdrant_local import QdrantLocal -from requests.auth import HTTPDigestAuth from sqlalchemy import select from configs import dify_config @@ -141,15 +141,13 @@ class TidbOnQdrantVector(BaseVector): # create group_id payload index self._client.create_payload_index( - collection_name, Field.GROUP_KEY.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.GROUP_KEY, field_schema=PayloadSchemaType.KEYWORD ) # create doc_id payload index - self._client.create_payload_index( - collection_name, Field.DOC_ID.value, field_schema=PayloadSchemaType.KEYWORD - ) + self._client.create_payload_index(collection_name, Field.DOC_ID, field_schema=PayloadSchemaType.KEYWORD) # create document_id payload index self._client.create_payload_index( - collection_name, Field.DOCUMENT_ID.value, field_schema=PayloadSchemaType.KEYWORD + collection_name, Field.DOCUMENT_ID, field_schema=PayloadSchemaType.KEYWORD ) # create full text index text_index_params = TextIndexParams( @@ -159,9 +157,7 @@ class TidbOnQdrantVector(BaseVector): max_token_len=20, lowercase=True, ) - self._client.create_payload_index( - collection_name, Field.CONTENT_KEY.value, field_schema=text_index_params - ) + self._client.create_payload_index(collection_name, Field.CONTENT_KEY, field_schema=text_index_params) redis_client.set(collection_exist_cache_key, 1, ex=3600) def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): @@ -211,10 +207,10 @@ class TidbOnQdrantVector(BaseVector): self._build_payloads( batch_texts, batch_metadatas, - Field.CONTENT_KEY.value, - Field.METADATA_KEY.value, + Field.CONTENT_KEY, + Field.METADATA_KEY, group_id or "", - Field.GROUP_KEY.value, + Field.GROUP_KEY, ), ) ] @@ -349,13 +345,13 @@ class TidbOnQdrantVector(BaseVector): for result in results: if result.payload is None: continue - metadata = result.payload.get(Field.METADATA_KEY.value) or {} + metadata = result.payload.get(Field.METADATA_KEY) or {} # duplicate check score threshold score_threshold = kwargs.get("score_threshold") or 0.0 if result.score >= score_threshold: metadata["score"] = result.score doc = Document( - page_content=result.payload.get(Field.CONTENT_KEY.value, ""), + page_content=result.payload.get(Field.CONTENT_KEY, ""), metadata=metadata, ) docs.append(doc) @@ -392,7 +388,7 @@ class TidbOnQdrantVector(BaseVector): documents = [] for result in results: if result: - document = self._document_from_scored_point(result, Field.CONTENT_KEY.value, Field.METADATA_KEY.value) + document = self._document_from_scored_point(result, Field.CONTENT_KEY, Field.METADATA_KEY) documents.append(document) return documents @@ -504,10 +500,10 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory): } cluster_data = {"displayName": display_name, "region": region_object, "labels": labels} - response = requests.post( + response = httpx.post( f"{tidb_config.api_url}/clusters", json=cluster_data, - auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + 
auth=DigestAuth(tidb_config.public_key, tidb_config.private_key), ) if response.status_code == 200: @@ -527,10 +523,10 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory): body = {"password": new_password} - response = requests.put( + response = httpx.put( f"{tidb_config.api_url}/clusters/{cluster_id}/password", json=body, - auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + auth=DigestAuth(tidb_config.public_key, tidb_config.private_key), ) if response.status_code == 200: diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py index e1d4422144..754c149241 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py @@ -2,8 +2,8 @@ import time import uuid from collections.abc import Sequence -import requests -from requests.auth import HTTPDigestAuth +import httpx +from httpx import DigestAuth from configs import dify_config from extensions.ext_database import db @@ -49,7 +49,7 @@ class TidbService: "rootPassword": password, } - response = requests.post(f"{api_url}/clusters", json=cluster_data, auth=HTTPDigestAuth(public_key, private_key)) + response = httpx.post(f"{api_url}/clusters", json=cluster_data, auth=DigestAuth(public_key, private_key)) if response.status_code == 200: response_data = response.json() @@ -83,7 +83,7 @@ class TidbService: :return: The response from the API. """ - response = requests.delete(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key)) + response = httpx.delete(f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key)) if response.status_code == 200: return response.json() @@ -102,7 +102,7 @@ class TidbService: :return: The response from the API. 
""" - response = requests.get(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key)) + response = httpx.get(f"{api_url}/clusters/{cluster_id}", auth=DigestAuth(public_key, private_key)) if response.status_code == 200: return response.json() @@ -127,10 +127,10 @@ class TidbService: body = {"password": new_password, "builtinRole": "role_admin", "customRoles": []} - response = requests.patch( + response = httpx.patch( f"{api_url}/clusters/{cluster_id}/sqlUsers/{account}", json=body, - auth=HTTPDigestAuth(public_key, private_key), + auth=DigestAuth(public_key, private_key), ) if response.status_code == 200: @@ -161,9 +161,7 @@ class TidbService: tidb_serverless_list_map = {item.cluster_id: item for item in tidb_serverless_list} cluster_ids = [item.cluster_id for item in tidb_serverless_list] params = {"clusterIds": cluster_ids, "view": "BASIC"} - response = requests.get( - f"{api_url}/clusters:batchGet", params=params, auth=HTTPDigestAuth(public_key, private_key) - ) + response = httpx.get(f"{api_url}/clusters:batchGet", params=params, auth=DigestAuth(public_key, private_key)) if response.status_code == 200: response_data = response.json() @@ -224,8 +222,8 @@ class TidbService: clusters.append(cluster_data) request_body = {"requests": clusters} - response = requests.post( - f"{api_url}/clusters:batchCreate", json=request_body, auth=HTTPDigestAuth(public_key, private_key) + response = httpx.post( + f"{api_url}/clusters:batchCreate", json=request_body, auth=DigestAuth(public_key, private_key) ) if response.status_code == 200: diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py index b8897c4165..27ae038a06 100644 --- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py +++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py @@ -55,13 +55,13 @@ class TiDBVector(BaseVector): return Table( self._collection_name, self._orm_base.metadata, - Column(Field.PRIMARY_KEY.value, String(36), primary_key=True, nullable=False), + Column(Field.PRIMARY_KEY, String(36), primary_key=True, nullable=False), Column( - Field.VECTOR.value, + Field.VECTOR, VectorType(dim), nullable=False, ), - Column(Field.TEXT_KEY.value, TEXT, nullable=False), + Column(Field.TEXT_KEY, TEXT, nullable=False), Column("meta", JSON, nullable=False), Column("create_time", DateTime, server_default=sqlalchemy.text("CURRENT_TIMESTAMP")), Column( diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index dc4f026ff3..0beb388693 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -71,6 +71,12 @@ class Vector: from core.rag.datasource.vdb.milvus.milvus_vector import MilvusVectorFactory return MilvusVectorFactory + case VectorType.ALIBABACLOUD_MYSQL: + from core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector import ( + AlibabaCloudMySQLVectorFactory, + ) + + return AlibabaCloudMySQLVectorFactory case VectorType.MYSCALE: from core.rag.datasource.vdb.myscale.myscale_vector import MyScaleVectorFactory diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py index a415142196..bc7d93a2e0 100644 --- a/api/core/rag/datasource/vdb/vector_type.py +++ b/api/core/rag/datasource/vdb/vector_type.py @@ -2,6 +2,7 @@ from enum import StrEnum class VectorType(StrEnum): + ALIBABACLOUD_MYSQL = "alibabacloud_mysql" ANALYTICDB = "analyticdb" CHROMA = "chroma" MILVUS = "milvus" diff --git 
a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py index d1bdd3baef..e5feecf2bc 100644 --- a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py +++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py @@ -76,11 +76,11 @@ class VikingDBVector(BaseVector): if not self._has_collection(): fields = [ - Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True), - Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text), - Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=dimension), + Field(field_name=vdb_Field.PRIMARY_KEY, field_type=FieldType.String, is_primary_key=True), + Field(field_name=vdb_Field.METADATA_KEY, field_type=FieldType.String), + Field(field_name=vdb_Field.GROUP_KEY, field_type=FieldType.String), + Field(field_name=vdb_Field.CONTENT_KEY, field_type=FieldType.Text), + Field(field_name=vdb_Field.VECTOR, field_type=FieldType.Vector, dim=dimension), ] self._client.create_collection( @@ -100,7 +100,7 @@ class VikingDBVector(BaseVector): collection_name=self._collection_name, index_name=self._index_name, vector_index=vector_index, - partition_by=vdb_Field.GROUP_KEY.value, + partition_by=vdb_Field.GROUP_KEY, description="Index For Dify", ) redis_client.set(collection_exist_cache_key, 1, ex=3600) @@ -126,11 +126,11 @@ class VikingDBVector(BaseVector): # FIXME: fix the type of metadata later doc = Data( { - vdb_Field.PRIMARY_KEY.value: metadatas[i]["doc_id"], # type: ignore - vdb_Field.VECTOR.value: embeddings[i] if embeddings else None, - vdb_Field.CONTENT_KEY.value: page_content, - vdb_Field.METADATA_KEY.value: json.dumps(metadata), - vdb_Field.GROUP_KEY.value: self._group_id, + vdb_Field.PRIMARY_KEY: metadatas[i]["doc_id"], # type: ignore + vdb_Field.VECTOR: embeddings[i] if embeddings else None, + vdb_Field.CONTENT_KEY: page_content, + vdb_Field.METADATA_KEY: json.dumps(metadata), + vdb_Field.GROUP_KEY: self._group_id, } ) docs.append(doc) @@ -151,7 +151,7 @@ class VikingDBVector(BaseVector): # Note: Metadata field value is an dict, but vikingdb field # not support json type results = self._client.get_index(self._collection_name, self._index_name).search( - filter={"op": "must", "field": vdb_Field.GROUP_KEY.value, "conds": [self._group_id]}, + filter={"op": "must", "field": vdb_Field.GROUP_KEY, "conds": [self._group_id]}, # max value is 5000 limit=5000, ) @@ -161,7 +161,7 @@ class VikingDBVector(BaseVector): ids = [] for result in results: - metadata = result.fields.get(vdb_Field.METADATA_KEY.value) + metadata = result.fields.get(vdb_Field.METADATA_KEY) if metadata is not None: metadata = json.loads(metadata) if metadata.get(key) == value: @@ -189,12 +189,12 @@ class VikingDBVector(BaseVector): docs = [] for result in results: - metadata = result.fields.get(vdb_Field.METADATA_KEY.value) + metadata = result.fields.get(vdb_Field.METADATA_KEY) if metadata is not None: metadata = json.loads(metadata) if result.score >= score_threshold: metadata["score"] = result.score - doc = Document(page_content=result.fields.get(vdb_Field.CONTENT_KEY.value), metadata=metadata) + doc = Document(page_content=result.fields.get(vdb_Field.CONTENT_KEY), metadata=metadata) docs.append(doc) docs = sorted(docs, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True) return docs diff 
--git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index 3ec08b93ed..4793d2bb50 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -1,10 +1,24 @@ +""" +Weaviate vector database implementation for Dify's RAG system. + +This module provides integration with Weaviate vector database for storing and retrieving +document embeddings used in retrieval-augmented generation workflows. +""" + import datetime import json +import logging +import uuid as _uuid from typing import Any +from urllib.parse import urlparse -import requests -import weaviate # type: ignore +import weaviate +import weaviate.classes.config as wc from pydantic import BaseModel, model_validator +from weaviate.classes.data import DataObject +from weaviate.classes.init import Auth +from weaviate.classes.query import Filter, MetadataQuery +from weaviate.exceptions import UnexpectedStatusCodeError from configs import dify_config from core.rag.datasource.vdb.field import Field @@ -16,265 +30,394 @@ from core.rag.models.document import Document from extensions.ext_redis import redis_client from models.dataset import Dataset +logger = logging.getLogger(__name__) + class WeaviateConfig(BaseModel): + """ + Configuration model for Weaviate connection settings. + + Attributes: + endpoint: Weaviate server endpoint URL + api_key: Optional API key for authentication + batch_size: Number of objects to batch per insert operation + """ + endpoint: str api_key: str | None = None batch_size: int = 100 @model_validator(mode="before") @classmethod - def validate_config(cls, values: dict): + def validate_config(cls, values: dict) -> dict: + """Validates that required configuration values are present.""" if not values["endpoint"]: raise ValueError("config WEAVIATE_ENDPOINT is required") return values class WeaviateVector(BaseVector): + """ + Weaviate vector database implementation for document storage and retrieval. + + Handles creation, insertion, deletion, and querying of document embeddings + in a Weaviate collection. + """ + def __init__(self, collection_name: str, config: WeaviateConfig, attributes: list): + """ + Initializes the Weaviate vector store. + + Args: + collection_name: Name of the Weaviate collection + config: Weaviate configuration settings + attributes: List of metadata attributes to store + """ super().__init__(collection_name) self._client = self._init_client(config) self._attributes = attributes - def _init_client(self, config: WeaviateConfig) -> weaviate.Client: - auth_config = weaviate.AuthApiKey(api_key=config.api_key or "") + def _init_client(self, config: WeaviateConfig) -> weaviate.WeaviateClient: + """ + Initializes and returns a connected Weaviate client. - weaviate.connect.connection.has_grpc = False # ty: ignore [unresolved-attribute] + Configures both HTTP and gRPC connections with proper authentication. 
+ """ + p = urlparse(config.endpoint) + host = p.hostname or config.endpoint.replace("https://", "").replace("http://", "") + http_secure = p.scheme == "https" + http_port = p.port or (443 if http_secure else 80) - try: - client = weaviate.Client( - url=config.endpoint, auth_client_secret=auth_config, timeout_config=(5, 60), startup_period=None - ) - except requests.ConnectionError: - raise ConnectionError("Vector database connection error") + grpc_host = host + grpc_secure = http_secure + grpc_port = 443 if grpc_secure else 50051 - client.batch.configure( - # `batch_size` takes an `int` value to enable auto-batching - # (`None` is used for manual batching) - batch_size=config.batch_size, - # dynamically update the `batch_size` based on import speed - dynamic=True, - # `timeout_retries` takes an `int` value to retry on time outs - timeout_retries=3, + client = weaviate.connect_to_custom( + http_host=host, + http_port=http_port, + http_secure=http_secure, + grpc_host=grpc_host, + grpc_port=grpc_port, + grpc_secure=grpc_secure, + auth_credentials=Auth.api_key(config.api_key) if config.api_key else None, ) + if not client.is_ready(): + raise ConnectionError("Vector database is not ready") + return client def get_type(self) -> str: + """Returns the vector database type identifier.""" return VectorType.WEAVIATE def get_collection_name(self, dataset: Dataset) -> str: + """ + Retrieves or generates the collection name for a dataset. + + Uses existing index structure if available, otherwise generates from dataset ID. + """ if dataset.index_struct_dict: class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] if not class_prefix.endswith("_Node"): - # original class_prefix class_prefix += "_Node" - return class_prefix dataset_id = dataset.id return Dataset.gen_collection_name_by_id(dataset_id) - def to_index_struct(self): + def to_index_struct(self) -> dict: + """Returns the index structure dictionary for persistence.""" return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}} def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): - # create collection + """ + Creates a new collection and adds initial documents with embeddings. + """ self._create_collection() - # create vector self.add_texts(texts, embeddings) def _create_collection(self): + """ + Creates the Weaviate collection with required schema if it doesn't exist. + + Uses Redis locking to prevent concurrent creation attempts. 
+ """ lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = f"vector_indexing_{self._collection_name}" - if redis_client.get(collection_exist_cache_key): + cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(cache_key): return - schema = self._default_schema(self._collection_name) - if not self._client.schema.contains(schema): - # create collection - self._client.schema.create_class(schema) - redis_client.set(collection_exist_cache_key, 1, ex=3600) + + try: + if not self._client.collections.exists(self._collection_name): + self._client.collections.create( + name=self._collection_name, + properties=[ + wc.Property( + name=Field.TEXT_KEY.value, + data_type=wc.DataType.TEXT, + tokenization=wc.Tokenization.WORD, + ), + wc.Property(name="document_id", data_type=wc.DataType.TEXT), + wc.Property(name="doc_id", data_type=wc.DataType.TEXT), + wc.Property(name="chunk_index", data_type=wc.DataType.INT), + ], + vector_config=wc.Configure.Vectors.self_provided(), + ) + + self._ensure_properties() + redis_client.set(cache_key, 1, ex=3600) + except Exception as e: + logger.exception("Error creating collection %s", self._collection_name) + raise + + def _ensure_properties(self) -> None: + """ + Ensures all required properties exist in the collection schema. + + Adds missing properties if the collection exists but lacks them. + """ + if not self._client.collections.exists(self._collection_name): + return + + col = self._client.collections.use(self._collection_name) + cfg = col.config.get() + existing = {p.name for p in (cfg.properties or [])} + + to_add = [] + if "document_id" not in existing: + to_add.append(wc.Property(name="document_id", data_type=wc.DataType.TEXT)) + if "doc_id" not in existing: + to_add.append(wc.Property(name="doc_id", data_type=wc.DataType.TEXT)) + if "chunk_index" not in existing: + to_add.append(wc.Property(name="chunk_index", data_type=wc.DataType.INT)) + + for prop in to_add: + try: + col.config.add_property(prop) + except Exception as e: + logger.warning("Could not add property %s: %s", prop.name, e) + + def _get_uuids(self, documents: list[Document]) -> list[str]: + """ + Generates deterministic UUIDs for documents based on their content. + + Uses UUID5 with URL namespace to ensure consistent IDs for identical content. + """ + URL_NAMESPACE = _uuid.UUID("6ba7b811-9dad-11d1-80b4-00c04fd430c8") + + uuids = [] + for doc in documents: + uuid_val = _uuid.uuid5(URL_NAMESPACE, doc.page_content) + uuids.append(str(uuid_val)) + + return uuids def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + """ + Adds documents with their embeddings to the collection. + + Batches insertions for efficiency and returns the list of inserted object IDs. 
+ """ uuids = self._get_uuids(documents) texts = [d.page_content for d in documents] metadatas = [d.metadata for d in documents] - ids = [] + col = self._client.collections.use(self._collection_name) + objs: list[DataObject] = [] + ids_out: list[str] = [] - with self._client.batch as batch: - for i, text in enumerate(texts): - data_properties = {Field.TEXT_KEY.value: text} - if metadatas is not None: - # metadata maybe None - for key, val in (metadatas[i] or {}).items(): - data_properties[key] = self._json_serializable(val) + for i, text in enumerate(texts): + props: dict[str, Any] = {Field.TEXT_KEY.value: text} + meta = metadatas[i] or {} + for k, v in meta.items(): + props[k] = self._json_serializable(v) - batch.add_data_object( - data_object=data_properties, - class_name=self._collection_name, - uuid=uuids[i], - vector=embeddings[i] if embeddings else None, + candidate = uuids[i] if uuids else None + uid = candidate if (candidate and self._is_uuid(candidate)) else str(_uuid.uuid4()) + ids_out.append(uid) + + vec_payload = None + if embeddings and i < len(embeddings) and embeddings[i]: + vec_payload = {"default": embeddings[i]} + + objs.append( + DataObject( + uuid=uid, + properties=props, # type: ignore[arg-type] # mypy incorrectly infers DataObject signature + vector=vec_payload, ) - ids.append(uuids[i]) - return ids + ) - def delete_by_metadata_field(self, key: str, value: str): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - where_filter = {"operator": "Equal", "path": [key], "valueText": value} + batch_size = max(1, int(dify_config.WEAVIATE_BATCH_SIZE or 100)) + with col.batch.dynamic() as batch: + for obj in objs: + batch.add_object(properties=obj.properties, uuid=obj.uuid, vector=obj.vector) - self._client.batch.delete_objects(class_name=self._collection_name, where=where_filter, output="minimal") + return ids_out + + def _is_uuid(self, val: str) -> bool: + """Validates whether a string is a valid UUID format.""" + try: + _uuid.UUID(str(val)) + return True + except Exception: + return False + + def delete_by_metadata_field(self, key: str, value: str) -> None: + """Deletes all objects matching a specific metadata field value.""" + if not self._client.collections.exists(self._collection_name): + return + + col = self._client.collections.use(self._collection_name) + col.data.delete_many(where=Filter.by_property(key).equal(value)) def delete(self): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - self._client.schema.delete_class(self._collection_name) + """Deletes the entire collection from Weaviate.""" + if self._client.collections.exists(self._collection_name): + self._client.collections.delete(self._collection_name) def text_exists(self, id: str) -> bool: - collection_name = self._collection_name - schema = self._default_schema(self._collection_name) - - # check whether the index already exists - if not self._client.schema.contains(schema): + """Checks if a document with the given doc_id exists in the collection.""" + if not self._client.collections.exists(self._collection_name): return False - result = ( - self._client.query.get(collection_name) - .with_additional(["id"]) - .with_where( - { - "path": ["doc_id"], - "operator": "Equal", - "valueText": id, - } - ) - .with_limit(1) - .do() + + col = self._client.collections.use(self._collection_name) + res = col.query.fetch_objects( + 
filters=Filter.by_property("doc_id").equal(id), + limit=1, + return_properties=["doc_id"], ) - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") + return len(res.objects) > 0 - entries = result["data"]["Get"][collection_name] - if len(entries) == 0: - return False + def delete_by_ids(self, ids: list[str]) -> None: + """ + Deletes objects by their UUID identifiers. - return True + Silently ignores 404 errors for non-existent IDs. + """ + if not self._client.collections.exists(self._collection_name): + return - def delete_by_ids(self, ids: list[str]): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - for uuid in ids: - try: - self._client.data_object.delete( - class_name=self._collection_name, - uuid=uuid, - ) - except weaviate.UnexpectedStatusCodeException as e: - # tolerate not found error - if e.status_code != 404: - raise e + col = self._client.collections.use(self._collection_name) + + for uid in ids: + try: + col.data.delete_by_id(uid) + except UnexpectedStatusCodeError as e: + if getattr(e, "status_code", None) != 404: + raise def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: - """Look up similar documents by embedding vector in Weaviate.""" - collection_name = self._collection_name - properties = self._attributes - properties.append(Field.TEXT_KEY.value) - query_obj = self._client.query.get(collection_name, properties) + """ + Performs vector similarity search using the provided query vector. - vector = {"vector": query_vector} - document_ids_filter = kwargs.get("document_ids_filter") - if document_ids_filter: - operands = [] - for document_id_filter in document_ids_filter: - operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter}) - where_filter = {"operator": "Or", "operands": operands} - query_obj = query_obj.with_where(where_filter) - result = ( - query_obj.with_near_vector(vector) - .with_limit(kwargs.get("top_k", 4)) - .with_additional(["vector", "distance"]) - .do() + Filters by document IDs if provided and applies score threshold. + Returns documents sorted by relevance score. 
+ """ + if not self._client.collections.exists(self._collection_name): + return [] + + col = self._client.collections.use(self._collection_name) + props = list({*self._attributes, "document_id", Field.TEXT_KEY.value}) + + where = None + doc_ids = kwargs.get("document_ids_filter") or [] + if doc_ids: + ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] + where = ors[0] + for f in ors[1:]: + where = where | f + + top_k = int(kwargs.get("top_k", 4)) + score_threshold = float(kwargs.get("score_threshold") or 0.0) + + res = col.query.near_vector( + near_vector=query_vector, + limit=top_k, + return_properties=props, + return_metadata=MetadataQuery(distance=True), + include_vector=False, + filters=where, + target_vector="default", ) - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") - docs_and_scores = [] - for res in result["data"]["Get"][collection_name]: - text = res.pop(Field.TEXT_KEY.value) - score = 1 - res["_additional"]["distance"] - docs_and_scores.append((Document(page_content=text, metadata=res), score)) + docs: list[Document] = [] + for obj in res.objects: + properties = dict(obj.properties or {}) + text = properties.pop(Field.TEXT_KEY.value, "") + distance = (obj.metadata.distance if obj.metadata else None) or 1.0 + score = 1.0 - distance - docs = [] - for doc, score in docs_and_scores: - score_threshold = float(kwargs.get("score_threshold") or 0.0) - # check score threshold - if score >= score_threshold: - if doc.metadata is not None: - doc.metadata["score"] = score - docs.append(doc) - # Sort the documents by score in descending order - docs = sorted(docs, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True) + if score > score_threshold: + properties["score"] = score + docs.append(Document(page_content=text, metadata=properties)) + + docs.sort(key=lambda d: d.metadata.get("score", 0.0), reverse=True) return docs def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: - """Return docs using BM25F. - - Args: - query: Text to look up documents similar to. - - Returns: - List of Documents most similar to the query. """ - collection_name = self._collection_name - content: dict[str, Any] = {"concepts": [query]} - properties = self._attributes - properties.append(Field.TEXT_KEY.value) - if kwargs.get("search_distance"): - content["certainty"] = kwargs.get("search_distance") - query_obj = self._client.query.get(collection_name, properties) - document_ids_filter = kwargs.get("document_ids_filter") - if document_ids_filter: - operands = [] - for document_id_filter in document_ids_filter: - operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter}) - where_filter = {"operator": "Or", "operands": operands} - query_obj = query_obj.with_where(where_filter) - query_obj = query_obj.with_additional(["vector"]) - properties = ["text"] - result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 4)).do() - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") - docs = [] - for res in result["data"]["Get"][collection_name]: - text = res.pop(Field.TEXT_KEY.value) - additional = res.pop("_additional") - docs.append(Document(page_content=text, vector=additional["vector"], metadata=res)) + Performs BM25 full-text search on document content. + + Filters by document IDs if provided and returns matching documents with vectors. 
+ """ + if not self._client.collections.exists(self._collection_name): + return [] + + col = self._client.collections.use(self._collection_name) + props = list({*self._attributes, Field.TEXT_KEY.value}) + + where = None + doc_ids = kwargs.get("document_ids_filter") or [] + if doc_ids: + ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] + where = ors[0] + for f in ors[1:]: + where = where | f + + top_k = int(kwargs.get("top_k", 4)) + + res = col.query.bm25( + query=query, + query_properties=[Field.TEXT_KEY.value], + limit=top_k, + return_properties=props, + include_vector=True, + filters=where, + ) + + docs: list[Document] = [] + for obj in res.objects: + properties = dict(obj.properties or {}) + text = properties.pop(Field.TEXT_KEY.value, "") + + vec = obj.vector + if isinstance(vec, dict): + vec = vec.get("default") or next(iter(vec.values()), None) + + docs.append(Document(page_content=text, vector=vec, metadata=properties)) return docs - def _default_schema(self, index_name: str): - return { - "class": index_name, - "properties": [ - { - "name": "text", - "dataType": ["text"], - } - ], - } - - def _json_serializable(self, value: Any): + def _json_serializable(self, value: Any) -> Any: + """Converts values to JSON-serializable format, handling datetime objects.""" if isinstance(value, datetime.datetime): return value.isoformat() return value class WeaviateVectorFactory(AbstractVectorFactory): + """Factory class for creating WeaviateVector instances.""" + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> WeaviateVector: + """ + Initializes a WeaviateVector instance for the given dataset. + + Uses existing collection name from dataset index structure or generates a new one. + Updates dataset index structure if not already set. 
+ """ if dataset.index_struct_dict: class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] collection_name = class_prefix @@ -282,7 +425,6 @@ class WeaviateVectorFactory(AbstractVectorFactory): dataset_id = dataset.id collection_name = Dataset.gen_collection_name_by_id(dataset_id) dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.WEAVIATE, collection_name)) - return WeaviateVector( collection_name=collection_name, config=WeaviateConfig( diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index 5f94129a0c..937b8f033c 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -42,6 +42,9 @@ class CacheEmbedding(Embeddings): text_embeddings[i] = embedding.get_embedding() else: embedding_queue_indices.append(i) + + # NOTE: avoid closing the shared scoped session here; downstream code may still have pending work + if embedding_queue_indices: embedding_queue_texts = [texts[i] for i in embedding_queue_indices] embedding_queue_embeddings = [] diff --git a/api/core/rag/entities/event.py b/api/core/rag/entities/event.py index 24db5d77be..2d8d4060dd 100644 --- a/api/core/rag/entities/event.py +++ b/api/core/rag/entities/event.py @@ -1,11 +1,11 @@ from collections.abc import Mapping -from enum import Enum +from enum import StrEnum from typing import Any from pydantic import BaseModel, Field -class DatasourceStreamEvent(Enum): +class DatasourceStreamEvent(StrEnum): """ Datasource Stream event """ @@ -20,12 +20,12 @@ class BaseDatasourceEvent(BaseModel): class DatasourceErrorEvent(BaseDatasourceEvent): - event: str = DatasourceStreamEvent.ERROR.value + event: DatasourceStreamEvent = DatasourceStreamEvent.ERROR error: str = Field(..., description="error message") class DatasourceCompletedEvent(BaseDatasourceEvent): - event: str = DatasourceStreamEvent.COMPLETED.value + event: DatasourceStreamEvent = DatasourceStreamEvent.COMPLETED data: Mapping[str, Any] | list = Field(..., description="result") total: int | None = Field(default=0, description="total") completed: int | None = Field(default=0, description="completed") @@ -33,6 +33,6 @@ class DatasourceCompletedEvent(BaseDatasourceEvent): class DatasourceProcessingEvent(BaseDatasourceEvent): - event: str = DatasourceStreamEvent.PROCESSING.value + event: DatasourceStreamEvent = DatasourceStreamEvent.PROCESSING total: int | None = Field(..., description="total") completed: int | None = Field(..., description="completed") diff --git a/api/core/rag/extractor/entity/extract_setting.py b/api/core/rag/extractor/entity/extract_setting.py index b9bf9d0d8c..c3bfbce98f 100644 --- a/api/core/rag/extractor/entity/extract_setting.py +++ b/api/core/rag/extractor/entity/extract_setting.py @@ -17,9 +17,6 @@ class NotionInfo(BaseModel): tenant_id: str model_config = ConfigDict(arbitrary_types_allowed=True) - def __init__(self, **data): - super().__init__(**data) - class WebsiteInfo(BaseModel): """ @@ -47,6 +44,3 @@ class ExtractSetting(BaseModel): website_info: WebsiteInfo | None = None document_model: str | None = None model_config = ConfigDict(arbitrary_types_allowed=True) - - def __init__(self, **data): - super().__init__(**data) diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py index 3dc08e1832..0f62f9c4b6 100644 --- a/api/core/rag/extractor/extract_processor.py +++ b/api/core/rag/extractor/extract_processor.py @@ -45,7 +45,7 @@ class ExtractProcessor: cls, upload_file: 
UploadFile, return_text: bool = False, is_automatic: bool = False ) -> Union[list[Document], str]: extract_setting = ExtractSetting( - datasource_type=DatasourceType.FILE.value, upload_file=upload_file, document_model="text_model" + datasource_type=DatasourceType.FILE, upload_file=upload_file, document_model="text_model" ) if return_text: delimiter = "\n" @@ -76,7 +76,7 @@ class ExtractProcessor: # https://stackoverflow.com/questions/26541416/generate-temporary-file-names-without-creating-actual-file-in-python#comment90414256_26541521 file_path = f"{temp_dir}/{tempfile.gettempdir()}{suffix}" Path(file_path).write_bytes(response.content) - extract_setting = ExtractSetting(datasource_type=DatasourceType.FILE.value, document_model="text_model") + extract_setting = ExtractSetting(datasource_type=DatasourceType.FILE, document_model="text_model") if return_text: delimiter = "\n" return delimiter.join( @@ -92,7 +92,7 @@ class ExtractProcessor: def extract( cls, extract_setting: ExtractSetting, is_automatic: bool = False, file_path: str | None = None ) -> list[Document]: - if extract_setting.datasource_type == DatasourceType.FILE.value: + if extract_setting.datasource_type == DatasourceType.FILE: with tempfile.TemporaryDirectory() as temp_dir: if not file_path: assert extract_setting.upload_file is not None, "upload_file is required" @@ -163,7 +163,7 @@ class ExtractProcessor: # txt extractor = TextExtractor(file_path, autodetect_encoding=True) return extractor.extract() - elif extract_setting.datasource_type == DatasourceType.NOTION.value: + elif extract_setting.datasource_type == DatasourceType.NOTION: assert extract_setting.notion_info is not None, "notion_info is required" extractor = NotionExtractor( notion_workspace_id=extract_setting.notion_info.notion_workspace_id, @@ -174,7 +174,7 @@ class ExtractProcessor: credential_id=extract_setting.notion_info.credential_id, ) return extractor.extract() - elif extract_setting.datasource_type == DatasourceType.WEBSITE.value: + elif extract_setting.datasource_type == DatasourceType.WEBSITE: assert extract_setting.website_info is not None, "website_info is required" if extract_setting.website_info.provider == "firecrawl": extractor = FirecrawlWebExtractor( diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py index e1ba6ef243..789ac8557d 100644 --- a/api/core/rag/extractor/firecrawl/firecrawl_app.py +++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py @@ -2,7 +2,7 @@ import json import time from typing import Any, cast -import requests +import httpx from extensions.ext_storage import storage @@ -25,7 +25,7 @@ class FirecrawlApp: } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/scrape", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/scrape", json_data, headers) if response.status_code == 200: response_data = response.json() data = response_data["data"] @@ -42,7 +42,7 @@ class FirecrawlApp: json_data = {"url": url} if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/crawl", json_data, headers) if response.status_code == 200: # There's also another two fields in the response: "success" (bool) and "url" (str) job_id = response.json().get("id") @@ -51,9 +51,25 @@ class FirecrawlApp: self._handle_error(response, "start crawl job") return "" # unreachable + def map(self, url: str, params: dict[str, Any] | None = 
None) -> dict[str, Any]: + # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/map + headers = self._prepare_headers() + json_data: dict[str, Any] = {"url": url, "integration": "dify"} + if params: + # Pass through provided params, including optional "sitemap": "only" | "include" | "skip" + json_data.update(params) + response = self._post_request(f"{self.base_url}/v2/map", json_data, headers) + if response.status_code == 200: + return cast(dict[str, Any], response.json()) + elif response.status_code in {402, 409, 500, 429, 408}: + self._handle_error(response, "start map job") + return {} + else: + raise Exception(f"Failed to start map job. Status code: {response.status_code}") + def check_crawl_status(self, job_id) -> dict[str, Any]: headers = self._prepare_headers() - response = self._get_request(f"{self.base_url}/v1/crawl/{job_id}", headers) + response = self._get_request(f"{self.base_url}/v2/crawl/{job_id}", headers) if response.status_code == 200: crawl_status_response = response.json() if crawl_status_response.get("status") == "completed": @@ -104,18 +120,18 @@ class FirecrawlApp: def _prepare_headers(self) -> dict[str, Any]: return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} - def _post_request(self, url, data, headers, retries=3, backoff_factor=0.5) -> requests.Response: + def _post_request(self, url, data, headers, retries=3, backoff_factor=0.5) -> httpx.Response: for attempt in range(retries): - response = requests.post(url, headers=headers, json=data) + response = httpx.post(url, headers=headers, json=data) if response.status_code == 502: time.sleep(backoff_factor * (2**attempt)) else: return response return response - def _get_request(self, url, headers, retries=3, backoff_factor=0.5) -> requests.Response: + def _get_request(self, url, headers, retries=3, backoff_factor=0.5) -> httpx.Response: for attempt in range(retries): - response = requests.get(url, headers=headers) + response = httpx.get(url, headers=headers) if response.status_code == 502: time.sleep(backoff_factor * (2**attempt)) else: @@ -135,12 +151,16 @@ class FirecrawlApp: "lang": "en", "country": "us", "timeout": 60000, - "ignoreInvalidURLs": False, + "ignoreInvalidURLs": True, "scrapeOptions": {}, + "sources": [ + {"type": "web"}, + ], + "integration": "dify", } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/search", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/search", json_data, headers) if response.status_code == 200: response_data = response.json() if not response_data.get("success"): diff --git a/api/core/rag/extractor/notion_extractor.py b/api/core/rag/extractor/notion_extractor.py index bddf41af43..e87ab38349 100644 --- a/api/core/rag/extractor/notion_extractor.py +++ b/api/core/rag/extractor/notion_extractor.py @@ -3,7 +3,7 @@ import logging import operator from typing import Any, cast -import requests +import httpx from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor @@ -92,7 +92,7 @@ class NotionExtractor(BaseExtractor): if next_cursor: current_query["start_cursor"] = next_cursor - res = requests.post( + res = httpx.post( DATABASE_URL_TMPL.format(database_id=database_id), headers={ "Authorization": "Bearer " + self._notion_access_token, @@ -160,7 +160,7 @@ class NotionExtractor(BaseExtractor): while True: query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor} try: - res = requests.request( + res = httpx.request( 
"GET", block_url, headers={ @@ -173,7 +173,7 @@ class NotionExtractor(BaseExtractor): if res.status_code != 200: raise ValueError(f"Error fetching Notion block data: {res.text}") data = res.json() - except requests.RequestException as e: + except httpx.HTTPError as e: raise ValueError("Error fetching Notion block data") from e if "results" not in data or not isinstance(data["results"], list): raise ValueError("Error fetching Notion block data") @@ -222,7 +222,7 @@ class NotionExtractor(BaseExtractor): while True: query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor} - res = requests.request( + res = httpx.request( "GET", block_url, headers={ @@ -282,7 +282,7 @@ class NotionExtractor(BaseExtractor): while not done: query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor} - res = requests.request( + res = httpx.request( "GET", block_url, headers={ @@ -354,7 +354,7 @@ class NotionExtractor(BaseExtractor): query_dict: dict[str, Any] = {} - res = requests.request( + res = httpx.request( "GET", retrieve_page_url, headers={ diff --git a/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py b/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py index 5199208f70..7dd8beaa46 100644 --- a/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_doc_extractor.py @@ -1,6 +1,7 @@ import logging import os +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -49,7 +50,8 @@ class UnstructuredWordExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py index ad04bd0bd1..d97d4c3a48 100644 --- a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py @@ -4,6 +4,7 @@ import logging from bs4 import BeautifulSoup +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -46,7 +47,8 @@ class UnstructuredEmailExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py index fc14ee6275..3061d957ac 100644 --- a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py @@ -2,6 +2,7 @@ import logging import pypandoc # type: ignore +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from 
core.rag.models.document import Document @@ -40,7 +41,8 @@ class UnstructuredEpubExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py index 23030d7739..b6d8c47111 100644 --- a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py @@ -1,5 +1,6 @@ import logging +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -32,7 +33,8 @@ class UnstructuredMarkdownExtractor(BaseExtractor): elements = partition_md(filename=self._file_path) from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py index f29e639d1b..ae60fc7981 100644 --- a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py @@ -1,5 +1,6 @@ import logging +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -31,7 +32,8 @@ class UnstructuredMsgExtractor(BaseExtractor): elements = partition_msg(filename=self._file_path) from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py index d75e166f1b..2d4846d85e 100644 --- a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py @@ -1,5 +1,6 @@ import logging +from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -32,7 +33,8 @@ class UnstructuredXmlExtractor(BaseExtractor): from unstructured.chunking.title import chunk_by_title - chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH + chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters) documents = [] for chunk in chunks: text = chunk.text.strip() diff --git a/api/core/rag/extractor/watercrawl/client.py 
b/api/core/rag/extractor/watercrawl/client.py index 6d596e07d8..7cf6c4d289 100644 --- a/api/core/rag/extractor/watercrawl/client.py +++ b/api/core/rag/extractor/watercrawl/client.py @@ -3,8 +3,8 @@ from collections.abc import Generator from typing import Union from urllib.parse import urljoin -import requests -from requests import Response +import httpx +from httpx import Response from core.rag.extractor.watercrawl.exceptions import ( WaterCrawlAuthenticationError, @@ -20,28 +20,45 @@ class BaseAPIClient: self.session = self.init_session() def init_session(self): - session = requests.Session() - session.headers.update({"X-API-Key": self.api_key}) - session.headers.update({"Content-Type": "application/json"}) - session.headers.update({"Accept": "application/json"}) - session.headers.update({"User-Agent": "WaterCrawl-Plugin"}) - session.headers.update({"Accept-Language": "en-US"}) - return session + headers = { + "X-API-Key": self.api_key, + "Content-Type": "application/json", + "Accept": "application/json", + "User-Agent": "WaterCrawl-Plugin", + "Accept-Language": "en-US", + } + return httpx.Client(headers=headers, timeout=None) + + def _request( + self, + method: str, + endpoint: str, + query_params: dict | None = None, + data: dict | None = None, + **kwargs, + ) -> Response: + stream = kwargs.pop("stream", False) + url = urljoin(self.base_url, endpoint) + if stream: + request = self.session.build_request(method, url, params=query_params, json=data) + return self.session.send(request, stream=True, **kwargs) + + return self.session.request(method, url, params=query_params, json=data, **kwargs) def _get(self, endpoint: str, query_params: dict | None = None, **kwargs): - return self.session.get(urljoin(self.base_url, endpoint), params=query_params, **kwargs) + return self._request("GET", endpoint, query_params=query_params, **kwargs) def _post(self, endpoint: str, query_params: dict | None = None, data: dict | None = None, **kwargs): - return self.session.post(urljoin(self.base_url, endpoint), params=query_params, json=data, **kwargs) + return self._request("POST", endpoint, query_params=query_params, data=data, **kwargs) def _put(self, endpoint: str, query_params: dict | None = None, data: dict | None = None, **kwargs): - return self.session.put(urljoin(self.base_url, endpoint), params=query_params, json=data, **kwargs) + return self._request("PUT", endpoint, query_params=query_params, data=data, **kwargs) def _delete(self, endpoint: str, query_params: dict | None = None, **kwargs): - return self.session.delete(urljoin(self.base_url, endpoint), params=query_params, **kwargs) + return self._request("DELETE", endpoint, query_params=query_params, **kwargs) def _patch(self, endpoint: str, query_params: dict | None = None, data: dict | None = None, **kwargs): - return self.session.patch(urljoin(self.base_url, endpoint), params=query_params, json=data, **kwargs) + return self._request("PATCH", endpoint, query_params=query_params, data=data, **kwargs) class WaterCrawlAPIClient(BaseAPIClient): @@ -49,14 +66,17 @@ class WaterCrawlAPIClient(BaseAPIClient): super().__init__(api_key, base_url) def process_eventstream(self, response: Response, download: bool = False) -> Generator: - for line in response.iter_lines(): - line = line.decode("utf-8") - if line.startswith("data:"): - line = line[5:].strip() - data = json.loads(line) - if data["type"] == "result" and download: - data["data"] = self.download_result(data["data"]) - yield data + try: + for raw_line in response.iter_lines(): + line = 
raw_line.decode("utf-8") if isinstance(raw_line, bytes) else raw_line + if line.startswith("data:"): + line = line[5:].strip() + data = json.loads(line) + if data["type"] == "result" and download: + data["data"] = self.download_result(data["data"]) + yield data + finally: + response.close() def process_response(self, response: Response) -> dict | bytes | list | None | Generator: if response.status_code == 401: @@ -170,7 +190,10 @@ class WaterCrawlAPIClient(BaseAPIClient): return event_data["data"] def download_result(self, result_object: dict): - response = requests.get(result_object["result"]) - response.raise_for_status() - result_object["result"] = response.json() + response = httpx.get(result_object["result"], timeout=None) + try: + response.raise_for_status() + result_object["result"] = response.json() + finally: + response.close() return result_object diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index f25f92cf81..1a9704688a 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -9,7 +9,7 @@ import uuid from urllib.parse import urlparse from xml.etree import ElementTree -import requests +import httpx from docx import Document as DocxDocument from configs import dify_config @@ -43,15 +43,19 @@ class WordExtractor(BaseExtractor): # If the file is a web path, download it to a temporary file, and use that if not os.path.isfile(self.file_path) and self._is_valid_url(self.file_path): - r = requests.get(self.file_path) + response = httpx.get(self.file_path, timeout=None) - if r.status_code != 200: - raise ValueError(f"Check the url of your file; returned status code {r.status_code}") + if response.status_code != 200: + response.close() + raise ValueError(f"Check the url of your file; returned status code {response.status_code}") self.web_path = self.file_path # TODO: use a better way to handle the file self.temp_file = tempfile.NamedTemporaryFile() # noqa SIM115 - self.temp_file.write(r.content) + try: + self.temp_file.write(response.content) + finally: + response.close() self.file_path = self.temp_file.name elif not os.path.isfile(self.file_path): raise ValueError(f"File path {self.file_path} is not a valid file or url") diff --git a/api/core/rag/index_processor/index_processor_base.py b/api/core/rag/index_processor/index_processor_base.py index 05cffb5a55..d4eff53204 100644 --- a/api/core/rag/index_processor/index_processor_base.py +++ b/api/core/rag/index_processor/index_processor_base.py @@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any, Optional from configs import dify_config from core.rag.extractor.entity.extract_setting import ExtractSetting from core.rag.models.document import Document +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.rag.splitter.fixed_text_splitter import ( EnhanceRecursiveCharacterTextSplitter, FixedRecursiveCharacterTextSplitter, @@ -49,7 +50,7 @@ class BaseIndexProcessor(ABC): @abstractmethod def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, diff --git a/api/core/rag/index_processor/processor/paragraph_index_processor.py b/api/core/rag/index_processor/processor/paragraph_index_processor.py index 755aa88d08..5e5fea7ea9 100644 --- a/api/core/rag/index_processor/processor/paragraph_index_processor.py +++ b/api/core/rag/index_processor/processor/paragraph_index_processor.py @@ -14,6 +14,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from 
core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import Document +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.tools.utils.text_processing_utils import remove_leading_symbols from libs import helper from models.dataset import Dataset, DatasetProcessRule @@ -38,11 +39,11 @@ class ParagraphIndexProcessor(BaseIndexProcessor): raise ValueError("No process rule found.") if process_rule.get("mode") == "automatic": automatic_rule = DatasetProcessRule.AUTOMATIC_RULES - rules = Rule(**automatic_rule) + rules = Rule.model_validate(automatic_rule) else: if not process_rule.get("rules"): raise ValueError("No rules found in process rule.") - rules = Rule(**process_rule.get("rules")) + rules = Rule.model_validate(process_rule.get("rules")) # Split the text documents into nodes. if not rules.segmentation: raise ValueError("No segmentation found in rules.") @@ -106,7 +107,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor): def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, diff --git a/api/core/rag/index_processor/processor/parent_child_index_processor.py b/api/core/rag/index_processor/processor/parent_child_index_processor.py index e0ccd8b567..4fa78e2f95 100644 --- a/api/core/rag/index_processor/processor/parent_child_index_processor.py +++ b/api/core/rag/index_processor/processor/parent_child_index_processor.py @@ -16,6 +16,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import ChildDocument, Document, ParentChildStructureChunk +from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from libs import helper from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegment @@ -40,7 +41,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): raise ValueError("No process rule found.") if not process_rule.get("rules"): raise ValueError("No rules found in process rule.") - rules = Rule(**process_rule.get("rules")) + rules = Rule.model_validate(process_rule.get("rules")) all_documents: list[Document] = [] if rules.parent_mode == ParentMode.PARAGRAPH: # Split the text documents into nodes. 
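Editor's note on the `Rule(**data)` → `Rule.model_validate(data)` changes in the index processors above: `model_validate` keeps validation of untrusted process-rule dicts on Pydantic's normal error path. Below is a minimal sketch using a simplified stand-in for the real `Rule` model; the field names are assumed for illustration and are not taken from the Dify codebase.

```python
from pydantic import BaseModel, ValidationError


class Segmentation(BaseModel):  # hypothetical, simplified stand-in
    separator: str = "\n\n"
    max_tokens: int = 500


class Rule(BaseModel):  # hypothetical, simplified stand-in
    segmentation: Segmentation | None = None


raw = {"segmentation": {"separator": "\n", "max_tokens": 800}}

# model_validate accepts any mapping (or an existing model instance) and
# reports problems as a ValidationError, which callers can handle uniformly.
rule = Rule.model_validate(raw)
assert rule.segmentation is not None and rule.segmentation.max_tokens == 800

# Rule(**raw) works for a well-formed dict, but fails with a TypeError rather
# than a ValidationError when the input is None or not a mapping.
try:
    Rule.model_validate(None)
except ValidationError:
    pass  # consistent error type regardless of input shape
```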
@@ -110,7 +111,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): child_documents = document.children if child_documents: formatted_child_documents = [ - Document(**child_document.model_dump()) for child_document in child_documents + Document.model_validate(child_document.model_dump()) for child_document in child_documents ] vector.create(formatted_child_documents) @@ -161,7 +162,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, @@ -224,7 +225,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): return child_nodes def index(self, dataset: Dataset, document: DatasetDocument, chunks: Any): - parent_childs = ParentChildStructureChunk(**chunks) + parent_childs = ParentChildStructureChunk.model_validate(chunks) documents = [] for parent_child in parent_childs.parent_child_chunks: metadata = { @@ -274,7 +275,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): vector.create(all_child_documents) def format_preview(self, chunks: Any) -> Mapping[str, Any]: - parent_childs = ParentChildStructureChunk(**chunks) + parent_childs = ParentChildStructureChunk.model_validate(chunks) preview = [] for parent_child in parent_childs.parent_child_chunks: preview.append({"content": parent_child.parent_content, "child_chunks": parent_child.child_contents}) diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py index 2054031643..3e3deb0180 100644 --- a/api/core/rag/index_processor/processor/qa_index_processor.py +++ b/api/core/rag/index_processor/processor/qa_index_processor.py @@ -21,6 +21,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import Document, QAStructureChunk +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.tools.utils.text_processing_utils import remove_leading_symbols from libs import helper from models.dataset import Dataset @@ -47,7 +48,7 @@ class QAIndexProcessor(BaseIndexProcessor): raise ValueError("No process rule found.") if not process_rule.get("rules"): raise ValueError("No rules found in process rule.") - rules = Rule(**process_rule.get("rules")) + rules = Rule.model_validate(process_rule.get("rules")) splitter = self._get_splitter( processing_rule_mode=process_rule.get("mode"), max_tokens=rules.segmentation.max_tokens if rules.segmentation else 0, @@ -141,7 +142,7 @@ class QAIndexProcessor(BaseIndexProcessor): def retrieve( self, - retrieval_method: str, + retrieval_method: RetrievalMethod, query: str, dataset: Dataset, top_k: int, @@ -168,7 +169,7 @@ class QAIndexProcessor(BaseIndexProcessor): return docs def index(self, dataset: Dataset, document: DatasetDocument, chunks: Any): - qa_chunks = QAStructureChunk(**chunks) + qa_chunks = QAStructureChunk.model_validate(chunks) documents = [] for qa_chunk in qa_chunks.qa_chunks: metadata = { @@ -191,7 +192,7 @@ class QAIndexProcessor(BaseIndexProcessor): raise ValueError("Indexing technique must be high quality.") def format_preview(self, chunks: Any) -> Mapping[str, Any]: - qa_chunks = QAStructureChunk(**chunks) + qa_chunks = QAStructureChunk.model_validate(chunks) preview = [] for qa_chunk in qa_chunks.qa_chunks: preview.append({"question": qa_chunk.question, "answer": qa_chunk.answer}) diff 
--git a/api/core/rag/rerank/rerank_factory.py b/api/core/rag/rerank/rerank_factory.py index 1a3cf85736..524e83824c 100644 --- a/api/core/rag/rerank/rerank_factory.py +++ b/api/core/rag/rerank/rerank_factory.py @@ -8,9 +8,9 @@ class RerankRunnerFactory: @staticmethod def create_rerank_runner(runner_type: str, *args, **kwargs) -> BaseRerankRunner: match runner_type: - case RerankMode.RERANKING_MODEL.value: + case RerankMode.RERANKING_MODEL: return RerankModelRunner(*args, **kwargs) - case RerankMode.WEIGHTED_SCORE.value: + case RerankMode.WEIGHTED_SCORE: return WeightRerankRunner(*args, **kwargs) case _: raise ValueError(f"Unknown runner type: {runner_type}") diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index b08f80da49..99bbe615fb 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -61,7 +61,7 @@ from models.dataset import Document as DatasetDocument from services.external_knowledge_service import ExternalDatasetService default_retrieval_model: dict[str, Any] = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 4, @@ -364,7 +364,7 @@ class DatasetRetrieval: top_k = retrieval_model_config["top_k"] # get retrieval method if dataset.indexing_technique == "economy": - retrieval_method = "keyword_search" + retrieval_method = RetrievalMethod.KEYWORD_SEARCH else: retrieval_method = retrieval_model_config["search_method"] # get reranking model @@ -623,7 +623,7 @@ class DatasetRetrieval: if dataset.indexing_technique == "economy": # use keyword table query documents = RetrievalService.retrieve( - retrieval_method="keyword_search", + retrieval_method=RetrievalMethod.KEYWORD_SEARCH, dataset_id=dataset.id, query=query, top_k=top_k, @@ -692,7 +692,7 @@ class DatasetRetrieval: if retrieve_config.retrieve_strategy == DatasetRetrieveConfigEntity.RetrieveStrategy.SINGLE: # get retrieval model config default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, diff --git a/api/core/rag/retrieval/retrieval_methods.py b/api/core/rag/retrieval/retrieval_methods.py index c7c6e60c8d..c77a026351 100644 --- a/api/core/rag/retrieval/retrieval_methods.py +++ b/api/core/rag/retrieval/retrieval_methods.py @@ -1,7 +1,7 @@ -from enum import Enum +from enum import StrEnum -class RetrievalMethod(Enum): +class RetrievalMethod(StrEnum): SEMANTIC_SEARCH = "semantic_search" FULL_TEXT_SEARCH = "full_text_search" HYBRID_SEARCH = "hybrid_search" @@ -9,8 +9,8 @@ class RetrievalMethod(Enum): @staticmethod def is_support_semantic_search(retrieval_method: str) -> bool: - return retrieval_method in {RetrievalMethod.SEMANTIC_SEARCH.value, RetrievalMethod.HYBRID_SEARCH.value} + return retrieval_method in {RetrievalMethod.SEMANTIC_SEARCH, RetrievalMethod.HYBRID_SEARCH} @staticmethod def is_support_fulltext_search(retrieval_method: str) -> bool: - return retrieval_method in {RetrievalMethod.FULL_TEXT_SEARCH.value, RetrievalMethod.HYBRID_SEARCH.value} + return retrieval_method in {RetrievalMethod.FULL_TEXT_SEARCH, RetrievalMethod.HYBRID_SEARCH} diff --git a/api/core/rag/splitter/fixed_text_splitter.py b/api/core/rag/splitter/fixed_text_splitter.py index 
8356861242..801d2a2a52 100644 --- a/api/core/rag/splitter/fixed_text_splitter.py +++ b/api/core/rag/splitter/fixed_text_splitter.py @@ -2,6 +2,7 @@ from __future__ import annotations +import re from typing import Any from core.model_manager import ModelInstance @@ -52,7 +53,7 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter) """Create a new TextSplitter.""" super().__init__(**kwargs) self._fixed_separator = fixed_separator - self._separators = separators or ["\n\n", "\n", " ", ""] + self._separators = separators or ["\n\n", "\n", "。", ". ", " ", ""] def split_text(self, text: str) -> list[str]: """Split incoming text and return chunks.""" @@ -90,16 +91,19 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter) # Now that we have the separator, split the text if separator: if separator == " ": - splits = text.split() + splits = re.split(r" +", text) else: splits = text.split(separator) splits = [item + separator if i < len(splits) else item for i, item in enumerate(splits)] else: splits = list(text) - splits = [s for s in splits if (s not in {"", "\n"})] + if separator == "\n": + splits = [s for s in splits if s != ""] + else: + splits = [s for s in splits if (s not in {"", "\n"})] _good_splits = [] _good_splits_lengths = [] # cache the lengths of the splits - _separator = "" if self._keep_separator else separator + _separator = separator if self._keep_separator else "" s_lens = self._length_function(splits) if separator != "": for s, s_len in zip(splits, s_lens): diff --git a/api/core/repositories/celery_workflow_execution_repository.py b/api/core/repositories/celery_workflow_execution_repository.py index eda7b54d6a..460bb75722 100644 --- a/api/core/repositories/celery_workflow_execution_repository.py +++ b/api/core/repositories/celery_workflow_execution_repository.py @@ -108,7 +108,7 @@ class CeleryWorkflowExecutionRepository(WorkflowExecutionRepository): execution_data = execution.model_dump() # Queue the save operation as a Celery task (fire and forget) - save_workflow_execution_task.delay( + save_workflow_execution_task.delay( # type: ignore execution_data=execution_data, tenant_id=self._tenant_id, app_id=self._app_id or "", diff --git a/api/core/tools/__base/tool_runtime.py b/api/core/tools/__base/tool_runtime.py index 3de0014c61..09bc817c01 100644 --- a/api/core/tools/__base/tool_runtime.py +++ b/api/core/tools/__base/tool_runtime.py @@ -1,7 +1,6 @@ from typing import Any -from openai import BaseModel -from pydantic import Field +from pydantic import BaseModel, Field from core.app.entities.app_invoke_entities import InvokeFrom from core.tools.entities.tool_entities import CredentialType, ToolInvokeFrom diff --git a/api/core/tools/builtin_tool/provider.py b/api/core/tools/builtin_tool/provider.py index 45fd16d684..2e94907f30 100644 --- a/api/core/tools/builtin_tool/provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -90,7 +90,7 @@ class BuiltinToolProviderController(ToolProviderController): tools.append( assistant_tool_class( provider=provider, - entity=ToolEntity(**tool), + entity=ToolEntity.model_validate(tool), runtime=ToolRuntime(tenant_id=""), ) ) @@ -111,7 +111,7 @@ class BuiltinToolProviderController(ToolProviderController): :return: the credentials schema """ - return self.get_credentials_schema_by_type(CredentialType.API_KEY.value) + return self.get_credentials_schema_by_type(CredentialType.API_KEY) def get_credentials_schema_by_type(self, credential_type: str) -> list[ProviderConfig]: """ @@ -122,7 +122,7 
@@ class BuiltinToolProviderController(ToolProviderController): """ if credential_type == CredentialType.OAUTH2.value: return self.entity.oauth_schema.credentials_schema.copy() if self.entity.oauth_schema else [] - if credential_type == CredentialType.API_KEY.value: + if credential_type == CredentialType.API_KEY: return self.entity.credentials_schema.copy() if self.entity.credentials_schema else [] raise ValueError(f"Invalid credential type: {credential_type}") @@ -134,15 +134,15 @@ class BuiltinToolProviderController(ToolProviderController): """ return self.entity.oauth_schema.client_schema.copy() if self.entity.oauth_schema else [] - def get_supported_credential_types(self) -> list[str]: + def get_supported_credential_types(self) -> list[CredentialType]: """ returns the credential support type of the provider """ types = [] if self.entity.credentials_schema is not None and len(self.entity.credentials_schema) > 0: - types.append(CredentialType.API_KEY.value) + types.append(CredentialType.API_KEY) if self.entity.oauth_schema is not None and len(self.entity.oauth_schema.credentials_schema) > 0: - types.append(CredentialType.OAUTH2.value) + types.append(CredentialType.OAUTH2) return types def get_tools(self) -> list[BuiltinTool]: diff --git a/api/core/tools/builtin_tool/providers/code/_assets/icon.svg b/api/core/tools/builtin_tool/providers/code/_assets/icon.svg index b986ed9426..154726a081 100644 --- a/api/core/tools/builtin_tool/providers/code/_assets/icon.svg +++ b/api/core/tools/builtin_tool/providers/code/_assets/icon.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/api/core/tools/custom_tool/tool.py b/api/core/tools/custom_tool/tool.py index 34d0f5c622..f18f638f2d 100644 --- a/api/core/tools/custom_tool/tool.py +++ b/api/core/tools/custom_tool/tool.py @@ -290,6 +290,7 @@ class ApiTool(Tool): method_lc ]( # https://discuss.python.org/t/type-inference-for-function-return-types/42926 url, + max_retries=0, params=params, headers=headers, cookies=cookies, diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index 00c4ab9dd7..de6bf01ae9 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -61,7 +61,7 @@ class ToolProviderApiEntity(BaseModel): for tool in tools: if tool.get("parameters"): for parameter in tool.get("parameters"): - if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES.value: + if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES: parameter["type"] = "files" if parameter.get("input_schema") is None: parameter.pop("input_schema", None) @@ -110,7 +110,9 @@ class ToolProviderCredentialApiEntity(BaseModel): class ToolProviderCredentialInfoApiEntity(BaseModel): - supported_credential_types: list[str] = Field(description="The supported credential types of the provider") + supported_credential_types: list[CredentialType] = Field( + description="The supported credential types of the provider" + ) is_oauth_custom_client_enabled: bool = Field( default=False, description="Whether the OAuth custom client is enabled for the provider" ) diff --git a/api/core/tools/entities/common_entities.py b/api/core/tools/entities/common_entities.py index 2c6d9c1964..21d310bbb9 100644 --- a/api/core/tools/entities/common_entities.py +++ b/api/core/tools/entities/common_entities.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, model_validator class I18nObject(BaseModel): @@ -11,11 +11,12 @@ 
class I18nObject(BaseModel): pt_BR: str | None = Field(default=None) ja_JP: str | None = Field(default=None) - def __init__(self, **data): - super().__init__(**data) + @model_validator(mode="after") + def _populate_missing_locales(self): self.zh_Hans = self.zh_Hans or self.en_US self.pt_BR = self.pt_BR or self.en_US self.ja_JP = self.ja_JP or self.en_US + return self def to_dict(self): return {"zh_Hans": self.zh_Hans, "en_US": self.en_US, "pt_BR": self.pt_BR, "ja_JP": self.ja_JP} diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index a59b54216f..15a4f0aafd 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -113,7 +113,7 @@ class ApiProviderAuthType(StrEnum): # normalize & tiny alias for backward compatibility v = (value or "").strip().lower() if v == "api_key": - v = cls.API_KEY_HEADER.value + v = cls.API_KEY_HEADER for mode in cls: if mode.value == v: @@ -189,6 +189,11 @@ class ToolInvokeMessage(BaseModel): data: Mapping[str, Any] = Field(..., description="Detailed log data") metadata: Mapping[str, Any] = Field(default_factory=dict, description="The metadata of the log") + @field_validator("metadata", mode="before") + @classmethod + def _normalize_metadata(cls, value: Mapping[str, Any] | None) -> Mapping[str, Any]: + return value or {} + class RetrieverResourceMessage(BaseModel): retriever_resources: list[RetrievalSourceMetadata] = Field(..., description="retriever resources") context: str = Field(..., description="context") @@ -376,6 +381,11 @@ class ToolEntity(BaseModel): def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]: return v or [] + @field_validator("output_schema", mode="before") + @classmethod + def _normalize_output_schema(cls, value: Mapping[str, object] | None) -> Mapping[str, object]: + return value or {} + class OAuthSchema(BaseModel): client_schema: list[ProviderConfig] = Field( diff --git a/api/core/tools/mcp_tool/provider.py b/api/core/tools/mcp_tool/provider.py index 5b04f0edbe..0c2870727e 100644 --- a/api/core/tools/mcp_tool/provider.py +++ b/api/core/tools/mcp_tool/provider.py @@ -54,7 +54,7 @@ class MCPToolProviderController(ToolProviderController): """ tools = [] tools_data = json.loads(db_provider.tools) - remote_mcp_tools = [RemoteMCPTool(**tool) for tool in tools_data] + remote_mcp_tools = [RemoteMCPTool.model_validate(tool) for tool in tools_data] user = db_provider.load_user() tools = [ ToolEntity( @@ -76,7 +76,8 @@ class MCPToolProviderController(ToolProviderController): ) for remote_mcp_tool in remote_mcp_tools ] - + if not db_provider.icon: + raise ValueError("Database provider icon is required") return cls( entity=ToolProviderEntityWithPlugin( identity=ToolProviderIdentity( diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 9e5f5a7c23..af68971ca7 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -1008,7 +1008,7 @@ class ToolManager: config = tool_configurations.get(parameter.name, {}) if not (config and isinstance(config, dict) and config.get("value") is not None): continue - tool_input = ToolNodeData.ToolInput(**tool_configurations.get(parameter.name, {})) + tool_input = ToolNodeData.ToolInput.model_validate(tool_configurations.get(parameter.name, {})) if tool_input.type == "variable": variable = variable_pool.get(tool_input.value) if variable is None: diff --git a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py 
b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py index 75c0c6738e..20e10be075 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py @@ -18,7 +18,7 @@ from extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment default_retrieval_model: dict[str, Any] = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, @@ -126,7 +126,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): data_source_type=document.data_source_type, segment_id=segment.id, retriever_from=self.retriever_from, - score=document_score_list.get(segment.index_node_id, None), + score=document_score_list.get(segment.index_node_id), doc_metadata=document.doc_metadata, ) @@ -172,7 +172,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): if dataset.indexing_technique == "economy": # use keyword table query documents = RetrievalService.retrieve( - retrieval_method="keyword_search", + retrieval_method=RetrievalMethod.KEYWORD_SEARCH, dataset_id=dataset.id, query=query, top_k=retrieval_model.get("top_k") or 4, diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py index ac2967d0c1..dd0b4bedcf 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py @@ -18,6 +18,10 @@ class DatasetRetrieverBaseTool(BaseModel, ABC): retriever_from: str model_config = ConfigDict(arbitrary_types_allowed=True) + def run(self, query: str) -> str: + """Use the tool.""" + return self._run(query) + @abstractmethod def _run(self, query: str) -> str: """Use the tool. 
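Editor's note: the new public `run()` on `DatasetRetrieverBaseTool` (and the caller switch from `_run` to `run` in the next file) follows the usual public-wrapper-around-protected-hook pattern. A rough sketch of that pattern, with a hypothetical stand-in class rather than the actual Dify base tool:

```python
from abc import ABC, abstractmethod


class RetrieverToolSketch(ABC):  # hypothetical stand-in, not the Dify class
    def run(self, query: str) -> str:
        """Public entry point; one place to add shared checks or logging later."""
        return self._run(query)

    @abstractmethod
    def _run(self, query: str) -> str:
        """Subclasses implement the actual retrieval."""
        ...


class EchoTool(RetrieverToolSketch):
    def _run(self, query: str) -> str:
        return f"results for: {query}"


print(EchoTool().run("what is RAG?"))  # callers no longer touch _run directly
```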
diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py index 0e2237befd..915a22dd0f 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py @@ -17,7 +17,7 @@ from models.dataset import Document as DatasetDocument from services.external_knowledge_service import ExternalDatasetService default_retrieval_model: dict[str, Any] = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "reranking_mode": "reranking_model", @@ -130,7 +130,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): if dataset.indexing_technique == "economy": # use keyword table query documents = RetrievalService.retrieve( - retrieval_method="keyword_search", + retrieval_method=RetrievalMethod.KEYWORD_SEARCH, dataset_id=dataset.id, query=query, top_k=self.top_k, diff --git a/api/core/tools/utils/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever_tool.py index a62d419243..fca6e6f1c7 100644 --- a/api/core/tools/utils/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever_tool.py @@ -124,7 +124,7 @@ class DatasetRetrieverTool(Tool): yield self.create_text_message(text="please input query") else: # invoke dataset retriever tool - result = self.retrieval_tool._run(query=query) + result = self.retrieval_tool.run(query=query) yield self.create_text_message(text=result) def validate_credentials( diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index 0851a54338..ca2aa39861 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -12,7 +12,7 @@ from core.file import File, FileTransferMethod, FileType from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool_file_manager import ToolFileManager from libs.login import current_user -from models.account import Account +from models import Account logger = logging.getLogger(__name__) diff --git a/api/core/tools/utils/model_invocation_utils.py b/api/core/tools/utils/model_invocation_utils.py index 526f5c8b9a..b4bae08a9b 100644 --- a/api/core/tools/utils/model_invocation_utils.py +++ b/api/core/tools/utils/model_invocation_utils.py @@ -5,6 +5,7 @@ Therefore, a model manager is needed to list/invoke/validate models. 
""" import json +from decimal import Decimal from typing import cast from core.model_manager import ModelManager @@ -118,10 +119,10 @@ class ModelInvocationUtils: model_response="", prompt_tokens=prompt_tokens, answer_tokens=0, - answer_unit_price=0, - answer_price_unit=0, + answer_unit_price=Decimal(), + answer_price_unit=Decimal(), provider_response_latency=0, - total_price=0, + total_price=Decimal(), currency="USD", ) @@ -152,7 +153,7 @@ class ModelInvocationUtils: raise InvokeModelError(f"Invoke error: {e}") # update tool model invoke - tool_model_invoke.model_response = response.message.content + tool_model_invoke.model_response = str(response.message.content) if response.usage: tool_model_invoke.answer_tokens = response.usage.completion_tokens tool_model_invoke.answer_unit_price = response.usage.completion_unit_price diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index 2e306db6c7..c7ac3387e5 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -2,9 +2,10 @@ import re from json import dumps as json_dumps from json import loads as json_loads from json.decoder import JSONDecodeError +from typing import Any +import httpx from flask import request -from requests import get from yaml import YAMLError, safe_load from core.tools.entities.common_entities import I18nObject @@ -127,34 +128,34 @@ class ApiBasedToolSchemaParser: if "allOf" in prop_dict: del prop_dict["allOf"] - # parse body parameters - if "schema" in interface["operation"]["requestBody"]["content"][content_type]: - body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"] - required = body_schema.get("required", []) - properties = body_schema.get("properties", {}) - for name, property in properties.items(): - tool = ToolParameter( - name=name, - label=I18nObject(en_US=name, zh_Hans=name), - human_description=I18nObject( - en_US=property.get("description", ""), zh_Hans=property.get("description", "") - ), - type=ToolParameter.ToolParameterType.STRING, - required=name in required, - form=ToolParameter.ToolParameterForm.LLM, - llm_description=property.get("description", ""), - default=property.get("default", None), - placeholder=I18nObject( - en_US=property.get("description", ""), zh_Hans=property.get("description", "") - ), - ) + # parse body parameters + if "schema" in interface["operation"]["requestBody"]["content"][content_type]: + body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"] + required = body_schema.get("required", []) + properties = body_schema.get("properties", {}) + for name, property in properties.items(): + tool = ToolParameter( + name=name, + label=I18nObject(en_US=name, zh_Hans=name), + human_description=I18nObject( + en_US=property.get("description", ""), zh_Hans=property.get("description", "") + ), + type=ToolParameter.ToolParameterType.STRING, + required=name in required, + form=ToolParameter.ToolParameterForm.LLM, + llm_description=property.get("description", ""), + default=property.get("default", None), + placeholder=I18nObject( + en_US=property.get("description", ""), zh_Hans=property.get("description", "") + ), + ) - # check if there is a type - typ = ApiBasedToolSchemaParser._get_tool_parameter_type(property) - if typ: - tool.type = typ + # check if there is a type + typ = ApiBasedToolSchemaParser._get_tool_parameter_type(property) + if typ: + tool.type = typ - parameters.append(tool) + parameters.append(tool) # check if parameters is duplicated parameters_count = {} @@ -241,7 
+242,9 @@ class ApiBasedToolSchemaParser: return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi, extra_info=extra_info, warning=warning) @staticmethod - def parse_swagger_to_openapi(swagger: dict, extra_info: dict | None = None, warning: dict | None = None): + def parse_swagger_to_openapi( + swagger: dict, extra_info: dict | None = None, warning: dict | None = None + ) -> dict[str, Any]: warning = warning or {} """ parse swagger to openapi @@ -257,7 +260,7 @@ class ApiBasedToolSchemaParser: if len(servers) == 0: raise ToolApiSchemaError("No server found in the swagger yaml.") - openapi = { + converted_openapi: dict[str, Any] = { "openapi": "3.0.0", "info": { "title": info.get("title", "Swagger"), @@ -275,7 +278,7 @@ class ApiBasedToolSchemaParser: # convert paths for path, path_item in swagger["paths"].items(): - openapi["paths"][path] = {} + converted_openapi["paths"][path] = {} for method, operation in path_item.items(): if "operationId" not in operation: raise ToolApiSchemaError(f"No operationId found in operation {method} {path}.") @@ -286,7 +289,7 @@ class ApiBasedToolSchemaParser: if warning is not None: warning["missing_summary"] = f"No summary or description found in operation {method} {path}." - openapi["paths"][path][method] = { + converted_openapi["paths"][path][method] = { "operationId": operation["operationId"], "summary": operation.get("summary", ""), "description": operation.get("description", ""), @@ -295,13 +298,14 @@ class ApiBasedToolSchemaParser: } if "requestBody" in operation: - openapi["paths"][path][method]["requestBody"] = operation["requestBody"] + converted_openapi["paths"][path][method]["requestBody"] = operation["requestBody"] # convert definitions - for name, definition in swagger["definitions"].items(): - openapi["components"]["schemas"][name] = definition + if "definitions" in swagger: + for name, definition in swagger["definitions"].items(): + converted_openapi["components"]["schemas"][name] = definition - return openapi + return converted_openapi @staticmethod def parse_openai_plugin_json_to_tool_bundle( @@ -330,15 +334,20 @@ class ApiBasedToolSchemaParser: raise ToolNotSupportedError("Only openapi is supported now.") # get openapi yaml - response = get(api_url, headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "}, timeout=5) - - if response.status_code != 200: - raise ToolProviderNotFoundError("cannot get openapi yaml from url.") - - return ApiBasedToolSchemaParser.parse_openapi_yaml_to_tool_bundle( - response.text, extra_info=extra_info, warning=warning + response = httpx.get( + api_url, headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "}, timeout=5 ) + try: + if response.status_code != 200: + raise ToolProviderNotFoundError("cannot get openapi yaml from url.") + + return ApiBasedToolSchemaParser.parse_openapi_yaml_to_tool_bundle( + response.text, extra_info=extra_info, warning=warning + ) + finally: + response.close() + @staticmethod def auto_parse_to_tool_bundle( content: str, extra_info: dict | None = None, warning: dict | None = None @@ -384,7 +393,7 @@ class ApiBasedToolSchemaParser: openapi = ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle( loaded_content, extra_info=extra_info, warning=warning ) - schema_type = ApiProviderSchemaType.OPENAPI.value + schema_type = ApiProviderSchemaType.OPENAPI return openapi, schema_type except ToolApiSchemaError as e: openapi_error = e @@ -394,7 +403,7 @@ class ApiBasedToolSchemaParser: converted_swagger = 
ApiBasedToolSchemaParser.parse_swagger_to_openapi( loaded_content, extra_info=extra_info, warning=warning ) - schema_type = ApiProviderSchemaType.SWAGGER.value + schema_type = ApiProviderSchemaType.SWAGGER return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle( converted_swagger, extra_info=extra_info, warning=warning ), schema_type @@ -406,7 +415,7 @@ class ApiBasedToolSchemaParser: openapi_plugin = ApiBasedToolSchemaParser.parse_openai_plugin_json_to_tool_bundle( json_dumps(loaded_content), extra_info=extra_info, warning=warning ) - return openapi_plugin, ApiProviderSchemaType.OPENAI_PLUGIN.value + return openapi_plugin, ApiProviderSchemaType.OPENAI_PLUGIN except ToolNotSupportedError as e: # maybe it's not plugin at all openapi_plugin_error = e diff --git a/api/core/workflow/README.md b/api/core/workflow/README.md index bef19ba90b..72f5dbe1e2 100644 --- a/api/core/workflow/README.md +++ b/api/core/workflow/README.md @@ -60,8 +60,8 @@ Extensible middleware for cross-cutting concerns: ```python engine = GraphEngine(graph) -engine.add_layer(DebugLoggingLayer(level="INFO")) -engine.add_layer(ExecutionLimitsLayer(max_nodes=100)) +engine.layer(DebugLoggingLayer(level="INFO")) +engine.layer(ExecutionLimitsLayer(max_nodes=100)) ``` ### Event-Driven Architecture @@ -117,7 +117,7 @@ The codebase enforces strict layering via import-linter: 1. Create class inheriting from `Layer` base 1. Override lifecycle methods: `on_graph_start()`, `on_event()`, `on_graph_end()` -1. Add to engine via `engine.add_layer()` +1. Add to engine via `engine.layer()` ### Debugging Workflow Execution diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/entities/variable_pool.py index 8ceabde7e6..2dc00fd70b 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/entities/variable_pool.py @@ -184,11 +184,22 @@ class VariablePool(BaseModel): """Extract the actual value from an ObjectSegment.""" return obj.value if isinstance(obj, ObjectSegment) else obj - def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str): - """Get a nested attribute from a dictionary-like object.""" - if not isinstance(obj, dict): + def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str) -> Segment | None: + """ + Get a nested attribute from a dictionary-like object. + + Args: + obj: The dictionary-like object to search. + attr: The key to look up. + + Returns: + Segment | None: + The corresponding Segment built from the attribute value if the key exists, + otherwise None. 
+ """ + if not isinstance(obj, dict) or attr not in obj: return None - return obj.get(attr) + return variable_factory.build_segment(obj.get(attr)) def remove(self, selector: Sequence[str], /): """ diff --git a/api/core/workflow/enums.py b/api/core/workflow/enums.py index 00a125660a..eb88bb67ee 100644 --- a/api/core/workflow/enums.py +++ b/api/core/workflow/enums.py @@ -1,7 +1,7 @@ -from enum import Enum, StrEnum +from enum import StrEnum -class NodeState(Enum): +class NodeState(StrEnum): """State of a node or edge during workflow execution.""" UNKNOWN = "unknown" diff --git a/api/core/workflow/graph_engine/command_channels/redis_channel.py b/api/core/workflow/graph_engine/command_channels/redis_channel.py index 056e17bf5d..527647ae3b 100644 --- a/api/core/workflow/graph_engine/command_channels/redis_channel.py +++ b/api/core/workflow/graph_engine/command_channels/redis_channel.py @@ -41,6 +41,7 @@ class RedisChannel: self._redis = redis_client self._key = channel_key self._command_ttl = command_ttl + self._pending_key = f"{channel_key}:pending" def fetch_commands(self) -> list[GraphEngineCommand]: """ @@ -49,6 +50,9 @@ class RedisChannel: Returns: List of pending commands (drains the Redis list) """ + if not self._has_pending_commands(): + return [] + commands: list[GraphEngineCommand] = [] # Use pipeline for atomic operations @@ -85,6 +89,7 @@ class RedisChannel: with self._redis.pipeline() as pipe: pipe.rpush(self._key, command_json) pipe.expire(self._key, self._command_ttl) + pipe.set(self._pending_key, "1", ex=self._command_ttl) pipe.execute() def _deserialize_command(self, data: dict[str, Any]) -> GraphEngineCommand | None: @@ -105,10 +110,24 @@ class RedisChannel: command_type = CommandType(command_type_value) if command_type == CommandType.ABORT: - return AbortCommand(**data) + return AbortCommand.model_validate(data) else: # For other command types, use base class - return GraphEngineCommand(**data) + return GraphEngineCommand.model_validate(data) except (ValueError, TypeError): return None + + def _has_pending_commands(self) -> bool: + """ + Check and consume the pending marker to avoid unnecessary list reads. + + Returns: + True if commands should be fetched from Redis. 
+ """ + with self._redis.pipeline() as pipe: + pipe.get(self._pending_key) + pipe.delete(self._pending_key) + pending_value, _ = pipe.execute() + + return pending_value is not None diff --git a/api/core/workflow/graph_engine/event_management/event_handlers.py b/api/core/workflow/graph_engine/event_management/event_handlers.py index 7247b17967..1cb5851ab1 100644 --- a/api/core/workflow/graph_engine/event_management/event_handlers.py +++ b/api/core/workflow/graph_engine/event_management/event_handlers.py @@ -7,6 +7,7 @@ from collections.abc import Mapping from functools import singledispatchmethod from typing import TYPE_CHECKING, final +from core.model_runtime.entities.llm_entities import LLMUsage from core.workflow.entities import GraphRuntimeState from core.workflow.enums import ErrorStrategy, NodeExecutionType from core.workflow.graph import Graph @@ -125,6 +126,7 @@ class EventHandler: node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) is_initial_attempt = node_execution.retry_count == 0 node_execution.mark_started(event.id) + self._graph_runtime_state.increment_node_run_steps() # Track in response coordinator for stream ordering self._response_coordinator.track_node_execution(event.node_id, event.id) @@ -163,6 +165,8 @@ class EventHandler: node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) node_execution.mark_taken() + self._accumulate_node_usage(event.node_run_result.llm_usage) + # Store outputs in variable pool self._store_node_outputs(event.node_id, event.node_run_result.outputs) @@ -212,6 +216,8 @@ class EventHandler: node_execution.mark_failed(event.error) self._graph_execution.record_node_failure() + self._accumulate_node_usage(event.node_run_result.llm_usage) + result = self._error_handler.handle_node_failure(event) if result: @@ -235,6 +241,8 @@ class EventHandler: node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) node_execution.mark_taken() + self._accumulate_node_usage(event.node_run_result.llm_usage) + # Persist outputs produced by the exception strategy (e.g. default values) self._store_node_outputs(event.node_id, event.node_run_result.outputs) @@ -286,6 +294,19 @@ class EventHandler: self._state_manager.enqueue_node(event.node_id) self._state_manager.start_execution(event.node_id) + def _accumulate_node_usage(self, usage: LLMUsage) -> None: + """Accumulate token usage into the shared runtime state.""" + if usage.total_tokens <= 0: + return + + self._graph_runtime_state.add_tokens(usage.total_tokens) + + current_usage = self._graph_runtime_state.llm_usage + if current_usage.total_tokens == 0: + self._graph_runtime_state.llm_usage = usage + else: + self._graph_runtime_state.llm_usage = current_usage.plus(usage) + def _store_node_outputs(self, node_id: str, outputs: Mapping[str, object]) -> None: """ Store node outputs in the variable pool. 
diff --git a/api/core/workflow/graph_engine/layers/README.md b/api/core/workflow/graph_engine/layers/README.md index 8ee35baec0..17845ee1f0 100644 --- a/api/core/workflow/graph_engine/layers/README.md +++ b/api/core/workflow/graph_engine/layers/README.md @@ -30,7 +30,7 @@ debug_layer = DebugLoggingLayer( ) engine = GraphEngine(graph) -engine.add_layer(debug_layer) +engine.layer(debug_layer) engine.run() ``` diff --git a/api/core/workflow/graph_engine/layers/execution_limits.py b/api/core/workflow/graph_engine/layers/execution_limits.py index e39af89837..a2d36d142d 100644 --- a/api/core/workflow/graph_engine/layers/execution_limits.py +++ b/api/core/workflow/graph_engine/layers/execution_limits.py @@ -10,7 +10,7 @@ When limits are exceeded, the layer automatically aborts execution. import logging import time -from enum import Enum +from enum import StrEnum from typing import final from typing_extensions import override @@ -24,7 +24,7 @@ from core.workflow.graph_events import ( from core.workflow.graph_events.node import NodeRunFailedEvent, NodeRunSucceededEvent -class LimitType(Enum): +class LimitType(StrEnum): """Types of execution limits that can be exceeded.""" STEP_LIMIT = "step_limit" diff --git a/api/core/workflow/graph_engine/orchestration/dispatcher.py b/api/core/workflow/graph_engine/orchestration/dispatcher.py index a7229ce4e8..8340c10b49 100644 --- a/api/core/workflow/graph_engine/orchestration/dispatcher.py +++ b/api/core/workflow/graph_engine/orchestration/dispatcher.py @@ -8,7 +8,12 @@ import threading import time from typing import TYPE_CHECKING, final -from core.workflow.graph_events.base import GraphNodeEventBase +from core.workflow.graph_events import ( + GraphNodeEventBase, + NodeRunExceptionEvent, + NodeRunFailedEvent, + NodeRunSucceededEvent, +) from ..event_management import EventManager from .execution_coordinator import ExecutionCoordinator @@ -72,13 +77,16 @@ class Dispatcher: if self._thread and self._thread.is_alive(): self._thread.join(timeout=10.0) + _COMMAND_TRIGGER_EVENTS = ( + NodeRunSucceededEvent, + NodeRunFailedEvent, + NodeRunExceptionEvent, + ) + def _dispatcher_loop(self) -> None: """Main dispatcher loop.""" try: while not self._stop_event.is_set(): - # Check for commands - self._execution_coordinator.check_commands() - # Check for scaling self._execution_coordinator.check_scaling() @@ -87,6 +95,8 @@ class Dispatcher: event = self._event_queue.get(timeout=0.1) # Route to the event handler self._event_handler.dispatch(event) + if self._should_check_commands(event): + self._execution_coordinator.check_commands() self._event_queue.task_done() except queue.Empty: # Check if execution is complete @@ -102,3 +112,7 @@ class Dispatcher: # Signal the event emitter that execution is complete if self._event_emitter: self._event_emitter.mark_complete() + + def _should_check_commands(self, event: GraphNodeEventBase) -> bool: + """Return True if the event represents a node completion.""" + return isinstance(event, self._COMMAND_TRIGGER_EVENTS) diff --git a/api/core/workflow/graph_engine/response_coordinator/coordinator.py b/api/core/workflow/graph_engine/response_coordinator/coordinator.py index 985992f3f1..3db40c545e 100644 --- a/api/core/workflow/graph_engine/response_coordinator/coordinator.py +++ b/api/core/workflow/graph_engine/response_coordinator/coordinator.py @@ -212,10 +212,11 @@ class ResponseStreamCoordinator: edge = self._graph.edges[edge_id] source_node = self._graph.nodes[edge.tail] - # Check if node is a branch/container (original behavior) + # Check 
if node is a branch, container, or response node if source_node.execution_type in { NodeExecutionType.BRANCH, NodeExecutionType.CONTAINER, + NodeExecutionType.RESPONSE, } or source_node.blocks_variable_output(variable_selectors): blocking_edges.append(edge_id) diff --git a/api/core/workflow/node_events/node.py b/api/core/workflow/node_events/node.py index c1aeb9fe27..93dfefb679 100644 --- a/api/core/workflow/node_events/node.py +++ b/api/core/workflow/node_events/node.py @@ -20,6 +20,7 @@ class ModelInvokeCompletedEvent(NodeEventBase): usage: LLMUsage finish_reason: str | None = None reasoning_content: str | None = None + structured_output: dict | None = None class RunRetryEvent(NodeEventBase): diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index ec05805879..4a24b18465 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -252,7 +252,10 @@ class AgentNode(Node): if all(isinstance(v, dict) for _, v in parameters.items()): params = {} for key, param in parameters.items(): - if param.get("auto", ParamsAutoGenerated.OPEN.value) == ParamsAutoGenerated.CLOSE.value: + if param.get("auto", ParamsAutoGenerated.OPEN) in ( + ParamsAutoGenerated.CLOSE, + 0, + ): value_param = param.get("value", {}) params[key] = value_param.get("value", "") if value_param is not None else None else: @@ -266,7 +269,7 @@ class AgentNode(Node): value = cast(list[dict[str, Any]], value) tool_value = [] for tool in value: - provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN.value)) + provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN)) setting_params = tool.get("settings", {}) parameters = tool.get("parameters", {}) manual_input_params = [key for key, value in parameters.items() if value is not None] @@ -288,7 +291,7 @@ class AgentNode(Node): # But for backward compatibility with historical data # this version field judgment is still preserved here. 
runtime_variable_pool: VariablePool | None = None - if node_data.version != "1" or node_data.tool_node_version != "1": + if node_data.version != "1" or node_data.tool_node_version is not None: runtime_variable_pool = variable_pool tool_runtime = ToolManager.get_agent_tool_runtime( self.tenant_id, self.app_id, entity, self.invoke_from, runtime_variable_pool @@ -417,7 +420,7 @@ class AgentNode(Node): def _fetch_memory(self, model_instance: ModelInstance) -> TokenBufferMemory | None: # get conversation id conversation_id_variable = self.graph_runtime_state.variable_pool.get( - ["sys", SystemVariableKey.CONVERSATION_ID.value] + ["sys", SystemVariableKey.CONVERSATION_ID] ) if not isinstance(conversation_id_variable, StringSegment): return None @@ -476,7 +479,7 @@ class AgentNode(Node): if meta_version and Version(meta_version) > Version("0.0.1"): return tools else: - return [tool for tool in tools if tool.get("type") != ToolProviderType.MCP.value] + return [tool for tool in tools if tool.get("type") != ToolProviderType.MCP] def _transform_message( self, diff --git a/api/core/workflow/nodes/datasource/datasource_node.py b/api/core/workflow/nodes/datasource/datasource_node.py index 937f4c944f..e392cb5f5c 100644 --- a/api/core/workflow/nodes/datasource/datasource_node.py +++ b/api/core/workflow/nodes/datasource/datasource_node.py @@ -75,11 +75,11 @@ class DatasourceNode(Node): node_data = self._node_data variable_pool = self.graph_runtime_state.variable_pool - datasource_type_segement = variable_pool.get(["sys", SystemVariableKey.DATASOURCE_TYPE.value]) + datasource_type_segement = variable_pool.get(["sys", SystemVariableKey.DATASOURCE_TYPE]) if not datasource_type_segement: raise DatasourceNodeError("Datasource type is not set") datasource_type = str(datasource_type_segement.value) if datasource_type_segement.value else None - datasource_info_segement = variable_pool.get(["sys", SystemVariableKey.DATASOURCE_INFO.value]) + datasource_info_segement = variable_pool.get(["sys", SystemVariableKey.DATASOURCE_INFO]) if not datasource_info_segement: raise DatasourceNodeError("Datasource info is not set") datasource_info_value = datasource_info_segement.value @@ -267,7 +267,7 @@ class DatasourceNode(Node): return result def _fetch_files(self, variable_pool: VariablePool) -> list[File]: - variable = variable_pool.get(["sys", SystemVariableKey.FILES.value]) + variable = variable_pool.get(["sys", SystemVariableKey.FILES]) assert isinstance(variable, ArrayAnyVariable | ArrayAnySegment) return list(variable.value) if variable else [] diff --git a/api/core/workflow/nodes/end/end_node.py b/api/core/workflow/nodes/end/end_node.py index 2bdfe4efce..7ec74084d0 100644 --- a/api/core/workflow/nodes/end/end_node.py +++ b/api/core/workflow/nodes/end/end_node.py @@ -16,7 +16,7 @@ class EndNode(Node): _node_data: EndNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = EndNodeData(**data) + self._node_data = EndNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index c47ffb5ab0..d3d3571b44 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -87,7 +87,7 @@ class Executor: node_data.authorization.config.api_key ).text - self.url: str = node_data.url + self.url = node_data.url self.method = node_data.method self.auth = node_data.authorization 
self.timeout = timeout @@ -349,11 +349,10 @@ class Executor: "timeout": (self.timeout.connect, self.timeout.read, self.timeout.write), "ssl_verify": self.ssl_verify, "follow_redirects": True, - "max_retries": self.max_retries, } # request_args = {k: v for k, v in request_args.items() if v is not None} try: - response: httpx.Response = _METHOD_MAP[method_lc](**request_args) + response: httpx.Response = _METHOD_MAP[method_lc](**request_args, max_retries=self.max_retries) except (ssrf_proxy.MaxRetriesExceededError, httpx.RequestError) as e: raise HttpRequestNodeError(str(e)) from e # FIXME: fix type ignore, this maybe httpx type issue diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 826820a8e3..55dec3fb08 100644 --- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -165,6 +165,8 @@ class HttpRequestNode(Node): body_type = typed_node_data.body.type data = typed_node_data.body.data match body_type: + case "none": + pass case "binary": if len(data) != 1: raise RequestBodyError("invalid body data, should have only one item") @@ -232,7 +234,7 @@ class HttpRequestNode(Node): mapping = { "tool_file_id": tool_file.id, - "transfer_method": FileTransferMethod.TOOL_FILE.value, + "transfer_method": FileTransferMethod.TOOL_FILE, } file = file_factory.build_from_mapping( mapping=mapping, diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 075f6f8444..7e3b6ecc1a 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -83,7 +83,7 @@ class IfElseNode(Node): else: # TODO: Update database then remove this # Fallback to old structure if cases are not defined - input_conditions, group_result, final_result = _should_not_use_old_function( # ty: ignore [deprecated] + input_conditions, group_result, final_result = _should_not_use_old_function( # pyright: ignore [reportDeprecated] condition_processor=condition_processor, variable_pool=self.graph_runtime_state.variable_pool, conditions=self._node_data.conditions or [], diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 1a417b5739..c089a68bd4 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -10,6 +10,8 @@ from typing_extensions import TypeIs from core.variables import IntegerVariable, NoneSegment from core.variables.segments import ArrayAnySegment, ArraySegment +from core.variables.variables import VariableUnion +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID from core.workflow.entities import VariablePool from core.workflow.enums import ( ErrorStrategy, @@ -93,7 +95,7 @@ class IterationNode(Node): "config": { "is_parallel": False, "parallel_nums": 10, - "error_handle_mode": ErrorHandleMode.TERMINATED.value, + "error_handle_mode": ErrorHandleMode.TERMINATED, }, } @@ -217,6 +219,13 @@ class IterationNode(Node): graph_engine=graph_engine, ) + # Sync conversation variables after each iteration completes + self._sync_conversation_variables_from_snapshot( + self._extract_conversation_variable_snapshot( + variable_pool=graph_engine.graph_runtime_state.variable_pool + ) + ) + # Update the total tokens from this iteration self.graph_runtime_state.total_tokens += graph_engine.graph_runtime_state.total_tokens iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - 
iter_start_at).total_seconds() @@ -235,7 +244,10 @@ class IterationNode(Node): with ThreadPoolExecutor(max_workers=max_workers) as executor: # Submit all iteration tasks - future_to_index: dict[Future[tuple[datetime, list[GraphNodeEventBase], object | None, int]], int] = {} + future_to_index: dict[ + Future[tuple[datetime, list[GraphNodeEventBase], object | None, int, dict[str, VariableUnion]]], + int, + ] = {} for index, item in enumerate(iterator_list_value): yield IterationNextEvent(index=index) future = executor.submit( @@ -252,7 +264,7 @@ class IterationNode(Node): index = future_to_index[future] try: result = future.result() - iter_start_at, events, output_value, tokens_used = result + iter_start_at, events, output_value, tokens_used, conversation_snapshot = result # Update outputs at the correct index outputs[index] = output_value @@ -264,6 +276,9 @@ class IterationNode(Node): self.graph_runtime_state.total_tokens += tokens_used iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds() + # Sync conversation variables after iteration completion + self._sync_conversation_variables_from_snapshot(conversation_snapshot) + except Exception as e: # Handle errors based on error_handle_mode match self._node_data.error_handle_mode: @@ -288,7 +303,7 @@ class IterationNode(Node): item: object, flask_app: Flask, context_vars: contextvars.Context, - ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int]: + ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int, dict[str, VariableUnion]]: """Execute a single iteration in parallel mode and return results.""" with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars): iter_start_at = datetime.now(UTC).replace(tzinfo=None) @@ -307,8 +322,17 @@ class IterationNode(Node): # Get the output value from the temporary outputs list output_value = outputs_temp[0] if outputs_temp else None + conversation_snapshot = self._extract_conversation_variable_snapshot( + variable_pool=graph_engine.graph_runtime_state.variable_pool + ) - return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens + return ( + iter_start_at, + events, + output_value, + graph_engine.graph_runtime_state.total_tokens, + conversation_snapshot, + ) def _handle_iteration_success( self, @@ -318,10 +342,13 @@ class IterationNode(Node): iterator_list_value: Sequence[object], iter_run_map: dict[str, float], ) -> Generator[NodeEventBase, None, None]: + # Flatten the list of lists if all outputs are lists + flattened_outputs = self._flatten_outputs_if_needed(outputs) + yield IterationSucceededEvent( start_at=started_at, inputs=inputs, - outputs={"output": outputs}, + outputs={"output": flattened_outputs}, steps=len(iterator_list_value), metadata={ WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, @@ -333,13 +360,39 @@ class IterationNode(Node): yield StreamCompletedEvent( node_run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"output": outputs}, + outputs={"output": flattened_outputs}, metadata={ WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, }, ) ) + def _flatten_outputs_if_needed(self, outputs: list[object]) -> list[object]: + """ + Flatten the outputs list if all elements are lists. + This maintains backward compatibility with version 1.8.1 behavior. 
+ """ + if not outputs: + return outputs + + # Check if all non-None outputs are lists + non_none_outputs = [output for output in outputs if output is not None] + if not non_none_outputs: + return outputs + + if all(isinstance(output, list) for output in non_none_outputs): + # Flatten the list of lists + flattened: list[Any] = [] + for output in outputs: + if isinstance(output, list): + flattened.extend(output) + elif output is not None: + # This shouldn't happen based on our check, but handle it gracefully + flattened.append(output) + return flattened + + return outputs + def _handle_iteration_failure( self, started_at: datetime, @@ -349,10 +402,13 @@ class IterationNode(Node): iter_run_map: dict[str, float], error: IterationNodeError, ) -> Generator[NodeEventBase, None, None]: + # Flatten the list of lists if all outputs are lists (even in failure case) + flattened_outputs = self._flatten_outputs_if_needed(outputs) + yield IterationFailedEvent( start_at=started_at, inputs=inputs, - outputs={"output": outputs}, + outputs={"output": flattened_outputs}, steps=len(iterator_list_value), metadata={ WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: self.graph_runtime_state.total_tokens, @@ -430,6 +486,23 @@ class IterationNode(Node): return variable_mapping + def _extract_conversation_variable_snapshot(self, *, variable_pool: VariablePool) -> dict[str, VariableUnion]: + conversation_variables = variable_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {}) + return {name: variable.model_copy(deep=True) for name, variable in conversation_variables.items()} + + def _sync_conversation_variables_from_snapshot(self, snapshot: dict[str, VariableUnion]) -> None: + parent_pool = self.graph_runtime_state.variable_pool + parent_conversations = parent_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {}) + + current_keys = set(parent_conversations.keys()) + snapshot_keys = set(snapshot.keys()) + + for removed_key in current_keys - snapshot_keys: + parent_pool.remove((CONVERSATION_VARIABLE_NODE_ID, removed_key)) + + for name, variable in snapshot.items(): + parent_pool.add((CONVERSATION_VARIABLE_NODE_ID, name), variable) + def _append_iteration_info_to_event( self, event: GraphNodeEventBase, diff --git a/api/core/workflow/nodes/iteration/iteration_start_node.py b/api/core/workflow/nodes/iteration/iteration_start_node.py index 80f39ccebc..90b7f4539b 100644 --- a/api/core/workflow/nodes/iteration/iteration_start_node.py +++ b/api/core/workflow/nodes/iteration/iteration_start_node.py @@ -18,7 +18,7 @@ class IterationStartNode(Node): _node_data: IterationStartNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = IterationStartNodeData(**data) + self._node_data = IterationStartNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/knowledge_index/entities.py b/api/core/workflow/nodes/knowledge_index/entities.py index c79373afd5..3daca90b9b 100644 --- a/api/core/workflow/nodes/knowledge_index/entities.py +++ b/api/core/workflow/nodes/knowledge_index/entities.py @@ -2,6 +2,7 @@ from typing import Literal, Union from pydantic import BaseModel +from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.workflow.nodes.base import BaseNodeData @@ -63,7 +64,7 @@ class RetrievalSetting(BaseModel): Retrieval Setting. 
""" - search_method: Literal["semantic_search", "keyword_search", "full_text_search", "hybrid_search"] + search_method: RetrievalMethod top_k: int score_threshold: float | None = 0.5 score_threshold_enabled: bool = False diff --git a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py index 4b6bad1aa3..2751f24048 100644 --- a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py +++ b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py @@ -2,7 +2,7 @@ import datetime import logging import time from collections.abc import Mapping -from typing import Any, cast +from typing import Any from sqlalchemy import func, select @@ -27,7 +27,7 @@ from .exc import ( logger = logging.getLogger(__name__) default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, @@ -62,7 +62,7 @@ class KnowledgeIndexNode(Node): return self._node_data def _run(self) -> NodeRunResult: # type: ignore - node_data = cast(KnowledgeIndexNodeData, self._node_data) + node_data = self._node_data variable_pool = self.graph_runtime_state.variable_pool dataset_id = variable_pool.get(["sys", SystemVariableKey.DATASET_ID]) if not dataset_id: @@ -77,7 +77,7 @@ class KnowledgeIndexNode(Node): raise KnowledgeIndexNodeError("Index chunk variable is required.") invoke_from = variable_pool.get(["sys", SystemVariableKey.INVOKE_FROM]) if invoke_from: - is_preview = invoke_from.value == InvokeFrom.DEBUGGER.value + is_preview = invoke_from.value == InvokeFrom.DEBUGGER else: is_preview = False chunks = variable.value @@ -136,6 +136,11 @@ class KnowledgeIndexNode(Node): document = db.session.query(Document).filter_by(id=document_id.value).first() if not document: raise KnowledgeIndexNodeError(f"Document {document_id.value} not found.") + doc_id_value = document.id + ds_id_value = dataset.id + dataset_name_value = dataset.name + document_name_value = document.name + created_at_value = document.created_at # chunk nodes by chunk size indexing_start_at = time.perf_counter() index_processor = IndexProcessorFactory(dataset.chunk_structure).init_index_processor() @@ -161,16 +166,16 @@ class KnowledgeIndexNode(Node): document.word_count = ( db.session.query(func.sum(DocumentSegment.word_count)) .where( - DocumentSegment.document_id == document.id, - DocumentSegment.dataset_id == dataset.id, + DocumentSegment.document_id == doc_id_value, + DocumentSegment.dataset_id == ds_id_value, ) .scalar() ) db.session.add(document) # update document segment status db.session.query(DocumentSegment).where( - DocumentSegment.document_id == document.id, - DocumentSegment.dataset_id == dataset.id, + DocumentSegment.document_id == doc_id_value, + DocumentSegment.dataset_id == ds_id_value, ).update( { DocumentSegment.status: "completed", @@ -182,13 +187,13 @@ class KnowledgeIndexNode(Node): db.session.commit() return { - "dataset_id": dataset.id, - "dataset_name": dataset.name, + "dataset_id": ds_id_value, + "dataset_name": dataset_name_value, "batch": batch.value, - "document_id": document.id, - "document_name": document.name, - "created_at": document.created_at.timestamp(), - "display_status": document.indexing_status, + "document_id": doc_id_value, + "document_name": document_name_value, + "created_at": created_at_value.timestamp(), + "display_status": "completed", } def 
_get_preview_output(self, chunk_structure: str, chunks: Any) -> Mapping[str, Any]: diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 1afb2e05b9..7091b62463 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -72,7 +72,7 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 4, @@ -107,7 +107,7 @@ class KnowledgeRetrievalNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. - self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index 7a31d69221..180eb2ad90 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -41,7 +41,7 @@ class ListOperatorNode(Node): _node_data: ListOperatorNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = ListOperatorNodeData(**data) + self._node_data = ListOperatorNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy @@ -161,6 +161,8 @@ class ListOperatorNode(Node): elif isinstance(variable, ArrayFileSegment): if isinstance(condition.value, str): value = self.graph_runtime_state.variable_pool.convert_template(condition.value).text + elif isinstance(condition.value, bool): + raise ValueError(f"File filter expects a string value, got {type(condition.value)}") else: value = condition.value filter_func = _get_file_filter_func( diff --git a/api/core/workflow/nodes/llm/file_saver.py b/api/core/workflow/nodes/llm/file_saver.py index 81f2df0891..3f32fa894a 100644 --- a/api/core/workflow/nodes/llm/file_saver.py +++ b/api/core/workflow/nodes/llm/file_saver.py @@ -46,7 +46,7 @@ class LLMFileSaver(tp.Protocol): dot (`.`). For example, `.py` and `.tar.gz` are both valid values, while `py` and `tar.gz` are not. """ - pass + raise NotImplementedError() def save_remote_url(self, url: str, file_type: FileType) -> File: """save_remote_url saves the file from a remote url returned by LLM. @@ -56,7 +56,7 @@ class LLMFileSaver(tp.Protocol): :param url: the url of the file. :param file_type: the file type of the file, check `FileType` enum for reference. 
""" - pass + raise NotImplementedError() EngineFactory: tp.TypeAlias = tp.Callable[[], Engine] diff --git a/api/core/workflow/nodes/llm/llm_utils.py b/api/core/workflow/nodes/llm/llm_utils.py index ad969cdad1..aff84433b2 100644 --- a/api/core/workflow/nodes/llm/llm_utils.py +++ b/api/core/workflow/nodes/llm/llm_utils.py @@ -92,7 +92,7 @@ def fetch_memory( return None # get conversation id - conversation_id_variable = variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID.value]) + conversation_id_variable = variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID]) if not isinstance(conversation_id_variable, StringSegment): return None conversation_id = conversation_id_variable.value @@ -143,7 +143,7 @@ def deduct_llm_quota(tenant_id: str, model_instance: ModelInstance, usage: LLMUs Provider.tenant_id == tenant_id, # TODO: Use provider name with prefix after the data migration. Provider.provider_name == ModelProviderID(model_instance.provider).provider_name, - Provider.provider_type == ProviderType.SYSTEM.value, + Provider.provider_type == ProviderType.SYSTEM, Provider.quota_type == system_configuration.current_quota_type.value, Provider.quota_limit > Provider.quota_used, ) diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 7767440be6..13f6d904e6 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -23,6 +23,7 @@ from core.model_runtime.entities.llm_entities import ( LLMResult, LLMResultChunk, LLMResultChunkWithStructuredOutput, + LLMResultWithStructuredOutput, LLMStructuredOutput, LLMUsage, ) @@ -127,7 +128,7 @@ class LLMNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. - self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( @@ -165,6 +166,7 @@ class LLMNode(Node): node_inputs: dict[str, Any] = {} process_data: dict[str, Any] = {} result_text = "" + clean_text = "" usage = LLMUsage.empty_usage() finish_reason = None reasoning_content = None @@ -278,6 +280,13 @@ class LLMNode(Node): # Extract clean text from tags clean_text, _ = LLMNode._split_reasoning(result_text, self._node_data.reasoning_format) + # Process structured output if available from the event. 
+ structured_output = ( + LLMStructuredOutput(structured_output=event.structured_output) + if event.structured_output + else None + ) + # deduct quota llm_utils.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage) break @@ -936,7 +945,7 @@ class LLMNode(Node): variable_mapping["#files#"] = typed_node_data.vision.configs.variable_selector if typed_node_data.memory: - variable_mapping["#sys.query#"] = ["sys", SystemVariableKey.QUERY.value] + variable_mapping["#sys.query#"] = ["sys", SystemVariableKey.QUERY] if typed_node_data.prompt_config: enable_jinja = False @@ -1048,7 +1057,7 @@ class LLMNode(Node): @staticmethod def handle_blocking_result( *, - invoke_result: LLMResult, + invoke_result: LLMResult | LLMResultWithStructuredOutput, saver: LLMFileSaver, file_outputs: list["File"], reasoning_format: Literal["separated", "tagged"] = "tagged", @@ -1079,6 +1088,8 @@ class LLMNode(Node): finish_reason=None, # Reasoning content for workflow variables and downstream nodes reasoning_content=reasoning_content, + # Pass structured output if enabled + structured_output=getattr(invoke_result, "structured_output", None), ) @staticmethod diff --git a/api/core/workflow/nodes/loop/loop_end_node.py b/api/core/workflow/nodes/loop/loop_end_node.py index 38aef06d24..e5bce1230c 100644 --- a/api/core/workflow/nodes/loop/loop_end_node.py +++ b/api/core/workflow/nodes/loop/loop_end_node.py @@ -18,7 +18,7 @@ class LoopEndNode(Node): _node_data: LoopEndNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = LoopEndNodeData(**data) + self._node_data = LoopEndNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/loop/loop_start_node.py b/api/core/workflow/nodes/loop/loop_start_node.py index e777a8cbe9..e065dc90a0 100644 --- a/api/core/workflow/nodes/loop/loop_start_node.py +++ b/api/core/workflow/nodes/loop/loop_start_node.py @@ -18,7 +18,7 @@ class LoopStartNode(Node): _node_data: LoopStartNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = LoopStartNodeData(**data) + self._node_data = LoopStartNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/parameter_extractor/prompts.py b/api/core/workflow/nodes/parameter_extractor/prompts.py index ab7ddcc32a..b74be8f206 100644 --- a/api/core/workflow/nodes/parameter_extractor/prompts.py +++ b/api/core/workflow/nodes/parameter_extractor/prompts.py @@ -179,6 +179,6 @@ CHAT_EXAMPLE = [ "required": ["food"], }, }, - "assistant": {"text": "I need to output a valid JSON object.", "json": {"result": "apple pie"}}, + "assistant": {"text": "I need to output a valid JSON object.", "json": {"food": "apple pie"}}, }, ] diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index 483cfff574..592a6566fd 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -68,7 +68,7 @@ class QuestionClassifierNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. 
- self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( @@ -111,9 +111,9 @@ class QuestionClassifierNode(Node): query = variable.value if variable else None variables = {"query": query} # fetch model config - model_instance, model_config = LLMNode._fetch_model_config( - node_data_model=node_data.model, + model_instance, model_config = llm_utils.fetch_model_config( tenant_id=self.tenant_id, + node_data_model=node_data.model, ) # fetch memory memory = llm_utils.fetch_memory( diff --git a/api/core/workflow/nodes/start/start_node.py b/api/core/workflow/nodes/start/start_node.py index 2f33c54128..3b134be1a1 100644 --- a/api/core/workflow/nodes/start/start_node.py +++ b/api/core/workflow/nodes/start/start_node.py @@ -16,7 +16,7 @@ class StartNode(Node): _node_data: StartNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = StartNodeData(**data) + self._node_data = StartNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/nodes/template_transform/template_transform_node.py b/api/core/workflow/nodes/template_transform/template_transform_node.py index cf05ef253a..254a8318b5 100644 --- a/api/core/workflow/nodes/template_transform/template_transform_node.py +++ b/api/core/workflow/nodes/template_transform/template_transform_node.py @@ -1,7 +1,7 @@ -import os from collections.abc import Mapping, Sequence from typing import Any +from configs import dify_config from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult @@ -9,7 +9,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.template_transform.entities import TemplateTransformNodeData -MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = int(os.environ.get("TEMPLATE_TRANSFORM_MAX_LENGTH", "80000")) +MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH class TemplateTransformNode(Node): diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 5f2abcd378..cd0094f531 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -79,7 +79,7 @@ class ToolNode(Node): # But for backward compatibility with historical data # this version field judgment is still preserved here. 
variable_pool: VariablePool | None = None - if node_data.version != "1" or node_data.tool_node_version != "1": + if node_data.version != "1" or node_data.tool_node_version is not None: variable_pool = self.graph_runtime_state.variable_pool tool_runtime = ToolManager.get_workflow_tool_runtime( self.tenant_id, self.app_id, self._node_id, self._node_data, self.invoke_from, variable_pool @@ -224,7 +224,7 @@ class ToolNode(Node): return result def _fetch_files(self, variable_pool: "VariablePool") -> list[File]: - variable = variable_pool.get(["sys", SystemVariableKey.FILES.value]) + variable = variable_pool.get(["sys", SystemVariableKey.FILES]) assert isinstance(variable, ArrayAnyVariable | ArrayAnySegment) return list(variable.value) if variable else [] diff --git a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py index be00d55937..0ac0d3d858 100644 --- a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py +++ b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py @@ -15,7 +15,7 @@ class VariableAggregatorNode(Node): _node_data: VariableAssignerNodeData def init_node_data(self, data: Mapping[str, Any]): - self._node_data = VariableAssignerNodeData(**data) + self._node_data = VariableAssignerNodeData.model_validate(data) def _get_error_strategy(self) -> ErrorStrategy | None: return self._node_data.error_strategy diff --git a/api/core/workflow/variable_loader.py b/api/core/workflow/variable_loader.py index a35215855e..1b31022495 100644 --- a/api/core/workflow/variable_loader.py +++ b/api/core/workflow/variable_loader.py @@ -66,8 +66,8 @@ def load_into_variable_pool( # NOTE(QuantumGhost): this logic needs to be in sync with # `WorkflowEntry.mapping_user_inputs_to_variable_pool`. node_variable_list = key.split(".") - if len(node_variable_list) < 1: - raise ValueError(f"Invalid variable key: {key}. It should have at least one element.") + if len(node_variable_list) < 2: + raise ValueError(f"Invalid variable key: {key}. It should have at least two elements.") if key in user_inputs: continue node_variable_key = ".".join(node_variable_list[1:]) diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 49645ff120..4cd885cfa5 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -227,7 +227,7 @@ class WorkflowEntry: "height": node_height, "type": "custom", "data": { - "type": NodeType.START.value, + "type": NodeType.START, "title": "Start", "desc": "Start", }, @@ -416,4 +416,8 @@ class WorkflowEntry: # append variable and value to variable pool if variable_node_id != ENVIRONMENT_VARIABLE_NODE_ID: + # In single run, the input_value is set as the LLM's structured output value within the variable_pool. 
+ if len(variable_key_list) == 2 and variable_key_list[0] == "structured_output": + input_value = {variable_key_list[1]: input_value} + variable_key_list = variable_key_list[0:1] variable_pool.add([variable_node_id] + variable_key_list, input_value) diff --git a/api/events/event_handlers/clean_when_dataset_deleted.py b/api/events/event_handlers/clean_when_dataset_deleted.py index 7caa2d1cc9..0f6aa0e778 100644 --- a/api/events/event_handlers/clean_when_dataset_deleted.py +++ b/api/events/event_handlers/clean_when_dataset_deleted.py @@ -1,10 +1,13 @@ from events.dataset_event import dataset_was_deleted +from models import Dataset from tasks.clean_dataset_task import clean_dataset_task @dataset_was_deleted.connect -def handle(sender, **kwargs): +def handle(sender: Dataset, **kwargs): dataset = sender + assert dataset.doc_form + assert dataset.indexing_technique clean_dataset_task.delay( dataset.id, dataset.tenant_id, diff --git a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py index 6c9fc0bf1d..1b44d8a1e2 100644 --- a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py +++ b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py @@ -12,9 +12,9 @@ def handle(sender, **kwargs): if synced_draft_workflow is None: return for node_data in synced_draft_workflow.graph_dict.get("nodes", []): - if node_data.get("data", {}).get("type") == NodeType.TOOL.value: + if node_data.get("data", {}).get("type") == NodeType.TOOL: try: - tool_entity = ToolEntity(**node_data["data"]) + tool_entity = ToolEntity.model_validate(node_data["data"]) tool_runtime = ToolManager.get_tool_runtime( provider_type=tool_entity.provider_type, provider_id=tool_entity.provider_id, diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py index 898ec1f153..53e0065f6e 100644 --- a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py +++ b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py @@ -53,7 +53,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]: # fetch all knowledge retrieval nodes knowledge_retrieval_nodes = [ - node for node in nodes if node.get("data", {}).get("type") == NodeType.KNOWLEDGE_RETRIEVAL.value + node for node in nodes if node.get("data", {}).get("type") == NodeType.KNOWLEDGE_RETRIEVAL ] if not knowledge_retrieval_nodes: @@ -61,7 +61,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]: for node in knowledge_retrieval_nodes: try: - node_data = KnowledgeRetrievalNodeData(**node.get("data", {})) + node_data = KnowledgeRetrievalNodeData.model_validate(node.get("data", {})) dataset_ids.update(dataset_id for dataset_id in node_data.dataset_ids) except Exception: continue diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py index 27efa539dc..e1c96fb050 100644 --- a/api/events/event_handlers/update_provider_when_message_created.py +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -1,10 +1,11 @@ import logging import time as time_module from datetime import datetime -from typing import Any +from typing import Any, cast from pydantic import BaseModel from sqlalchemy 
import update +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session from configs import dify_config @@ -139,7 +140,7 @@ def handle(sender: Message, **kwargs): filters=_ProviderUpdateFilters( tenant_id=tenant_id, provider_name=ModelProviderID(model_config.provider).provider_name, - provider_type=ProviderType.SYSTEM.value, + provider_type=ProviderType.SYSTEM, quota_type=provider_configuration.system_configuration.current_quota_type.value, ), values=_ProviderUpdateValues(quota_used=Provider.quota_used + used_quota, last_used=current_time), @@ -267,7 +268,7 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation] # Build and execute the update statement stmt = update(Provider).where(*where_conditions).values(**update_values) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) rows_affected = result.rowcount logger.debug( diff --git a/api/extensions/ext_app_metrics.py b/api/extensions/ext_app_metrics.py index 56a69a1862..4a6490b9f0 100644 --- a/api/extensions/ext_app_metrics.py +++ b/api/extensions/ext_app_metrics.py @@ -10,14 +10,14 @@ from dify_app import DifyApp def init_app(app: DifyApp): @app.after_request - def after_request(response): + def after_request(response): # pyright: ignore[reportUnusedFunction] """Add Version headers to the response.""" response.headers.add("X-Version", dify_config.project.version) response.headers.add("X-Env", dify_config.DEPLOY_ENV) return response @app.route("/health") - def health(): + def health(): # pyright: ignore[reportUnusedFunction] return Response( json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}), status=200, @@ -25,7 +25,7 @@ def init_app(app: DifyApp): ) @app.route("/threads") - def threads(): + def threads(): # pyright: ignore[reportUnusedFunction] num_threads = threading.active_count() threads = threading.enumerate() @@ -50,7 +50,7 @@ def init_app(app: DifyApp): } @app.route("/db-pool-stat") - def pool_stat(): + def pool_stat(): # pyright: ignore[reportUnusedFunction] from extensions.ext_database import db engine = db.engine diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 585539e2ce..6d7d81ed87 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -145,6 +145,7 @@ def init_app(app: DifyApp) -> Celery: } if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK and dify_config.MARKETPLACE_ENABLED: imports.append("schedule.check_upgradable_plugin_task") + imports.append("tasks.process_tenant_plugin_autoupgrade_check_task") beat_schedule["check_upgradable_plugin_task"] = { "task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task", "schedule": crontab(minute="*/15"), diff --git a/api/extensions/ext_database.py b/api/extensions/ext_database.py index 067ce39e4f..c90b1d0a9f 100644 --- a/api/extensions/ext_database.py +++ b/api/extensions/ext_database.py @@ -10,7 +10,7 @@ from models.engine import db logger = logging.getLogger(__name__) # Global flag to avoid duplicate registration of event listener -_GEVENT_COMPATIBILITY_SETUP: bool = False +_gevent_compatibility_setup: bool = False def _safe_rollback(connection): @@ -26,14 +26,14 @@ def _safe_rollback(connection): def _setup_gevent_compatibility(): - global _GEVENT_COMPATIBILITY_SETUP # pylint: disable=global-statement + global _gevent_compatibility_setup # pylint: disable=global-statement # Avoid duplicate registration - if _GEVENT_COMPATIBILITY_SETUP: + if _gevent_compatibility_setup: return 
@event.listens_for(Pool, "reset") - def _safe_reset(dbapi_connection, connection_record, reset_state): # pylint: disable=unused-argument + def _safe_reset(dbapi_connection, connection_record, reset_state): # pyright: ignore[reportUnusedFunction] if reset_state.terminate_only: return @@ -47,7 +47,7 @@ def _setup_gevent_compatibility(): except (AttributeError, ImportError): _safe_rollback(dbapi_connection) - _GEVENT_COMPATIBILITY_SETUP = True + _gevent_compatibility_setup = True def init_app(app: DifyApp): diff --git a/api/extensions/ext_import_modules.py b/api/extensions/ext_import_modules.py index 9566f430b6..4eb363ff93 100644 --- a/api/extensions/ext_import_modules.py +++ b/api/extensions/ext_import_modules.py @@ -2,4 +2,4 @@ from dify_app import DifyApp def init_app(app: DifyApp): - from events import event_handlers # noqa: F401 + from events import event_handlers # noqa: F401 # pyright: ignore[reportUnusedImport] diff --git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index 5571c0d9ba..836a5d938c 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -9,7 +9,7 @@ from configs import dify_config from dify_app import DifyApp from extensions.ext_database import db from libs.passport import PassportService -from models.account import Account, Tenant, TenantAccountJoin +from models import Account, Tenant, TenantAccountJoin from models.model import AppMCPServer, EndUser from services.account_service import AccountService diff --git a/api/extensions/ext_otel.py b/api/extensions/ext_otel.py index b0059693e2..cb6e4849a9 100644 --- a/api/extensions/ext_otel.py +++ b/api/extensions/ext_otel.py @@ -136,8 +136,8 @@ def init_app(app: DifyApp): from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HTTPSpanExporter from opentelemetry.instrumentation.celery import CeleryInstrumentor from opentelemetry.instrumentation.flask import FlaskInstrumentor + from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor from opentelemetry.instrumentation.redis import RedisInstrumentor - from opentelemetry.instrumentation.requests import RequestsInstrumentor from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor from opentelemetry.metrics import get_meter, get_meter_provider, set_meter_provider from opentelemetry.propagate import set_global_textmap @@ -237,7 +237,7 @@ def init_app(app: DifyApp): instrument_exception_logging() init_sqlalchemy_instrumentor(app) RedisInstrumentor().instrument() - RequestsInstrumentor().instrument() + HTTPXClientInstrumentor().instrument() atexit.register(shutdown_tracer) diff --git a/api/extensions/ext_sentry.py b/api/extensions/ext_sentry.py index 6cfa99a62a..5ed7840211 100644 --- a/api/extensions/ext_sentry.py +++ b/api/extensions/ext_sentry.py @@ -4,7 +4,6 @@ from dify_app import DifyApp def init_app(app: DifyApp): if dify_config.SENTRY_DSN: - import openai import sentry_sdk from langfuse import parse_error # type: ignore from sentry_sdk.integrations.celery import CeleryIntegration @@ -28,7 +27,6 @@ def init_app(app: DifyApp): HTTPException, ValueError, FileNotFoundError, - openai.APIStatusError, InvokeRateLimitError, parse_error.defaultErrorResponse, ], diff --git a/api/extensions/storage/aliyun_oss_storage.py b/api/extensions/storage/aliyun_oss_storage.py index 00bf5d4f93..5da4737138 100644 --- a/api/extensions/storage/aliyun_oss_storage.py +++ b/api/extensions/storage/aliyun_oss_storage.py @@ -33,7 +33,9 @@ class AliyunOssStorage(BaseStorage): def load_once(self, filename: str) 
-> bytes: obj = self.client.get_object(self.__wrapper_folder_filename(filename)) - data: bytes = obj.read() + data = obj.read() + if not isinstance(data, bytes): + return b"" return data def load_stream(self, filename: str) -> Generator: diff --git a/api/extensions/storage/aws_s3_storage.py b/api/extensions/storage/aws_s3_storage.py index e755ab089a..6ab2a95e3c 100644 --- a/api/extensions/storage/aws_s3_storage.py +++ b/api/extensions/storage/aws_s3_storage.py @@ -39,10 +39,10 @@ class AwsS3Storage(BaseStorage): self.client.head_bucket(Bucket=self.bucket_name) except ClientError as e: # if bucket not exists, create it - if e.response["Error"]["Code"] == "404": + if e.response.get("Error", {}).get("Code") == "404": self.client.create_bucket(Bucket=self.bucket_name) # if bucket is not accessible, pass, maybe the bucket is existing but not accessible - elif e.response["Error"]["Code"] == "403": + elif e.response.get("Error", {}).get("Code") == "403": pass else: # other error, raise exception @@ -55,7 +55,7 @@ class AwsS3Storage(BaseStorage): try: data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise @@ -66,7 +66,7 @@ class AwsS3Storage(BaseStorage): response = self.client.get_object(Bucket=self.bucket_name, Key=filename) yield from response["Body"].iter_chunks() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("file not found") elif "reached max retries" in str(ex): raise ValueError("please do not request the same file too frequently") diff --git a/api/extensions/storage/azure_blob_storage.py b/api/extensions/storage/azure_blob_storage.py index 9053aece89..4bccaf13c8 100644 --- a/api/extensions/storage/azure_blob_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -27,24 +27,38 @@ class AzureBlobStorage(BaseStorage): self.credential = None def save(self, filename, data): + if not self.bucket_name: + return + client = self._sync_client() blob_container = client.get_container_client(container=self.bucket_name) blob_container.upload_blob(filename, data) def load_once(self, filename: str) -> bytes: + if not self.bucket_name: + raise FileNotFoundError("Azure bucket name is not configured.") + client = self._sync_client() blob = client.get_container_client(container=self.bucket_name) blob = blob.get_blob_client(blob=filename) - data: bytes = blob.download_blob().readall() + data = blob.download_blob().readall() + if not isinstance(data, bytes): + raise TypeError(f"Expected bytes from blob.readall(), got {type(data).__name__}") return data def load_stream(self, filename: str) -> Generator: + if not self.bucket_name: + raise FileNotFoundError("Azure bucket name is not configured.") + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) blob_data = blob.download_blob() yield from blob_data.chunks() def download(self, filename, target_filepath): + if not self.bucket_name: + return + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) @@ -53,12 +67,18 @@ class AzureBlobStorage(BaseStorage): blob_data.readinto(my_blob) def exists(self, filename): + if not self.bucket_name: + return False + client = self._sync_client() blob = 
client.get_blob_client(container=self.bucket_name, blob=filename) return blob.exists() def delete(self, filename): + if not self.bucket_name: + return + client = self._sync_client() blob_container = client.get_container_client(container=self.bucket_name) diff --git a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py index 2ffac9a92d..06c528ca41 100644 --- a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py +++ b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py @@ -430,7 +430,7 @@ class ClickZettaVolumeStorage(BaseStorage): rows = self._execute_sql(sql, fetch=True) - exists = len(rows) > 0 + exists = len(rows) > 0 if rows else False logger.debug("File %s exists check: %s", filename, exists) return exists except Exception as e: @@ -509,16 +509,17 @@ class ClickZettaVolumeStorage(BaseStorage): rows = self._execute_sql(sql, fetch=True) result = [] - for row in rows: - file_path = row[0] # relative_path column + if rows: + for row in rows: + file_path = row[0] # relative_path column - # For User Volume, remove dify prefix from results - dify_prefix_with_slash = f"{self._config.dify_prefix}/" - if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash): - file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix + # For User Volume, remove dify prefix from results + dify_prefix_with_slash = f"{self._config.dify_prefix}/" + if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash): + file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix - if files and not file_path.endswith("/") or directories and file_path.endswith("/"): - result.append(file_path) + if files and not file_path.endswith("/") or directories and file_path.endswith("/"): + result.append(file_path) logger.debug("Scanned %d items in path %s", len(result), path) return result diff --git a/api/extensions/storage/clickzetta_volume/file_lifecycle.py b/api/extensions/storage/clickzetta_volume/file_lifecycle.py index 6ab02ad8cc..dc5aa8e39c 100644 --- a/api/extensions/storage/clickzetta_volume/file_lifecycle.py +++ b/api/extensions/storage/clickzetta_volume/file_lifecycle.py @@ -264,7 +264,7 @@ class FileLifecycleManager: logger.warning("File %s not found in metadata", filename) return False - metadata_dict[filename]["status"] = FileStatus.ARCHIVED.value + metadata_dict[filename]["status"] = FileStatus.ARCHIVED metadata_dict[filename]["modified_at"] = datetime.now().isoformat() self._save_metadata(metadata_dict) @@ -309,7 +309,7 @@ class FileLifecycleManager: # Update metadata metadata_dict = self._load_metadata() if filename in metadata_dict: - metadata_dict[filename]["status"] = FileStatus.DELETED.value + metadata_dict[filename]["status"] = FileStatus.DELETED metadata_dict[filename]["modified_at"] = datetime.now().isoformat() self._save_metadata(metadata_dict) diff --git a/api/extensions/storage/clickzetta_volume/volume_permissions.py b/api/extensions/storage/clickzetta_volume/volume_permissions.py index eb1116638f..6dcf800abb 100644 --- a/api/extensions/storage/clickzetta_volume/volume_permissions.py +++ b/api/extensions/storage/clickzetta_volume/volume_permissions.py @@ -439,6 +439,11 @@ class VolumePermissionManager: self._permission_cache.clear() logger.debug("Permission cache cleared") + @property + def volume_type(self) -> str | None: + """Get the volume type.""" + return self._volume_type + def get_permission_summary(self, 
dataset_id: str | None = None) -> dict[str, bool]: """Get permission summary @@ -632,13 +637,13 @@ def check_volume_permission(permission_manager: VolumePermissionManager, operati VolumePermissionError: If no permission """ if not permission_manager.validate_operation(operation, dataset_id): - error_message = f"Permission denied for operation '{operation}' on {permission_manager._volume_type} volume" + error_message = f"Permission denied for operation '{operation}' on {permission_manager.volume_type} volume" if dataset_id: error_message += f" (dataset: {dataset_id})" raise VolumePermissionError( error_message, operation=operation, - volume_type=permission_manager._volume_type or "unknown", + volume_type=permission_manager.volume_type or "unknown", dataset_id=dataset_id, ) diff --git a/api/extensions/storage/google_cloud_storage.py b/api/extensions/storage/google_cloud_storage.py index 705639f42e..7f59252f2f 100644 --- a/api/extensions/storage/google_cloud_storage.py +++ b/api/extensions/storage/google_cloud_storage.py @@ -35,12 +35,16 @@ class GoogleCloudStorage(BaseStorage): def load_once(self, filename: str) -> bytes: bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") data: bytes = blob.download_as_bytes() return data def load_stream(self, filename: str) -> Generator: bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") with blob.open(mode="rb") as blob_stream: while chunk := blob_stream.read(4096): yield chunk @@ -48,6 +52,8 @@ class GoogleCloudStorage(BaseStorage): def download(self, filename, target_filepath): bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") blob.download_to_filename(target_filepath) def exists(self, filename): diff --git a/api/extensions/storage/huawei_obs_storage.py b/api/extensions/storage/huawei_obs_storage.py index 07f1d19970..3e75ecb7a9 100644 --- a/api/extensions/storage/huawei_obs_storage.py +++ b/api/extensions/storage/huawei_obs_storage.py @@ -45,7 +45,7 @@ class HuaweiObsStorage(BaseStorage): def _get_meta(self, filename): res = self.client.getObjectMetadata(bucketName=self.bucket_name, objectKey=filename) - if res.status < 300: + if res and res.status and res.status < 300: return res else: return None diff --git a/api/extensions/storage/opendal_storage.py b/api/extensions/storage/opendal_storage.py index b10391c7f1..f7146adba6 100644 --- a/api/extensions/storage/opendal_storage.py +++ b/api/extensions/storage/opendal_storage.py @@ -3,9 +3,9 @@ import os from collections.abc import Generator from pathlib import Path +import opendal from dotenv import dotenv_values from opendal import Operator -from opendal.layers import RetryLayer from extensions.storage.base_storage import BaseStorage @@ -35,7 +35,7 @@ class OpenDALStorage(BaseStorage): root = kwargs.get("root", "storage") Path(root).mkdir(parents=True, exist_ok=True) - retry_layer = RetryLayer(max_times=3, factor=2.0, jitter=True) + retry_layer = opendal.layers.RetryLayer(max_times=3, factor=2.0, jitter=True) self.op = Operator(scheme=scheme, **kwargs).layer(retry_layer) logger.debug("opendal operator created with scheme %s", scheme) logger.debug("added retry layer to opendal operator") diff --git a/api/extensions/storage/oracle_oci_storage.py b/api/extensions/storage/oracle_oci_storage.py index 82829f7fd5..acc00cbd6b 100644 
--- a/api/extensions/storage/oracle_oci_storage.py +++ b/api/extensions/storage/oracle_oci_storage.py @@ -29,7 +29,7 @@ class OracleOCIStorage(BaseStorage): try: data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise @@ -40,7 +40,7 @@ class OracleOCIStorage(BaseStorage): response = self.client.get_object(Bucket=self.bucket_name, Key=filename) yield from response["Body"].iter_chunks() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise diff --git a/api/extensions/storage/supabase_storage.py b/api/extensions/storage/supabase_storage.py index 711c3f7211..2ca84d4c15 100644 --- a/api/extensions/storage/supabase_storage.py +++ b/api/extensions/storage/supabase_storage.py @@ -46,13 +46,13 @@ class SupabaseStorage(BaseStorage): Path(target_filepath).write_bytes(result) def exists(self, filename): - result = self.client.storage.from_(self.bucket_name).list(filename) - if result.count() > 0: + result = self.client.storage.from_(self.bucket_name).list(path=filename) + if len(result) > 0: return True return False def delete(self, filename): - self.client.storage.from_(self.bucket_name).remove(filename) + self.client.storage.from_(self.bucket_name).remove([filename]) def bucket_exists(self): buckets = self.client.storage.list_buckets() diff --git a/api/extensions/storage/volcengine_tos_storage.py b/api/extensions/storage/volcengine_tos_storage.py index 32839d3497..8ed8e4c170 100644 --- a/api/extensions/storage/volcengine_tos_storage.py +++ b/api/extensions/storage/volcengine_tos_storage.py @@ -11,6 +11,14 @@ class VolcengineTosStorage(BaseStorage): def __init__(self): super().__init__() + if not dify_config.VOLCENGINE_TOS_ACCESS_KEY: + raise ValueError("VOLCENGINE_TOS_ACCESS_KEY is not set") + if not dify_config.VOLCENGINE_TOS_SECRET_KEY: + raise ValueError("VOLCENGINE_TOS_SECRET_KEY is not set") + if not dify_config.VOLCENGINE_TOS_ENDPOINT: + raise ValueError("VOLCENGINE_TOS_ENDPOINT is not set") + if not dify_config.VOLCENGINE_TOS_REGION: + raise ValueError("VOLCENGINE_TOS_REGION is not set") self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME self.client = tos.TosClientV2( ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY, @@ -20,27 +28,39 @@ class VolcengineTosStorage(BaseStorage): ) def save(self, filename, data): + if not self.bucket_name: + raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set") self.client.put_object(bucket=self.bucket_name, key=filename, content=data) def load_once(self, filename: str) -> bytes: + if not self.bucket_name: + raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set") data = self.client.get_object(bucket=self.bucket_name, key=filename).read() if not isinstance(data, bytes): raise TypeError(f"Expected bytes, got {type(data).__name__}") return data def load_stream(self, filename: str) -> Generator: + if not self.bucket_name: + raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set") response = self.client.get_object(bucket=self.bucket_name, key=filename) while chunk := response.read(4096): yield chunk def download(self, filename, target_filepath): + if not self.bucket_name: + raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set") self.client.get_object_to_file(bucket=self.bucket_name, 
key=filename, file_path=target_filepath) def exists(self, filename): + if not self.bucket_name: + return False res = self.client.head_object(bucket=self.bucket_name, key=filename) if res.status_code != 200: return False return True def delete(self, filename): + if not self.bucket_name: + return self.client.delete_object(bucket=self.bucket_name, key=filename) diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index d66c757249..735fff53d1 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -45,7 +45,7 @@ def build_from_message_file( } # Set the correct ID field based on transfer method - if message_file.transfer_method == FileTransferMethod.TOOL_FILE.value: + if message_file.transfer_method == FileTransferMethod.TOOL_FILE: mapping["tool_file_id"] = message_file.upload_file_id else: mapping["upload_file_id"] = message_file.upload_file_id @@ -64,7 +64,10 @@ def build_from_mapping( config: FileUploadConfig | None = None, strict_type_validation: bool = False, ) -> File: - transfer_method = FileTransferMethod.value_of(mapping.get("transfer_method")) + transfer_method_value = mapping.get("transfer_method") + if not transfer_method_value: + raise ValueError("transfer_method is required in file mapping") + transfer_method = FileTransferMethod.value_of(transfer_method_value) build_functions: dict[FileTransferMethod, Callable] = { FileTransferMethod.LOCAL_FILE: _build_from_local_file, @@ -104,6 +107,8 @@ def build_from_mappings( ) -> Sequence[File]: # TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query. # Implement batch processing to reduce database load when handling multiple files. + # Filter out None/empty mappings to avoid errors + valid_mappings = [m for m in mappings if m and m.get("transfer_method")] files = [ build_from_mapping( mapping=mapping, @@ -111,7 +116,7 @@ def build_from_mappings( config=config, strict_type_validation=strict_type_validation, ) - for mapping in mappings + for mapping in valid_mappings ] if ( @@ -368,9 +373,7 @@ def _build_from_datasource_file( if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") - file_type = ( - FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM.value else detected_file_type - ) + file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type return File( id=mapping.get("datasource_file_id"), diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index 2104e66254..494194369a 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -142,6 +142,8 @@ def build_segment(value: Any, /) -> Segment: # below if value is None: return NoneSegment() + if isinstance(value, Segment): + return value if isinstance(value, str): return StringSegment(value=value) if isinstance(value, bool): diff --git a/api/libs/collection_utils.py b/api/libs/collection_utils.py new file mode 100644 index 0000000000..f97308ca44 --- /dev/null +++ b/api/libs/collection_utils.py @@ -0,0 +1,14 @@ +def convert_to_lower_and_upper_set(inputs: list[str] | set[str]) -> set[str]: + """ + Convert a list or set of strings to a set containing both lower and upper case versions of each string. + + Args: + inputs (list[str] | set[str]): A list or set of strings to be converted. 
+ + Returns: + set[str]: A set containing both lower and upper case versions of each string. + """ + if not inputs: + return set() + else: + return {case for s in inputs if s for case in (s.lower(), s.upper())} diff --git a/api/libs/external_api.py b/api/libs/external_api.py index cf91b0117f..a59230caaa 100644 --- a/api/libs/external_api.py +++ b/api/libs/external_api.py @@ -22,7 +22,7 @@ def register_external_error_handlers(api: Api): got_request_exception.send(current_app, exception=e) # If Werkzeug already prepared a Response, just use it. - if getattr(e, "response", None) is not None: + if e.response is not None: return e.response status_code = getattr(e, "code", 500) or 500 @@ -94,7 +94,7 @@ def register_external_error_handlers(api: Api): got_request_exception.send(current_app, exception=e) status_code = 500 - data = getattr(e, "data", {"message": http_status_message(status_code)}) + data: dict[str, Any] = getattr(e, "data", {"message": http_status_message(status_code)}) # 🔒 Normalize non-mapping data (e.g., if someone set e.data = Response) if not isinstance(data, dict): @@ -106,7 +106,7 @@ def register_external_error_handlers(api: Api): # Log stack exc_info: Any = sys.exc_info() if exc_info[1] is None: - exc_info = None + exc_info = (None, None, None) current_app.log_exception(exc_info) return data, status_code diff --git a/api/libs/gmpy2_pkcs10aep_cipher.py b/api/libs/gmpy2_pkcs10aep_cipher.py index 9759156c0f..fc38d51005 100644 --- a/api/libs/gmpy2_pkcs10aep_cipher.py +++ b/api/libs/gmpy2_pkcs10aep_cipher.py @@ -27,7 +27,7 @@ import gmpy2 # type: ignore from Crypto import Random from Crypto.Signature.pss import MGF1 from Crypto.Util.number import bytes_to_long, ceil_div, long_to_bytes -from Crypto.Util.py3compat import _copy_bytes, bord +from Crypto.Util.py3compat import bord from Crypto.Util.strxor import strxor @@ -72,7 +72,7 @@ class PKCS1OAepCipher: else: self._mgf = lambda x, y: MGF1(x, y, self._hashObj) - self._label = _copy_bytes(None, None, label) + self._label = bytes(label) self._randfunc = randfunc def can_encrypt(self): @@ -120,7 +120,7 @@ class PKCS1OAepCipher: # Step 2b ps = b"\x00" * ps_len # Step 2c - db = lHash + ps + b"\x01" + _copy_bytes(None, None, message) + db = lHash + ps + b"\x01" + bytes(message) # Step 2d ros = self._randfunc(hLen) # Step 2e diff --git a/api/libs/helper.py b/api/libs/helper.py index 0551470f65..b878141d8e 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -24,7 +24,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from extensions.ext_redis import redis_client if TYPE_CHECKING: - from models.account import Account + from models import Account from models.model import EndUser logger = logging.getLogger(__name__) @@ -43,7 +43,7 @@ def extract_tenant_id(user: Union["Account", "EndUser"]) -> str | None: Raises: ValueError: If user is neither Account nor EndUser """ - from models.account import Account + from models import Account from models.model import EndUser if isinstance(user, Account): @@ -78,7 +78,7 @@ class AvatarUrlField(fields.Raw): if obj is None: return None - from models.account import Account + from models import Account if isinstance(obj, Account) and obj.avatar is not None: return file_helpers.get_signed_file_url(obj.avatar) diff --git a/api/libs/login.py b/api/libs/login.py index 0535f52ea1..2c75ef9297 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -7,12 +7,21 @@ from flask_login.config import EXEMPT_METHODS # type: ignore from werkzeug.local import LocalProxy from configs import 
dify_config -from models.account import Account +from models import Account from models.model import EndUser #: A proxy for the current user. If no user is logged in, this will be an #: anonymous user current_user = cast(Union[Account, EndUser, None], LocalProxy(lambda: _get_user())) + + +def current_account_with_tenant(): + if not isinstance(current_user, Account): + raise ValueError("current_user must be an Account instance") + assert current_user.current_tenant_id is not None, "The tenant information should be loaded." + return current_user, current_user.current_tenant_id + + from typing import ParamSpec, TypeVar P = ParamSpec("P") diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py index ecc4b3fb98..a270fa70fa 100644 --- a/api/libs/sendgrid.py +++ b/api/libs/sendgrid.py @@ -14,7 +14,7 @@ class SendGridClient: def send(self, mail: dict): logger.debug("Sending email with SendGrid") - + _to = "" try: _to = mail["to"] @@ -28,7 +28,7 @@ class SendGridClient: content = Content("text/html", mail["html"]) sg_mail = Mail(from_email, to_email, subject, content) mail_json = sg_mail.get() - response = sg.client.mail.send.post(request_body=mail_json) # ty: ignore [call-non-callable] + response = sg.client.mail.send.post(request_body=mail_json) # type: ignore logger.debug(response.status_code) logger.debug(response.body) logger.debug(response.headers) diff --git a/api/libs/validators.py b/api/libs/validators.py new file mode 100644 index 0000000000..4d762e8116 --- /dev/null +++ b/api/libs/validators.py @@ -0,0 +1,5 @@ +def validate_description_length(description: str | None) -> str | None: + """Validate description length.""" + if description and len(description) > 400: + raise ValueError("Description cannot exceed 400 characters.") + return description diff --git a/api/migrations/env.py b/api/migrations/env.py index a5d815dcfd..66a4614e80 100644 --- a/api/migrations/env.py +++ b/api/migrations/env.py @@ -37,10 +37,11 @@ config.set_main_option('sqlalchemy.url', get_engine_url()) # my_important_option = config.get_main_option("my_important_option") # ... etc. -from models.base import Base +from models.base import TypeBase + def get_metadata(): - return Base.metadata + return TypeBase.metadata def include_object(object, name, type_, reflected, compare_to): if type_ == "foreign_key_constraint": diff --git a/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py b/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py new file mode 100644 index 0000000000..7d6797fca0 --- /dev/null +++ b/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py @@ -0,0 +1,84 @@ +"""add app_mode for messsage + +Revision ID: d98acf217d43 +Revises: 68519ad5cd18 +Create Date: 2025-10-14 16:18:08.568011 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = 'd98acf217d43' +down_revision = '68519ad5cd18' +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('app_mode', sa.String(length=255), nullable=True)) + batch_op.create_index('message_app_mode_idx', ['app_mode'], unique=False) + + conn = op.get_bind() + + # Strategy: Update in batches to minimize lock time + # For large tables (millions of rows), this prevents long-running transactions + batch_size = 10000 + + print("Starting backfill of app_mode from conversations...") + + # Use a more efficient UPDATE with JOIN + # This query updates messages.app_mode from conversations.mode + # Using string formatting for LIMIT since it's a constant + update_query = f""" + UPDATE messages m + SET app_mode = c.mode + FROM conversations c + WHERE m.conversation_id = c.id + AND m.app_mode IS NULL + AND m.id IN ( + SELECT id FROM messages + WHERE app_mode IS NULL + LIMIT {batch_size} + ) + """ + + # Execute batched updates + total_updated = 0 + iteration = 0 + while True: + iteration += 1 + result = conn.execute(sa.text(update_query)) + + # Check if result is None or has no rowcount + if result is None: + print("Warning: Query returned None, stopping backfill") + break + + rows_updated = result.rowcount if hasattr(result, 'rowcount') else 0 + total_updated += rows_updated + + if rows_updated == 0: + break + + print(f"Iteration {iteration}: Updated {rows_updated} messages (total: {total_updated})") + + # For very large tables, add a small delay to reduce load + # Uncomment if needed: import time; time.sleep(0.1) + + print(f"Backfill completed. Total messages updated: {total_updated}") + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.drop_index('message_app_mode_idx') + batch_op.drop_column('app_mode') + + # ### end Alembic commands ### diff --git a/api/models/account.py b/api/models/account.py index 8c1f990aa2..86cd9e41b5 100644 --- a/api/models/account.py +++ b/api/models/account.py @@ -1,15 +1,16 @@ import enum import json +from dataclasses import field from datetime import datetime from typing import Any, Optional import sqlalchemy as sa from flask_login import UserMixin # type: ignore[import-untyped] from sqlalchemy import DateTime, String, func, select -from sqlalchemy.orm import Mapped, Session, mapped_column, reconstructor +from sqlalchemy.orm import Mapped, Session, mapped_column from typing_extensions import deprecated -from models.base import Base +from models.base import TypeBase from .engine import db from .types import StringUUID @@ -83,31 +84,37 @@ class AccountStatus(enum.StrEnum): CLOSED = "closed" -class Account(UserMixin, Base): +class Account(UserMixin, TypeBase): __tablename__ = "accounts" __table_args__ = (sa.PrimaryKeyConstraint("id", name="account_pkey"), sa.Index("account_email_idx", "email")) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) name: Mapped[str] = mapped_column(String(255)) email: Mapped[str] = mapped_column(String(255)) - password: Mapped[str | None] = mapped_column(String(255)) - password_salt: Mapped[str | None] = mapped_column(String(255)) - avatar: Mapped[str | None] = mapped_column(String(255), nullable=True) - interface_language: Mapped[str | None] = mapped_column(String(255)) - interface_theme: Mapped[str | None] = mapped_column(String(255), nullable=True) - timezone: Mapped[str | None] = mapped_column(String(255)) - last_login_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - last_login_ip: Mapped[str | None] = mapped_column(String(255), nullable=True) - last_active_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), nullable=False) - status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'active'::character varying")) - initialized_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), nullable=False) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), nullable=False) + password: Mapped[str | None] = mapped_column(String(255), default=None) + password_salt: Mapped[str | None] = mapped_column(String(255), default=None) + avatar: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) + interface_language: Mapped[str | None] = mapped_column(String(255), default=None) + interface_theme: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) + timezone: Mapped[str | None] = mapped_column(String(255), default=None) + last_login_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) + last_login_ip: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) + last_active_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + status: Mapped[str] = mapped_column( + String(16), server_default=sa.text("'active'::character varying"), default="active" + ) + initialized_at: 
Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) - @reconstructor - def init_on_load(self): - self.role: TenantAccountRole | None = None - self._current_tenant: Tenant | None = None + role: TenantAccountRole | None = field(default=None, init=False) + _current_tenant: "Tenant | None" = field(default=None, init=False) @property def is_password_set(self): @@ -226,18 +233,24 @@ class TenantStatus(enum.StrEnum): ARCHIVE = "archive" -class Tenant(Base): +class Tenant(TypeBase): __tablename__ = "tenants" __table_args__ = (sa.PrimaryKeyConstraint("id", name="tenant_pkey"),) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) name: Mapped[str] = mapped_column(String(255)) - encrypt_public_key: Mapped[str | None] = mapped_column(sa.Text) - plan: Mapped[str] = mapped_column(String(255), server_default=sa.text("'basic'::character varying")) - status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'normal'::character varying")) - custom_config: Mapped[str | None] = mapped_column(sa.Text) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), nullable=False) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) + encrypt_public_key: Mapped[str | None] = mapped_column(sa.Text, default=None) + plan: Mapped[str] = mapped_column( + String(255), server_default=sa.text("'basic'::character varying"), default="basic" + ) + status: Mapped[str] = mapped_column( + String(255), server_default=sa.text("'normal'::character varying"), default="normal" + ) + custom_config: Mapped[str | None] = mapped_column(sa.Text, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp(), init=False) def get_accounts(self) -> list[Account]: return list( @@ -257,7 +270,7 @@ class Tenant(Base): self.custom_config = json.dumps(value) -class TenantAccountJoin(Base): +class TenantAccountJoin(TypeBase): __tablename__ = "tenant_account_joins" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="tenant_account_join_pkey"), @@ -266,17 +279,21 @@ class TenantAccountJoin(Base): sa.UniqueConstraint("tenant_id", "account_id", name="unique_tenant_account_join"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID) account_id: Mapped[str] = mapped_column(StringUUID) - current: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false")) - role: Mapped[str] = mapped_column(String(16), server_default="normal") - invited_by: Mapped[str | None] = mapped_column(StringUUID) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) + current: Mapped[bool] = mapped_column(sa.Boolean, 
server_default=sa.text("false"), default=False) + role: Mapped[str] = mapped_column(String(16), server_default="normal", default="normal") + invited_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) -class AccountIntegrate(Base): +class AccountIntegrate(TypeBase): __tablename__ = "account_integrates" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="account_integrate_pkey"), @@ -284,16 +301,20 @@ class AccountIntegrate(Base): sa.UniqueConstraint("provider", "open_id", name="unique_provider_open_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) account_id: Mapped[str] = mapped_column(StringUUID) provider: Mapped[str] = mapped_column(String(16)) open_id: Mapped[str] = mapped_column(String(255)) encrypted_token: Mapped[str] = mapped_column(String(255)) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, server_default=func.current_timestamp(), nullable=False, init=False + ) -class InvitationCode(Base): +class InvitationCode(TypeBase): __tablename__ = "invitation_codes" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="invitation_code_pkey"), @@ -301,18 +322,22 @@ class InvitationCode(Base): sa.Index("invitation_codes_code_idx", "code", "status"), ) - id: Mapped[int] = mapped_column(sa.Integer) + id: Mapped[int] = mapped_column(sa.Integer, init=False) batch: Mapped[str] = mapped_column(String(255)) code: Mapped[str] = mapped_column(String(32)) - status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'unused'::character varying")) - used_at: Mapped[datetime | None] = mapped_column(DateTime) - used_by_tenant_id: Mapped[str | None] = mapped_column(StringUUID) - used_by_account_id: Mapped[str | None] = mapped_column(StringUUID) - deprecated_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - created_at: Mapped[datetime] = mapped_column(DateTime, server_default=sa.text("CURRENT_TIMESTAMP(0)")) + status: Mapped[str] = mapped_column( + String(16), server_default=sa.text("'unused'::character varying"), default="unused" + ) + used_at: Mapped[datetime | None] = mapped_column(DateTime, default=None) + used_by_tenant_id: Mapped[str | None] = mapped_column(StringUUID, default=None) + used_by_account_id: Mapped[str | None] = mapped_column(StringUUID, default=None) + deprecated_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, server_default=sa.text("CURRENT_TIMESTAMP(0)"), nullable=False, init=False + ) -class TenantPluginPermission(Base): +class TenantPluginPermission(TypeBase): class InstallPermission(enum.StrEnum): EVERYONE = "everyone" ADMINS = "admins" @@ -329,13 +354,17 @@ class TenantPluginPermission(Base): sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin"), ) - id: Mapped[str] = 
mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - install_permission: Mapped[InstallPermission] = mapped_column(String(16), nullable=False, server_default="everyone") - debug_permission: Mapped[DebugPermission] = mapped_column(String(16), nullable=False, server_default="noone") + install_permission: Mapped[InstallPermission] = mapped_column( + String(16), nullable=False, server_default="everyone", default=InstallPermission.EVERYONE + ) + debug_permission: Mapped[DebugPermission] = mapped_column( + String(16), nullable=False, server_default="noone", default=DebugPermission.NOBODY + ) -class TenantPluginAutoUpgradeStrategy(Base): +class TenantPluginAutoUpgradeStrategy(TypeBase): class StrategySetting(enum.StrEnum): DISABLED = "disabled" FIX_ONLY = "fix_only" @@ -352,12 +381,20 @@ class TenantPluginAutoUpgradeStrategy(Base): sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin_auto_upgrade_strategy"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - strategy_setting: Mapped[StrategySetting] = mapped_column(String(16), nullable=False, server_default="fix_only") - upgrade_time_of_day: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) # seconds of the day - upgrade_mode: Mapped[UpgradeMode] = mapped_column(String(16), nullable=False, server_default="exclude") - exclude_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False) # plugin_id (author/name) - include_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False) # plugin_id (author/name) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + strategy_setting: Mapped[StrategySetting] = mapped_column( + String(16), nullable=False, server_default="fix_only", default=StrategySetting.FIX_ONLY + ) + upgrade_mode: Mapped[UpgradeMode] = mapped_column( + String(16), nullable=False, server_default="exclude", default=UpgradeMode.EXCLUDE + ) + exclude_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False, default_factory=list) + include_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False, default_factory=list) + upgrade_time_of_day: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) diff --git a/api/models/api_based_extension.py b/api/models/api_based_extension.py index 60167d9069..e86826fc3d 100644 --- a/api/models/api_based_extension.py +++ b/api/models/api_based_extension.py @@ -9,7 +9,7 @@ from .base import Base from .types import StringUUID -class APIBasedExtensionPoint(enum.Enum): +class APIBasedExtensionPoint(enum.StrEnum): APP_EXTERNAL_DATA_TOOL_QUERY = "app.external_data_tool.query" PING = "ping" APP_MODERATION_INPUT = "app.moderation.input" diff --git 
a/api/models/dataset.py b/api/models/dataset.py index 25ebe14738..5653445f2b 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -61,18 +61,18 @@ class Dataset(Base): created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) - updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) embedding_model = mapped_column(db.String(255), nullable=True) embedding_model_provider = mapped_column(db.String(255), nullable=True) - keyword_number = db.Column(db.Integer, nullable=True, server_default=db.text("10")) + keyword_number = mapped_column(sa.Integer, nullable=True, server_default=db.text("10")) collection_binding_id = mapped_column(StringUUID, nullable=True) retrieval_model = mapped_column(JSONB, nullable=True) - built_in_field_enabled = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) - icon_info = db.Column(JSONB, nullable=True) - runtime_mode = db.Column(db.String(255), nullable=True, server_default=db.text("'general'::character varying")) - pipeline_id = db.Column(StringUUID, nullable=True) - chunk_structure = db.Column(db.String(255), nullable=True) - enable_api = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) + built_in_field_enabled = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + icon_info = mapped_column(JSONB, nullable=True) + runtime_mode = mapped_column(db.String(255), nullable=True, server_default=db.text("'general'::character varying")) + pipeline_id = mapped_column(StringUUID, nullable=True) + chunk_structure = mapped_column(db.String(255), nullable=True) + enable_api = mapped_column(sa.Boolean, nullable=False, server_default=db.text("true")) @property def total_documents(self): @@ -184,7 +184,7 @@ class Dataset(Base): @property def retrieval_model_dict(self): default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 2, @@ -754,7 +754,7 @@ class DocumentSegment(Base): if process_rule and process_rule.mode == "hierarchical": rules_dict = process_rule.rules_dict if rules_dict: - rules = Rule(**rules_dict) + rules = Rule.model_validate(rules_dict) if rules.parent_mode and rules.parent_mode != ParentMode.FULL_DOC: child_chunks = ( db.session.query(ChildChunk) @@ -772,7 +772,7 @@ class DocumentSegment(Base): if process_rule and process_rule.mode == "hierarchical": rules_dict = process_rule.rules_dict if rules_dict: - rules = Rule(**rules_dict) + rules = Rule.model_validate(rules_dict) if rules.parent_mode: child_chunks = ( db.session.query(ChildChunk) @@ -1226,21 +1226,21 @@ class PipelineBuiltInTemplate(Base): # type: ignore[name-defined] __tablename__ = "pipeline_built_in_templates" __table_args__ = (db.PrimaryKeyConstraint("id", name="pipeline_built_in_template_pkey"),) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=False) - chunk_structure = db.Column(db.String(255), nullable=False) - icon = db.Column(db.JSON, nullable=False) - yaml_content = db.Column(db.Text, nullable=False) - copyright = 
db.Column(db.String(255), nullable=False) - privacy_policy = db.Column(db.String(255), nullable=False) - position = db.Column(db.Integer, nullable=False) - install_count = db.Column(db.Integer, nullable=False, default=0) - language = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - created_by = db.Column(StringUUID, nullable=False) - updated_by = db.Column(StringUUID, nullable=True) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(sa.Text, nullable=False) + chunk_structure = mapped_column(db.String(255), nullable=False) + icon = mapped_column(sa.JSON, nullable=False) + yaml_content = mapped_column(sa.Text, nullable=False) + copyright = mapped_column(db.String(255), nullable=False) + privacy_policy = mapped_column(db.String(255), nullable=False) + position = mapped_column(sa.Integer, nullable=False) + install_count = mapped_column(sa.Integer, nullable=False, default=0) + language = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_by = mapped_column(StringUUID, nullable=False) + updated_by = mapped_column(StringUUID, nullable=True) @property def created_user_name(self): @@ -1257,20 +1257,20 @@ class PipelineCustomizedTemplate(Base): # type: ignore[name-defined] db.Index("pipeline_customized_template_tenant_idx", "tenant_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=False) - chunk_structure = db.Column(db.String(255), nullable=False) - icon = db.Column(db.JSON, nullable=False) - position = db.Column(db.Integer, nullable=False) - yaml_content = db.Column(db.Text, nullable=False) - install_count = db.Column(db.Integer, nullable=False, default=0) - language = db.Column(db.String(255), nullable=False) - created_by = db.Column(StringUUID, nullable=False) - updated_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id = mapped_column(StringUUID, nullable=False) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(sa.Text, nullable=False) + chunk_structure = mapped_column(db.String(255), nullable=False) + icon = mapped_column(sa.JSON, nullable=False) + position = mapped_column(sa.Integer, nullable=False) + yaml_content = mapped_column(sa.Text, nullable=False) + install_count = mapped_column(sa.Integer, nullable=False, default=0) + language = mapped_column(db.String(255), nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + updated_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) @property def created_user_name(self): @@ -1284,17 +1284,17 @@ 
class Pipeline(Base): # type: ignore[name-defined] __tablename__ = "pipelines" __table_args__ = (db.PrimaryKeyConstraint("id", name="pipeline_pkey"),) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=False, server_default=db.text("''::character varying")) - workflow_id = db.Column(StringUUID, nullable=True) - is_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - is_published = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - created_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(sa.Text, nullable=False, server_default=db.text("''::character varying")) + workflow_id = mapped_column(StringUUID, nullable=True) + is_public = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + is_published = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + created_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) def retrieve_dataset(self, session: Session): return session.query(Dataset).where(Dataset.pipeline_id == self.id).first() @@ -1307,25 +1307,25 @@ class DocumentPipelineExecutionLog(Base): db.Index("document_pipeline_execution_logs_document_id_idx", "document_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - pipeline_id = db.Column(StringUUID, nullable=False) - document_id = db.Column(StringUUID, nullable=False) - datasource_type = db.Column(db.String(255), nullable=False) - datasource_info = db.Column(db.Text, nullable=False) - datasource_node_id = db.Column(db.String(255), nullable=False) - input_data = db.Column(db.JSON, nullable=False) - created_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + pipeline_id = mapped_column(StringUUID, nullable=False) + document_id = mapped_column(StringUUID, nullable=False) + datasource_type = mapped_column(db.String(255), nullable=False) + datasource_info = mapped_column(sa.Text, nullable=False) + datasource_node_id = mapped_column(db.String(255), nullable=False) + input_data = mapped_column(sa.JSON, nullable=False) + created_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) class PipelineRecommendedPlugin(Base): __tablename__ = "pipeline_recommended_plugins" __table_args__ = (db.PrimaryKeyConstraint("id", name="pipeline_recommended_plugin_pkey"),) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - plugin_id = db.Column(db.Text, nullable=False) - provider_name = db.Column(db.Text, nullable=False) - 
position = db.Column(db.Integer, nullable=False, default=0) - active = db.Column(db.Boolean, nullable=False, default=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + plugin_id = mapped_column(sa.Text, nullable=False) + provider_name = mapped_column(sa.Text, nullable=False) + position = mapped_column(sa.Integer, nullable=False, default=0) + active = mapped_column(sa.Boolean, nullable=False, default=True) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/model.py b/api/models/model.py index a8218c3a4e..2373421e7d 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -186,13 +186,13 @@ class App(Base): if len(keys) >= 4: provider_type = tool.get("provider_type", "") provider_id = tool.get("provider_id", "") - if provider_type == ToolProviderType.API.value: + if provider_type == ToolProviderType.API: try: uuid.UUID(provider_id) except Exception: continue api_provider_ids.append(provider_id) - if provider_type == ToolProviderType.BUILT_IN.value: + if provider_type == ToolProviderType.BUILT_IN: try: # check if it's hardcoded try: @@ -251,23 +251,23 @@ class App(Base): provider_type = tool.get("provider_type", "") provider_id = tool.get("provider_id", "") - if provider_type == ToolProviderType.API.value: + if provider_type == ToolProviderType.API: if uuid.UUID(provider_id) not in existing_api_providers: deleted_tools.append( { - "type": ToolProviderType.API.value, + "type": ToolProviderType.API, "tool_name": tool["tool_name"], "provider_id": provider_id, } ) - if provider_type == ToolProviderType.BUILT_IN.value: + if provider_type == ToolProviderType.BUILT_IN: generic_provider_id = GenericProviderID(provider_id) if not existing_builtin_providers[generic_provider_id.provider_name]: deleted_tools.append( { - "type": ToolProviderType.BUILT_IN.value, + "type": ToolProviderType.BUILT_IN, "tool_name": tool["tool_name"], "provider_id": provider_id, # use the original one } @@ -910,6 +910,7 @@ class Message(Base): Index("message_account_idx", "app_id", "from_source", "from_account_id"), Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), Index("message_created_at_idx", "created_at"), + Index("message_app_mode_idx", "app_mode"), ) id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) @@ -943,6 +944,7 @@ class Message(Base): updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) agent_based: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) workflow_run_id: Mapped[str | None] = mapped_column(StringUUID) + app_mode: Mapped[str | None] = mapped_column(String(255), nullable=True) @property def inputs(self) -> dict[str, Any]: @@ -1154,7 +1156,7 @@ class Message(Base): files: list[File] = [] for message_file in message_files: - if message_file.transfer_method == FileTransferMethod.LOCAL_FILE.value: + if message_file.transfer_method == FileTransferMethod.LOCAL_FILE: if message_file.upload_file_id is None: raise ValueError(f"MessageFile {message_file.id} is a local file but has no upload_file_id") file = file_factory.build_from_mapping( @@ -1166,7 +1168,7 @@ class Message(Base): }, 
tenant_id=current_app.tenant_id, ) - elif message_file.transfer_method == FileTransferMethod.REMOTE_URL.value: + elif message_file.transfer_method == FileTransferMethod.REMOTE_URL: if message_file.url is None: raise ValueError(f"MessageFile {message_file.id} is a remote url but has no url") file = file_factory.build_from_mapping( @@ -1179,7 +1181,7 @@ class Message(Base): }, tenant_id=current_app.tenant_id, ) - elif message_file.transfer_method == FileTransferMethod.TOOL_FILE.value: + elif message_file.transfer_method == FileTransferMethod.TOOL_FILE: if message_file.upload_file_id is None: assert message_file.url is not None message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0] diff --git a/api/models/oauth.py b/api/models/oauth.py index 1d5d37e3e1..ef23780dc8 100644 --- a/api/models/oauth.py +++ b/api/models/oauth.py @@ -1,7 +1,8 @@ from datetime import datetime +import sqlalchemy as sa from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy.orm import Mapped +from sqlalchemy.orm import Mapped, mapped_column from .base import Base from .engine import db @@ -15,10 +16,10 @@ class DatasourceOauthParamConfig(Base): # type: ignore[name-defined] db.UniqueConstraint("plugin_id", "provider", name="datasource_oauth_config_datasource_id_provider_idx"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - plugin_id: Mapped[str] = db.Column(db.String(255), nullable=False) - provider: Mapped[str] = db.Column(db.String(255), nullable=False) - system_credentials: Mapped[dict] = db.Column(JSONB, nullable=False) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + plugin_id: Mapped[str] = mapped_column(db.String(255), nullable=False) + provider: Mapped[str] = mapped_column(db.String(255), nullable=False) + system_credentials: Mapped[dict] = mapped_column(JSONB, nullable=False) class DatasourceProvider(Base): @@ -28,19 +29,19 @@ class DatasourceProvider(Base): db.UniqueConstraint("tenant_id", "plugin_id", "provider", "name", name="datasource_provider_unique_name"), db.Index("datasource_provider_auth_type_provider_idx", "tenant_id", "plugin_id", "provider"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id = db.Column(StringUUID, nullable=False) - name: Mapped[str] = db.Column(db.String(255), nullable=False) - provider: Mapped[str] = db.Column(db.String(255), nullable=False) - plugin_id: Mapped[str] = db.Column(db.String(255), nullable=False) - auth_type: Mapped[str] = db.Column(db.String(255), nullable=False) - encrypted_credentials: Mapped[dict] = db.Column(JSONB, nullable=False) - avatar_url: Mapped[str] = db.Column(db.Text, nullable=True, default="default") - is_default: Mapped[bool] = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - expires_at: Mapped[int] = db.Column(db.Integer, nullable=False, server_default="-1") + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id = mapped_column(StringUUID, nullable=False) + name: Mapped[str] = mapped_column(db.String(255), nullable=False) + provider: Mapped[str] = mapped_column(db.String(255), nullable=False) + plugin_id: Mapped[str] = mapped_column(db.String(255), nullable=False) + auth_type: Mapped[str] = mapped_column(db.String(255), nullable=False) + encrypted_credentials: Mapped[dict] = mapped_column(JSONB, nullable=False) + avatar_url: Mapped[str] = mapped_column(sa.Text, nullable=True, default="default") + is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false")) + 
expires_at: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default="-1") - created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) - updated_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) + created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) class DatasourceOauthTenantParamConfig(Base): @@ -50,12 +51,12 @@ class DatasourceOauthTenantParamConfig(Base): db.UniqueConstraint("tenant_id", "plugin_id", "provider", name="datasource_oauth_tenant_config_unique"), ) - id = db.Column(StringUUID, server_default=db.text("uuidv7()")) - tenant_id = db.Column(StringUUID, nullable=False) - provider: Mapped[str] = db.Column(db.String(255), nullable=False) - plugin_id: Mapped[str] = db.Column(db.String(255), nullable=False) - client_params: Mapped[dict] = db.Column(JSONB, nullable=False, default={}) - enabled: Mapped[bool] = db.Column(db.Boolean, nullable=False, default=False) + id = mapped_column(StringUUID, server_default=db.text("uuidv7()")) + tenant_id = mapped_column(StringUUID, nullable=False) + provider: Mapped[str] = mapped_column(db.String(255), nullable=False) + plugin_id: Mapped[str] = mapped_column(db.String(255), nullable=False) + client_params: Mapped[dict] = mapped_column(JSONB, nullable=False, default={}) + enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False) - created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) - updated_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, default=datetime.now) + created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, default=datetime.now) diff --git a/api/models/provider.py b/api/models/provider.py index aacc6e505a..f6852d49f4 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -107,7 +107,7 @@ class Provider(Base): """ Returns True if the provider is enabled. 
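Several hunks in api/models/model.py above, and the provider.py hunk just below, drop .value when comparing enum members against plain strings (ToolProviderType.API, FileTransferMethod.TOOL_FILE, ProviderType.SYSTEM). That is only safe for str-backed enums; a small illustration with hypothetical stand-in enums, not the project's actual definitions:

    import enum

    class StrBacked(enum.StrEnum):   # stands in for the str-backed provider enums
        SYSTEM = "system"

    class PlainBacked(enum.Enum):    # a non-str enum for contrast
        SYSTEM = "system"

    assert StrBacked.SYSTEM == "system"    # StrEnum members compare equal to their value
    assert PlainBacked.SYSTEM != "system"  # plain Enum members do not, hence the old .value calls

This is also why APIBasedExtensionPoint is switched from enum.Enum to enum.StrEnum earlier in this diff.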
""" - if self.provider_type == ProviderType.SYSTEM.value: + if self.provider_type == ProviderType.SYSTEM: return self.is_valid else: return self.is_valid and self.token_is_set diff --git a/api/models/source.py b/api/models/source.py index 5b4c486bc4..0ed7c4c70e 100644 --- a/api/models/source.py +++ b/api/models/source.py @@ -6,12 +6,12 @@ from sqlalchemy import DateTime, String, func from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, mapped_column -from models.base import Base +from models.base import TypeBase from .types import StringUUID -class DataSourceOauthBinding(Base): +class DataSourceOauthBinding(TypeBase): __tablename__ = "data_source_oauth_bindings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="source_binding_pkey"), @@ -19,17 +19,25 @@ class DataSourceOauthBinding(Base): sa.Index("source_info_idx", "source_info", postgresql_using="gin"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - tenant_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) access_token: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - source_info = mapped_column(JSONB, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - disabled: Mapped[bool | None] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false")) + source_info: Mapped[dict] = mapped_column(JSONB, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) + disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False) -class DataSourceApiKeyAuthBinding(Base): +class DataSourceApiKeyAuthBinding(TypeBase): __tablename__ = "data_source_api_key_auth_bindings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="data_source_api_key_auth_binding_pkey"), @@ -37,14 +45,22 @@ class DataSourceApiKeyAuthBinding(Base): sa.Index("data_source_api_key_auth_binding_provider_idx", "provider"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - tenant_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) category: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - credentials = mapped_column(sa.Text, nullable=True) # JSON - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - disabled: Mapped[bool | None] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false")) + credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) # JSON + created_at: 
Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) + disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False) def to_dict(self): return { @@ -52,7 +68,7 @@ class DataSourceApiKeyAuthBinding(Base): "tenant_id": self.tenant_id, "category": self.category, "provider": self.provider, - "credentials": json.loads(self.credentials), + "credentials": json.loads(self.credentials) if self.credentials else None, "created_at": self.created_at.timestamp(), "updated_at": self.updated_at.timestamp(), "disabled": self.disabled, diff --git a/api/models/task.py b/api/models/task.py index 3da1674536..513f167cce 100644 --- a/api/models/task.py +++ b/api/models/task.py @@ -6,43 +6,43 @@ from sqlalchemy import DateTime, String from sqlalchemy.orm import Mapped, mapped_column from libs.datetime_utils import naive_utc_now -from models.base import Base - -from .engine import db +from models.base import TypeBase -class CeleryTask(Base): +class CeleryTask(TypeBase): """Task result/status.""" __tablename__ = "celery_taskmeta" - id = mapped_column(sa.Integer, sa.Sequence("task_id_sequence"), primary_key=True, autoincrement=True) - task_id = mapped_column(String(155), unique=True) - status = mapped_column(String(50), default=states.PENDING) - result = mapped_column(db.PickleType, nullable=True) - date_done = mapped_column( + id: Mapped[int] = mapped_column( + sa.Integer, sa.Sequence("task_id_sequence"), primary_key=True, autoincrement=True, init=False + ) + task_id: Mapped[str] = mapped_column(String(155), unique=True) + status: Mapped[str] = mapped_column(String(50), default=states.PENDING) + result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None) + date_done: Mapped[datetime | None] = mapped_column( DateTime, - default=lambda: naive_utc_now(), - onupdate=lambda: naive_utc_now(), + default=naive_utc_now, + onupdate=naive_utc_now, nullable=True, ) - traceback = mapped_column(sa.Text, nullable=True) - name = mapped_column(String(155), nullable=True) - args = mapped_column(sa.LargeBinary, nullable=True) - kwargs = mapped_column(sa.LargeBinary, nullable=True) - worker = mapped_column(String(155), nullable=True) - retries: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) - queue = mapped_column(String(155), nullable=True) + traceback: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) + name: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) + args: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None) + kwargs: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None) + worker: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) + retries: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None) + queue: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) -class CeleryTaskSet(Base): +class CeleryTaskSet(TypeBase): """TaskSet result.""" __tablename__ = "celery_tasksetmeta" id: Mapped[int] = mapped_column( - sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True + sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True, init=False ) - taskset_id = 
mapped_column(String(155), unique=True) - result = mapped_column(db.PickleType, nullable=True) - date_done: Mapped[datetime | None] = mapped_column(DateTime, default=lambda: naive_utc_now(), nullable=True) + taskset_id: Mapped[str] = mapped_column(String(155), unique=True) + result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None) + date_done: Mapped[datetime | None] = mapped_column(DateTime, default=naive_utc_now, nullable=True) diff --git a/api/models/tools.py b/api/models/tools.py index 7211d7aa3a..aec53da50c 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,6 +1,7 @@ import json from collections.abc import Mapping from datetime import datetime +from decimal import Decimal from typing import TYPE_CHECKING, Any, cast from urllib.parse import urlparse @@ -13,7 +14,7 @@ from core.helper import encrypter from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration -from models.base import Base, TypeBase +from models.base import TypeBase from .engine import db from .model import Account, App, Tenant @@ -42,28 +43,28 @@ class ToolOAuthSystemClient(TypeBase): # tenant level tool oauth client params (client_id, client_secret, etc.) -class ToolOAuthTenantClient(Base): +class ToolOAuthTenantClient(TypeBase): __tablename__ = "tool_oauth_tenant_clients" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="tool_oauth_tenant_client_pkey"), sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_tool_oauth_tenant_client"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) + enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), init=False) # oauth params of the tool provider - encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False) + encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False, init=False) @property def oauth_params(self) -> dict[str, Any]: return cast(dict[str, Any], json.loads(self.encrypted_oauth_params or "{}")) -class BuiltinToolProvider(Base): +class BuiltinToolProvider(TypeBase): """ This table stores the tool provider information for built-in tools for each tenant. 
""" @@ -75,37 +76,45 @@ class BuiltinToolProvider(Base): ) # id of the tool provider - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) name: Mapped[str] = mapped_column( - String(256), nullable=False, server_default=sa.text("'API KEY 1'::character varying") + String(256), + nullable=False, + server_default=sa.text("'API KEY 1'::character varying"), ) # id of the tenant - tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=True) + tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) # who created this tool provider user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # name of the tool provider provider: Mapped[str] = mapped_column(String(256), nullable=False) # credential of the tool provider - encrypted_credentials: Mapped[str] = mapped_column(sa.Text, nullable=True) + encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False ) updated_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, ) - is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) + is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False) # credential type, e.g., "api-key", "oauth2" credential_type: Mapped[str] = mapped_column( - String(32), nullable=False, server_default=sa.text("'api-key'::character varying") + String(32), nullable=False, server_default=sa.text("'api-key'::character varying"), default="api-key" ) - expires_at: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, server_default=sa.text("-1")) + expires_at: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, server_default=sa.text("-1"), default=-1) @property def credentials(self) -> dict[str, Any]: + if not self.encrypted_credentials: + return {} return cast(dict[str, Any], json.loads(self.encrypted_credentials)) -class ApiToolProvider(Base): +class ApiToolProvider(TypeBase): """ The table stores the api providers. 
""" @@ -116,31 +125,43 @@ class ApiToolProvider(Base): sa.UniqueConstraint("name", "tenant_id", name="unique_api_tool_provider"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # name of the api provider - name = mapped_column(String(255), nullable=False, server_default=sa.text("'API KEY 1'::character varying")) + name: Mapped[str] = mapped_column( + String(255), + nullable=False, + server_default=sa.text("'API KEY 1'::character varying"), + ) # icon icon: Mapped[str] = mapped_column(String(255), nullable=False) # original schema - schema = mapped_column(sa.Text, nullable=False) + schema: Mapped[str] = mapped_column(sa.Text, nullable=False) schema_type_str: Mapped[str] = mapped_column(String(40), nullable=False) # who created this tool - user_id = mapped_column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = mapped_column(StringUUID, nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # description of the provider - description = mapped_column(sa.Text, nullable=False) + description: Mapped[str] = mapped_column(sa.Text, nullable=False) # json format tools - tools_str = mapped_column(sa.Text, nullable=False) + tools_str: Mapped[str] = mapped_column(sa.Text, nullable=False) # json format credentials - credentials_str = mapped_column(sa.Text, nullable=False) + credentials_str: Mapped[str] = mapped_column(sa.Text, nullable=False) # privacy policy - privacy_policy = mapped_column(String(255), nullable=True) + privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) # custom_disclaimer custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") - created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) @property def schema_type(self) -> "ApiProviderSchemaType": @@ -152,7 +173,7 @@ class ApiToolProvider(Base): def tools(self) -> list["ApiToolBundle"]: from core.tools.entities.tool_bundle import ApiToolBundle - return [ApiToolBundle(**tool) for tool in json.loads(self.tools_str)] + return [ApiToolBundle.model_validate(tool) for tool in json.loads(self.tools_str)] @property def credentials(self) -> dict[str, Any]: @@ -189,7 +210,7 @@ class ToolLabelBinding(TypeBase): label_name: Mapped[str] = mapped_column(String(40), nullable=False) -class WorkflowToolProvider(Base): +class WorkflowToolProvider(TypeBase): """ The table stores the workflow providers. 
""" @@ -201,7 +222,7 @@ class WorkflowToolProvider(Base): sa.UniqueConstraint("tenant_id", "app_id", name="unique_workflow_tool_provider_app_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # name of the workflow provider name: Mapped[str] = mapped_column(String(255), nullable=False) # label of the workflow provider @@ -219,15 +240,19 @@ class WorkflowToolProvider(Base): # description of the provider description: Mapped[str] = mapped_column(sa.Text, nullable=False) # parameter configuration - parameter_configuration: Mapped[str] = mapped_column(sa.Text, nullable=False, server_default="[]") + parameter_configuration: Mapped[str] = mapped_column(sa.Text, nullable=False, server_default="[]", default="[]") # privacy policy - privacy_policy: Mapped[str] = mapped_column(String(255), nullable=True, server_default="") + privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, server_default="", default=None) created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False ) updated_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, ) @property @@ -242,14 +267,17 @@ class WorkflowToolProvider(Base): def parameter_configurations(self) -> list["WorkflowToolParameterConfiguration"]: from core.tools.entities.tool_entities import WorkflowToolParameterConfiguration - return [WorkflowToolParameterConfiguration(**config) for config in json.loads(self.parameter_configuration)] + return [ + WorkflowToolParameterConfiguration.model_validate(config) + for config in json.loads(self.parameter_configuration) + ] @property def app(self) -> App | None: return db.session.query(App).where(App.id == self.app_id).first() -class MCPToolProvider(Base): +class MCPToolProvider(TypeBase): """ The table stores the mcp providers. 
""" @@ -262,7 +290,7 @@ class MCPToolProvider(Base): sa.UniqueConstraint("tenant_id", "server_identifier", name="unique_mcp_provider_server_identifier"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # name of the mcp provider name: Mapped[str] = mapped_column(String(40), nullable=False) # server identifier of the mcp provider @@ -272,27 +300,33 @@ class MCPToolProvider(Base): # hash of server_url for uniqueness check server_url_hash: Mapped[str] = mapped_column(String(64), nullable=False) # icon of the mcp provider - icon: Mapped[str] = mapped_column(String(255), nullable=True) + icon: Mapped[str | None] = mapped_column(String(255), nullable=True) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # who created this tool user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # encrypted credentials - encrypted_credentials: Mapped[str] = mapped_column(sa.Text, nullable=True) + encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) # authed authed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False) # tools tools: Mapped[str] = mapped_column(sa.Text, nullable=False, default="[]") created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False ) updated_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, + ) + timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30"), default=30.0) + sse_read_timeout: Mapped[float] = mapped_column( + sa.Float, nullable=False, server_default=sa.text("300"), default=300.0 ) - timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30")) - sse_read_timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("300")) # encrypted headers for MCP server requests - encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True) + encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) def load_user(self) -> Account | None: return db.session.query(Account).where(Account.id == self.user_id).first() @@ -303,21 +337,24 @@ class MCPToolProvider(Base): @property def credentials(self) -> dict[str, Any]: + if not self.encrypted_credentials: + return {} try: return cast(dict[str, Any], json.loads(self.encrypted_credentials)) or {} - except Exception: + except json.JSONDecodeError: return {} @property def mcp_tools(self) -> list["MCPTool"]: from core.mcp.types import Tool as MCPTool - return [MCPTool(**tool) for tool in json.loads(self.tools)] + return [MCPTool.model_validate(tool) for tool in json.loads(self.tools)] @property def provider_icon(self) -> Mapping[str, str] | str: from core.file import helpers as file_helpers + assert self.icon try: return json.loads(self.icon) except json.JSONDecodeError: @@ -416,7 +453,7 @@ class MCPToolProvider(Base): return encrypter.decrypt(self.credentials) -class ToolModelInvoke(Base): +class ToolModelInvoke(TypeBase): """ store the invoke logs from tool invoke """ @@ -424,37 +461,47 @@ class 
ToolModelInvoke(Base): __tablename__ = "tool_model_invokes" __table_args__ = (sa.PrimaryKeyConstraint("id", name="tool_model_invoke_pkey"),) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # who invoke this tool - user_id = mapped_column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = mapped_column(StringUUID, nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # provider provider: Mapped[str] = mapped_column(String(255), nullable=False) # type - tool_type = mapped_column(String(40), nullable=False) + tool_type: Mapped[str] = mapped_column(String(40), nullable=False) # tool name - tool_name = mapped_column(String(128), nullable=False) + tool_name: Mapped[str] = mapped_column(String(128), nullable=False) # invoke parameters - model_parameters = mapped_column(sa.Text, nullable=False) + model_parameters: Mapped[str] = mapped_column(sa.Text, nullable=False) # prompt messages - prompt_messages = mapped_column(sa.Text, nullable=False) + prompt_messages: Mapped[str] = mapped_column(sa.Text, nullable=False) # invoke response - model_response = mapped_column(sa.Text, nullable=False) + model_response: Mapped[str] = mapped_column(sa.Text, nullable=False) prompt_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) answer_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) - answer_unit_price = mapped_column(sa.Numeric(10, 4), nullable=False) - answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) - provider_response_latency = mapped_column(sa.Float, nullable=False, server_default=sa.text("0")) - total_price = mapped_column(sa.Numeric(10, 7)) + answer_unit_price: Mapped[Decimal] = mapped_column(sa.Numeric(10, 4), nullable=False) + answer_price_unit: Mapped[Decimal] = mapped_column( + sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001") + ) + provider_response_latency: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0")) + total_price: Mapped[Decimal | None] = mapped_column(sa.Numeric(10, 7)) currency: Mapped[str] = mapped_column(String(255), nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) @deprecated -class ToolConversationVariables(Base): +class ToolConversationVariables(TypeBase): """ store the conversation variables from tool invoke """ @@ -467,18 +514,26 @@ class ToolConversationVariables(Base): sa.Index("conversation_id_idx", "conversation_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # conversation user id - user_id = mapped_column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = mapped_column(StringUUID, 
nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # conversation id - conversation_id = mapped_column(StringUUID, nullable=False) + conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # variables pool - variables_str = mapped_column(sa.Text, nullable=False) + variables_str: Mapped[str] = mapped_column(sa.Text, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) @property def variables(self): @@ -516,7 +571,7 @@ class ToolFile(TypeBase): @deprecated -class DeprecatedPublishedAppTool(Base): +class DeprecatedPublishedAppTool(TypeBase): """ The table stores the apps published as a tool for each person. """ @@ -527,29 +582,37 @@ class DeprecatedPublishedAppTool(Base): sa.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # id of the app - app_id = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False) + app_id: Mapped[str] = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False) user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # who published this tool - description = mapped_column(sa.Text, nullable=False) + description: Mapped[str] = mapped_column(sa.Text, nullable=False) # llm_description of the tool, for LLM - llm_description = mapped_column(sa.Text, nullable=False) + llm_description: Mapped[str] = mapped_column(sa.Text, nullable=False) # query description, query will be seem as a parameter of the tool, # to describe this parameter to llm, we need this field - query_description = mapped_column(sa.Text, nullable=False) + query_description: Mapped[str] = mapped_column(sa.Text, nullable=False) # query name, the name of the query parameter - query_name = mapped_column(String(40), nullable=False) + query_name: Mapped[str] = mapped_column(String(40), nullable=False) # name of the tool provider - tool_name = mapped_column(String(40), nullable=False) + tool_name: Mapped[str] = mapped_column(String(40), nullable=False) # author - author = mapped_column(String(40), nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")) - updated_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")) + author: Mapped[str] = mapped_column(String(40), nullable=False) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=sa.text("CURRENT_TIMESTAMP(0)"), + onupdate=func.current_timestamp(), + init=False, + ) @property def description_i18n(self) -> "I18nObject": from core.tools.entities.common_entities import I18nObject - return I18nObject(**json.loads(self.description)) + return I18nObject.model_validate(json.loads(self.description)) diff --git a/api/models/web.py 
b/api/models/web.py index 74f99e187b..7df5bd6e87 100644 --- a/api/models/web.py +++ b/api/models/web.py @@ -4,46 +4,58 @@ import sqlalchemy as sa from sqlalchemy import DateTime, String, func from sqlalchemy.orm import Mapped, mapped_column -from models.base import Base +from models.base import TypeBase from .engine import db from .model import Message from .types import StringUUID -class SavedMessage(Base): +class SavedMessage(TypeBase): __tablename__ = "saved_messages" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="saved_message_pkey"), sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - app_id = mapped_column(StringUUID, nullable=False) - message_id = mapped_column(StringUUID, nullable=False) - created_by_role = mapped_column( + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_by_role: Mapped[str] = mapped_column( String(255), nullable=False, server_default=sa.text("'end_user'::character varying") ) - created_by = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + init=False, + ) @property def message(self): return db.session.query(Message).where(Message.id == self.message_id).first() -class PinnedConversation(Base): +class PinnedConversation(TypeBase): __tablename__ = "pinned_conversations" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="pinned_conversation_pkey"), sa.Index("pinned_conversation_conversation_idx", "app_id", "conversation_id", "created_by_role", "created_by"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - app_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) conversation_id: Mapped[str] = mapped_column(StringUUID) - created_by_role = mapped_column( - String(255), nullable=False, server_default=sa.text("'end_user'::character varying") + created_by_role: Mapped[str] = mapped_column( + String(255), + nullable=False, + server_default=sa.text("'end_user'::character varying"), + ) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + init=False, ) - created_by = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/workflow.py b/api/models/workflow.py index e61005953e..b898f02612 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -360,7 +360,9 @@ class Workflow(Base): @property def environment_variables(self) -> Sequence[StringVariable | IntegerVariable | FloatVariable | SecretVariable]: - # _environment_variables is guaranteed to be non-None due to server_default="{}" + # TODO: find some way to init `self._environment_variables` when instance created. 
+ if self._environment_variables is None: + self._environment_variables = "{}" # Use workflow.tenant_id to avoid relying on request user in background threads tenant_id = self.tenant_id @@ -444,7 +446,9 @@ class Workflow(Base): @property def conversation_variables(self) -> Sequence[Variable]: - # _conversation_variables is guaranteed to be non-None due to server_default="{}" + # TODO: find some way to init `self._conversation_variables` when instance created. + if self._conversation_variables is None: + self._conversation_variables = "{}" variables_dict: dict[str, Any] = json.loads(self._conversation_variables) results = [variable_factory.build_conversation_variable_from_mapping(v) for v in variables_dict.values()] @@ -825,14 +829,14 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo if self.execution_metadata_dict: from core.workflow.nodes import NodeType - if self.node_type == NodeType.TOOL.value and "tool_info" in self.execution_metadata_dict: + if self.node_type == NodeType.TOOL and "tool_info" in self.execution_metadata_dict: tool_info: dict[str, Any] = self.execution_metadata_dict["tool_info"] extras["icon"] = ToolManager.get_tool_icon( tenant_id=self.tenant_id, provider_type=tool_info["provider_type"], provider_id=tool_info["provider_id"], ) - elif self.node_type == NodeType.DATASOURCE.value and "datasource_info" in self.execution_metadata_dict: + elif self.node_type == NodeType.DATASOURCE and "datasource_info" in self.execution_metadata_dict: datasource_info = self.execution_metadata_dict["datasource_info"] extras["icon"] = datasource_info.get("icon") return extras diff --git a/api/pyproject.toml b/api/pyproject.toml index 012702edd2..62af88a1b2 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,11 +1,10 @@ [project] name = "dify-api" -version = "1.9.0" +version = "1.9.1" requires-python = ">=3.11,<3.13" dependencies = [ "arize-phoenix-otel~=0.9.2", - "authlib==1.6.4", "azure-identity==1.16.1", "beautifulsoup4==4.12.2", "boto3==1.35.99", @@ -14,7 +13,7 @@ dependencies = [ "celery~=5.5.2", "chardet~=5.1.0", "flask~=3.1.2", - "flask-compress~=1.17", + "flask-compress>=1.17,<1.18", "flask-cors~=6.0.0", "flask-login~=0.6.3", "flask-migrate~=4.0.7", @@ -34,12 +33,10 @@ dependencies = [ "json-repair>=0.41.1", "langfuse~=2.51.3", "langsmith~=0.1.77", - "mailchimp-transactional~=1.0.50", "markdown~=3.5.1", "numpy~=1.26.4", - "openai~=1.61.0", "openpyxl~=3.1.5", - "opik~=1.7.25", + "opik~=1.8.72", "opentelemetry-api==1.27.0", "opentelemetry-distro==0.48b0", "opentelemetry-exporter-otlp==1.27.0", @@ -49,8 +46,9 @@ dependencies = [ "opentelemetry-instrumentation==0.48b0", "opentelemetry-instrumentation-celery==0.48b0", "opentelemetry-instrumentation-flask==0.48b0", + "opentelemetry-instrumentation-httpx==0.48b0", "opentelemetry-instrumentation-redis==0.48b0", - "opentelemetry-instrumentation-requests==0.48b0", + "opentelemetry-instrumentation-httpx==0.48b0", "opentelemetry-instrumentation-sqlalchemy==0.48b0", "opentelemetry-propagator-b3==1.27.0", # opentelemetry-proto1.28.0 depends on protobuf (>=5.0,<6.0), @@ -60,7 +58,6 @@ dependencies = [ "opentelemetry-semantic-conventions==0.48b0", "opentelemetry-util-http==0.48b0", "pandas[excel,output-formatting,performance]~=2.2.2", - "pandoc~=2.4", "psycogreen~=1.0.2", "psycopg2-binary~=2.9.6", "pycryptodome==3.19.1", @@ -113,7 +110,7 @@ dev = [ "lxml-stubs~=0.5.1", "ty~=0.0.1a19", "basedpyright~=1.31.0", - "ruff~=0.12.3", + "ruff~=0.14.0", "pytest~=8.3.2", "pytest-benchmark~=4.0.0", "pytest-cov~=4.1.0", 
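The model changes in this patch move tables onto a dataclass-style declarative base and add init=False to id/timestamp columns so they stay out of the generated constructors. A minimal sketch of that pattern, assuming TypeBase follows SQLAlchemy 2.0's MappedAsDataclass (its real definition lives in models/base.py and is not shown in this diff); the Example model below is hypothetical, not part of the patch:

from datetime import datetime

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column


class TypeBase(MappedAsDataclass, DeclarativeBase):
    """Hypothetical stand-in for models.base.TypeBase."""


class Example(TypeBase):
    __tablename__ = "examples"

    # init=False keeps database-generated columns out of the generated __init__,
    # which is why the patch adds it to id/created_at/updated_at columns.
    id: Mapped[int] = mapped_column(sa.Integer, primary_key=True, autoincrement=True, init=False)
    name: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    disabled: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false"), default=False)
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=sa.func.current_timestamp(), init=False
    )


# Only dataclass-init fields are constructor arguments; id and created_at are filled by the database.
example = Example(name="demo")
print(example.name, example.disabled)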
@@ -148,8 +145,6 @@ dev = [ "types-pywin32~=310.0.0", "types-pyyaml~=6.0.12", "types-regex~=2024.11.6", - "types-requests~=2.32.0", - "types-requests-oauthlib~=2.0.0", "types-shapely~=2.0.0", "types-simplejson>=3.20.0", "types-six>=1.17.0", @@ -178,10 +173,10 @@ dev = [ # Required for storage clients ############################################################ storage = [ - "azure-storage-blob==12.13.0", + "azure-storage-blob==12.26.0", "bce-python-sdk~=0.9.23", - "cos-python-sdk-v5==1.9.30", - "esdk-obs-python==3.24.6.1", + "cos-python-sdk-v5==1.9.38", + "esdk-obs-python==3.25.8", "google-cloud-storage==2.16.0", "opendal~=0.46.0", "oss2==2.18.5", @@ -207,7 +202,7 @@ vdb = [ "couchbase~=4.3.0", "elasticsearch==8.14.0", "opensearch-py==2.4.0", - "oracledb==3.0.0", + "oracledb==3.3.0", "pgvecto-rs[sqlalchemy]~=0.2.1", "pgvector==0.2.5", "pymilvus~=2.5.0", @@ -219,7 +214,8 @@ vdb = [ "tidb-vector==0.0.9", "upstash-vector==0.6.0", "volcengine-compat~=1.0.0", - "weaviate-client~=3.24.0", + "weaviate-client>=4.0.0,<5.0.0", "xinference-client~=1.2.2", "mo-vector~=0.1.13", + "mysql-connector-python>=9.3.0", ] diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index 61ed3ac3b4..bf4ec2314e 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -1,19 +1,10 @@ { "include": ["."], "exclude": [ - ".venv", "tests/", + ".venv", "migrations/", - "core/rag", - "extensions", - "libs", - "controllers/console/datasets", - "controllers/service_api/dataset", - "core/ops", - "core/tools", - "core/model_runtime", - "core/workflow/nodes", - "core/app/app_config/easy_ui_based_app/dataset" + "core/rag" ], "typeCheckingMode": "strict", "allowedUntypedLibraries": [ @@ -21,9 +12,11 @@ "flask_login", "opentelemetry.instrumentation.celery", "opentelemetry.instrumentation.flask", + "opentelemetry.instrumentation.httpx", "opentelemetry.instrumentation.requests", "opentelemetry.instrumentation.sqlalchemy", - "opentelemetry.instrumentation.redis" + "opentelemetry.instrumentation.redis", + "opentelemetry.instrumentation.httpx" ], "reportUnknownMemberType": "hint", "reportUnknownParameterType": "hint", @@ -32,13 +25,11 @@ "reportUnknownLambdaType": "hint", "reportMissingParameterType": "hint", "reportMissingTypeArgument": "hint", - "reportUnnecessaryContains": "hint", "reportUnnecessaryComparison": "hint", - "reportUnnecessaryCast": "hint", "reportUnnecessaryIsInstance": "hint", "reportUntypedFunctionDecorator": "hint", "reportAttributeAccessIssue": "hint", "pythonVersion": "3.11", "pythonPlatform": "All" -} +} \ No newline at end of file diff --git a/api/pytest.ini b/api/pytest.ini index eb49619481..afb53b47cc 100644 --- a/api/pytest.ini +++ b/api/pytest.ini @@ -7,7 +7,7 @@ env = CHATGLM_API_BASE = http://a.abc.com:11451 CODE_EXECUTION_API_KEY = dify-sandbox CODE_EXECUTION_ENDPOINT = http://127.0.0.1:8194 - CODE_MAX_STRING_LENGTH = 80000 + CODE_MAX_STRING_LENGTH = 400000 PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi PLUGIN_DAEMON_URL=http://127.0.0.1:5002 PLUGIN_MAX_PACKAGE_SIZE=15728640 diff --git a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py index 9bc6acc41f..7e2173acdd 100644 --- a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py @@ -7,8 +7,10 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations. 
from collections.abc import Sequence from datetime import datetime +from typing import cast from sqlalchemy import asc, delete, desc, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker from models.workflow import WorkflowNodeExecutionModel @@ -181,7 +183,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut # Delete the batch delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() total_deleted += result.rowcount @@ -228,7 +230,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut # Delete the batch delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() total_deleted += result.rowcount @@ -285,6 +287,6 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut with self._session_maker() as session: stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) session.commit() return result.rowcount diff --git a/api/repositories/sqlalchemy_api_workflow_run_repository.py b/api/repositories/sqlalchemy_api_workflow_run_repository.py index 205f8c87ee..6154273f33 100644 --- a/api/repositories/sqlalchemy_api_workflow_run_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_run_repository.py @@ -22,8 +22,10 @@ Implementation Notes: import logging from collections.abc import Sequence from datetime import datetime +from typing import cast from sqlalchemy import delete, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker from libs.infinite_scroll_pagination import InfiniteScrollPagination @@ -150,7 +152,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): with self._session_maker() as session: stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids)) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) session.commit() deleted_count = result.rowcount @@ -186,7 +188,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): # Delete the batch delete_stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() batch_deleted = result.rowcount diff --git a/api/schedule/check_upgradable_plugin_task.py b/api/schedule/check_upgradable_plugin_task.py index 08a5cfce79..e91ce07be3 100644 --- a/api/schedule/check_upgradable_plugin_task.py +++ b/api/schedule/check_upgradable_plugin_task.py @@ -1,3 +1,4 @@ +import math import time import click @@ -5,9 +6,10 @@ import click import app from extensions.ext_database import db from models.account import TenantPluginAutoUpgradeStrategy -from tasks.process_tenant_plugin_autoupgrade_check_task import process_tenant_plugin_autoupgrade_check_task +from tasks import process_tenant_plugin_autoupgrade_check_task as check_task AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes +MAX_CONCURRENT_CHECK_TASKS = 20 @app.celery.task(queue="plugin") @@ -30,15 +32,29 @@ def check_upgradable_plugin_task(): .all() ) - for strategy in strategies: - 
process_tenant_plugin_autoupgrade_check_task.delay( - strategy.tenant_id, - strategy.strategy_setting, - strategy.upgrade_time_of_day, - strategy.upgrade_mode, - strategy.exclude_plugins, - strategy.include_plugins, - ) + total_strategies = len(strategies) + click.echo(click.style(f"Total strategies: {total_strategies}", fg="green")) + + batch_chunk_count = math.ceil( + total_strategies / MAX_CONCURRENT_CHECK_TASKS + ) # make sure all strategies are checked in this interval + batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0 + + for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS): + batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS] + for strategy in batch_strategies: + check_task.process_tenant_plugin_autoupgrade_check_task.delay( + strategy.tenant_id, + strategy.strategy_setting, + strategy.upgrade_time_of_day, + strategy.upgrade_mode, + strategy.exclude_plugins, + strategy.include_plugins, + ) + + # Only sleep if batch_interval_time > 0.0001 AND current batch is not the last one + if batch_interval_time > 0.0001 and i + MAX_CONCURRENT_CHECK_TASKS < total_strategies: + time.sleep(batch_interval_time) end_at = time.perf_counter() click.echo( diff --git a/api/schedule/clean_workflow_runlogs_precise.py b/api/schedule/clean_workflow_runlogs_precise.py index 485a79782c..db4198720d 100644 --- a/api/schedule/clean_workflow_runlogs_precise.py +++ b/api/schedule/clean_workflow_runlogs_precise.py @@ -1,8 +1,11 @@ import datetime import logging import time +from collections.abc import Sequence import click +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker import app from configs import dify_config @@ -35,50 +38,53 @@ def clean_workflow_runlogs_precise(): retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days) + session_factory = sessionmaker(db.engine, expire_on_commit=False) try: - total_workflow_runs = db.session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count() - if total_workflow_runs == 0: - logger.info("No expired workflow run logs found") - return - logger.info("Found %s expired workflow run logs to clean", total_workflow_runs) + with session_factory.begin() as session: + total_workflow_runs = session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count() + if total_workflow_runs == 0: + logger.info("No expired workflow run logs found") + return + logger.info("Found %s expired workflow run logs to clean", total_workflow_runs) total_deleted = 0 failed_batches = 0 batch_count = 0 - while True: - workflow_runs = ( - db.session.query(WorkflowRun.id).where(WorkflowRun.created_at < cutoff_date).limit(BATCH_SIZE).all() - ) + with session_factory.begin() as session: + workflow_run_ids = session.scalars( + select(WorkflowRun.id) + .where(WorkflowRun.created_at < cutoff_date) + .order_by(WorkflowRun.created_at, WorkflowRun.id) + .limit(BATCH_SIZE) + ).all() - if not workflow_runs: - break - - workflow_run_ids = [run.id for run in workflow_runs] - batch_count += 1 - - success = _delete_batch_with_retry(workflow_run_ids, failed_batches) - - if success: - total_deleted += len(workflow_run_ids) - failed_batches = 0 - else: - failed_batches += 1 - if failed_batches >= MAX_RETRIES: - logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES) + if not workflow_run_ids: break + + batch_count += 1 + + success = _delete_batch(session, 
workflow_run_ids, failed_batches) + + if success: + total_deleted += len(workflow_run_ids) + failed_batches = 0 else: - # Calculate incremental delay times: 5, 10, 15 minutes - retry_delay_minutes = failed_batches * 5 - logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes) - time.sleep(retry_delay_minutes * 60) - continue + failed_batches += 1 + if failed_batches >= MAX_RETRIES: + logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES) + break + else: + # Calculate incremental delay times: 5, 10, 15 minutes + retry_delay_minutes = failed_batches * 5 + logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes) + time.sleep(retry_delay_minutes * 60) + continue logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted) except Exception: - db.session.rollback() logger.exception("Unexpected error in workflow log cleanup") raise @@ -87,69 +93,56 @@ def clean_workflow_runlogs_precise(): click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green")) -def _delete_batch_with_retry(workflow_run_ids: list[str], attempt_count: int) -> bool: - """Delete a single batch with a retry mechanism and complete cascading deletion""" +def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_count: int) -> bool: + """Delete a single batch of workflow runs and all related data within a nested transaction.""" try: - with db.session.begin_nested(): + with session.begin_nested(): message_data = ( - db.session.query(Message.id, Message.conversation_id) + session.query(Message.id, Message.conversation_id) .where(Message.workflow_run_id.in_(workflow_run_ids)) .all() ) message_id_list = [msg.id for msg in message_data] conversation_id_list = list({msg.conversation_id for msg in message_data if msg.conversation_id}) if message_id_list: - db.session.query(AppAnnotationHitHistory).where( - AppAnnotationHitHistory.message_id.in_(message_id_list) - ).delete(synchronize_session=False) + message_related_models = [ + AppAnnotationHitHistory, + MessageAgentThought, + MessageChain, + MessageFile, + MessageAnnotation, + MessageFeedback, + ] + for model in message_related_models: + session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore + # error: "DeclarativeAttributeIntercept" has no attribute "message_id". But this type is only in lib + # and these 6 types all have the message_id field. 
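The refactor above collapses six per-model deletes into a loop and runs each batch inside a SAVEPOINT, so a failed batch rolls back on its own without tearing down the outer transaction. A self-contained sketch of that pattern, using hypothetical Thought/Chain models and an in-memory SQLite engine rather than Dify's real models (not part of the patch):

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, sessionmaker


class Base(DeclarativeBase):
    pass


class Thought(Base):
    __tablename__ = "thoughts"
    id: Mapped[int] = mapped_column(primary_key=True)
    message_id: Mapped[str] = mapped_column(sa.String(36))


class Chain(Base):
    __tablename__ = "chains"
    id: Mapped[int] = mapped_column(primary_key=True)
    message_id: Mapped[str] = mapped_column(sa.String(36))


engine = sa.create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session_factory = sessionmaker(engine, expire_on_commit=False)

message_ids = ["m1", "m2"]
with session_factory.begin() as session:
    with session.begin_nested():  # SAVEPOINT: an error here undoes only this batch
        for model in (Thought, Chain):
            session.query(model).where(model.message_id.in_(message_ids)).delete(
                synchronize_session=False
            )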
- db.session.query(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_id_list)).delete( + session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete( synchronize_session=False ) - db.session.query(MessageChain).where(MessageChain.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageFile).where(MessageFile.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete( - synchronize_session=False - ) - - db.session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete( + session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete( synchronize_session=False ) - db.session.query(WorkflowNodeExecutionModel).where( + session.query(WorkflowNodeExecutionModel).where( WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids) ).delete(synchronize_session=False) if conversation_id_list: - db.session.query(ConversationVariable).where( + session.query(ConversationVariable).where( ConversationVariable.conversation_id.in_(conversation_id_list) ).delete(synchronize_session=False) - db.session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete( + session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete( synchronize_session=False ) - db.session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False) + session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False) - db.session.commit() - return True + return True except Exception: - db.session.rollback() logger.exception("Batch deletion failed (attempt %s)", attempt_count + 1) return False diff --git a/api/schedule/mail_clean_document_notify_task.py b/api/schedule/mail_clean_document_notify_task.py index ef6edd6709..b70707b17e 100644 --- a/api/schedule/mail_clean_document_notify_task.py +++ b/api/schedule/mail_clean_document_notify_task.py @@ -10,7 +10,7 @@ from configs import dify_config from extensions.ext_database import db from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service -from models.account import Account, Tenant, TenantAccountJoin +from models import Account, Tenant, TenantAccountJoin from models.dataset import Dataset, DatasetAutoDisableLog from services.feature_service import FeatureService diff --git a/api/services/account_service.py b/api/services/account_service.py index 0e699d16da..106bc0e77e 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -127,7 +127,7 @@ class AccountService: if not account: return None - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise Unauthorized("Account is banned.") current_tenant = db.session.query(TenantAccountJoin).filter_by(account_id=account.id, current=True).first() @@ -178,7 +178,7 @@ class AccountService: if not account: raise AccountPasswordError("Invalid email or password.") - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise AccountLoginError("Account is banned.") 
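Dropping .value from these comparisons relies on the status enum comparing equal to the plain strings stored in the column. A minimal sketch of why that holds, assuming AccountStatus is a str-backed enum (its actual definition in the models package is not shown in this diff):

from enum import StrEnum


class AccountStatus(StrEnum):
    PENDING = "pending"
    ACTIVE = "active"
    BANNED = "banned"
    CLOSED = "closed"


status_from_db = "banned"  # e.g. the raw value read from the status column

# A StrEnum member compares equal to its underlying string, so removing `.value`
# keeps these checks equivalent while reading as enum comparisons.
assert status_from_db == AccountStatus.BANNED
assert AccountStatus.BANNED == "banned"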
if password and invite_token and account.password is None: @@ -193,8 +193,8 @@ class AccountService: if account.password is None or not compare_password(password, account.password, account.password_salt): raise AccountPasswordError("Invalid email or password.") - if account.status == AccountStatus.PENDING.value: - account.status = AccountStatus.ACTIVE.value + if account.status == AccountStatus.PENDING: + account.status = AccountStatus.ACTIVE account.initialized_at = naive_utc_now() db.session.commit() @@ -246,10 +246,8 @@ class AccountService: ) ) - account = Account() - account.email = email - account.name = name - + password_to_set = None + salt_to_set = None if password: valid_password(password) @@ -261,14 +259,18 @@ class AccountService: password_hashed = hash_password(password, salt) base64_password_hashed = base64.b64encode(password_hashed).decode() - account.password = base64_password_hashed - account.password_salt = base64_salt + password_to_set = base64_password_hashed + salt_to_set = base64_salt - account.interface_language = interface_language - account.interface_theme = interface_theme - - # Set timezone based on language - account.timezone = language_timezone_mapping.get(interface_language, "UTC") + account = Account( + name=name, + email=email, + password=password_to_set, + password_salt=salt_to_set, + interface_language=interface_language, + interface_theme=interface_theme, + timezone=language_timezone_mapping.get(interface_language, "UTC"), + ) db.session.add(account) db.session.commit() @@ -355,7 +357,7 @@ class AccountService: @staticmethod def close_account(account: Account): """Close account""" - account.status = AccountStatus.CLOSED.value + account.status = AccountStatus.CLOSED db.session.commit() @staticmethod @@ -395,8 +397,8 @@ class AccountService: if ip_address: AccountService.update_login_info(account=account, ip_address=ip_address) - if account.status == AccountStatus.PENDING.value: - account.status = AccountStatus.ACTIVE.value + if account.status == AccountStatus.PENDING: + account.status = AccountStatus.ACTIVE db.session.commit() access_token = AccountService.get_account_jwt_token(account=account) @@ -764,7 +766,7 @@ class AccountService: if not account: return None - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise Unauthorized("Account is banned.") return account @@ -1028,7 +1030,7 @@ class TenantService: @staticmethod def create_tenant_member(tenant: Tenant, account: Account, role: str = "normal") -> TenantAccountJoin: """Create tenant member""" - if role == TenantAccountRole.OWNER.value: + if role == TenantAccountRole.OWNER: if TenantService.has_roles(tenant, [TenantAccountRole.OWNER]): logger.error("Tenant %s has already an owner.", tenant.id) raise Exception("Tenant already has an owner.") @@ -1313,7 +1315,7 @@ class RegisterService: password=password, is_setup=is_setup, ) - account.status = AccountStatus.ACTIVE.value if not status else status.value + account.status = status or AccountStatus.ACTIVE account.initialized_at = naive_utc_now() if open_id is not None and provider is not None: @@ -1374,7 +1376,7 @@ class RegisterService: TenantService.create_tenant_member(tenant, account, role) # Support resend invitation email when the account is pending status - if account.status != AccountStatus.PENDING.value: + if account.status != AccountStatus.PENDING: raise AccountAlreadyInTenantError("Account already in tenant.") token = cls.generate_invite_token(tenant, account) diff --git 
a/api/services/agent_service.py b/api/services/agent_service.py index d631ce812f..b2db895a5a 100644 --- a/api/services/agent_service.py +++ b/api/services/agent_service.py @@ -10,7 +10,7 @@ from core.plugin.impl.exc import PluginDaemonClientSideError from core.tools.tool_manager import ToolManager from extensions.ext_database import db from libs.login import current_user -from models.account import Account +from models import Account from models.model import App, Conversation, EndUser, Message, MessageAgentThought diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index 9feca7337f..c0d26cdd27 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -8,8 +8,7 @@ from werkzeug.exceptions import NotFound from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now -from libs.login import current_user -from models.account import Account +from libs.login import current_account_with_tenant from models.model import App, AppAnnotationHitHistory, AppAnnotationSetting, Message, MessageAnnotation from services.feature_service import FeatureService from tasks.annotation.add_annotation_to_index_task import add_annotation_to_index_task @@ -24,10 +23,10 @@ class AppAnnotationService: @classmethod def up_insert_app_annotation_from_message(cls, args: dict, app_id: str) -> MessageAnnotation: # get app info - assert isinstance(current_user, Account) + current_user, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -63,12 +62,12 @@ class AppAnnotationService: db.session.commit() # if annotation reply is enabled , add annotation to index annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() - assert current_user.current_tenant_id is not None + assert current_tenant_id is not None if annotation_setting: add_annotation_to_index_task.delay( annotation.id, args["question"], - current_user.current_tenant_id, + current_tenant_id, app_id, annotation_setting.collection_binding_id, ) @@ -86,13 +85,12 @@ class AppAnnotationService: enable_app_annotation_job_key = f"enable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(enable_app_annotation_job_key, "waiting") - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() enable_annotation_reply_task.delay( str(job_id), app_id, current_user.id, - current_user.current_tenant_id, + current_tenant_id, args["score_threshold"], args["embedding_provider_name"], args["embedding_model_name"], @@ -101,8 +99,7 @@ class AppAnnotationService: @classmethod def disable_app_annotation(cls, app_id: str): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() disable_app_annotation_key = f"disable_app_annotation_{str(app_id)}" cache_result = redis_client.get(disable_app_annotation_key) if cache_result is not None: @@ -113,17 +110,16 @@ class AppAnnotationService: disable_app_annotation_job_key = f"disable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(disable_app_annotation_job_key, "waiting") - 
disable_annotation_reply_task.delay(str(job_id), app_id, current_user.current_tenant_id) + disable_annotation_reply_task.delay(str(job_id), app_id, current_tenant_id) return {"job_id": job_id, "job_status": "waiting"} @classmethod def get_annotation_list_by_app_id(cls, app_id: str, page: int, limit: int, keyword: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -153,11 +149,10 @@ class AppAnnotationService: @classmethod def export_annotation_list_by_app_id(cls, app_id: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -174,11 +169,10 @@ class AppAnnotationService: @classmethod def insert_app_annotation_directly(cls, args: dict, app_id: str) -> MessageAnnotation: # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -196,7 +190,7 @@ class AppAnnotationService: add_annotation_to_index_task.delay( annotation.id, args["question"], - current_user.current_tenant_id, + current_tenant_id, app_id, annotation_setting.collection_binding_id, ) @@ -205,11 +199,10 @@ class AppAnnotationService: @classmethod def update_app_annotation_directly(cls, args: dict, app_id: str, annotation_id: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -234,7 +227,7 @@ class AppAnnotationService: update_annotation_to_index_task.delay( annotation.id, annotation.question, - current_user.current_tenant_id, + current_tenant_id, app_id, app_annotation_setting.collection_binding_id, ) @@ -244,11 +237,10 @@ class AppAnnotationService: @classmethod def delete_app_annotation(cls, app_id: str, annotation_id: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -277,17 +269,16 @@ class AppAnnotationService: if app_annotation_setting: delete_annotation_index_task.delay( - annotation.id, app_id, current_user.current_tenant_id, app_annotation_setting.collection_binding_id + annotation.id, app_id, 
current_tenant_id, app_annotation_setting.collection_binding_id ) @classmethod def delete_app_annotations_in_batch(cls, app_id: str, annotation_ids: list[str]): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -317,7 +308,7 @@ class AppAnnotationService: for annotation, annotation_setting in annotations_to_delete: if annotation_setting: delete_annotation_index_task.delay( - annotation.id, app_id, current_user.current_tenant_id, annotation_setting.collection_binding_id + annotation.id, app_id, current_tenant_id, annotation_setting.collection_binding_id ) # Step 4: Bulk delete annotations in a single query @@ -333,11 +324,10 @@ class AppAnnotationService: @classmethod def batch_import_app_annotations(cls, app_id, file: FileStorage): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -354,7 +344,7 @@ class AppAnnotationService: if len(result) == 0: raise ValueError("The CSV file is empty.") # check annotation limit - features = FeatureService.get_features(current_user.current_tenant_id) + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: annotation_quota_limit = features.annotation_quota_limit if annotation_quota_limit.limit < len(result) + annotation_quota_limit.size: @@ -364,21 +354,18 @@ class AppAnnotationService: indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" # send batch add segments task redis_client.setnx(indexing_cache_key, "waiting") - batch_import_annotations_task.delay( - str(job_id), result, app_id, current_user.current_tenant_id, current_user.id - ) + batch_import_annotations_task.delay(str(job_id), result, app_id, current_tenant_id, current_user.id) except Exception as e: return {"error_msg": str(e)} return {"job_id": job_id, "job_status": "waiting"} @classmethod def get_annotation_hit_histories(cls, app_id: str, annotation_id: str, page, limit): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() # get app info app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -445,12 +432,11 @@ class AppAnnotationService: @classmethod def get_app_annotation_setting_by_app_id(cls, app_id: str): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() # get app info app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -481,12 +467,11 @@ class AppAnnotationService: @classmethod def 
update_app_annotation_setting(cls, app_id: str, annotation_setting_id: str, args: dict): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() # get app info app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -531,11 +516,10 @@ class AppAnnotationService: @classmethod def clear_all_annotations(cls, app_id: str): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -558,7 +542,7 @@ class AppAnnotationService: # if annotation reply is enabled, delete annotation index if app_annotation_setting: delete_annotation_index_task.delay( - annotation.id, app_id, current_user.current_tenant_id, app_annotation_setting.collection_binding_id + annotation.id, app_id, current_tenant_id, app_annotation_setting.collection_binding_id ) db.session.delete(annotation) diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index 8701fe4f4e..e2915ebfbb 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -29,6 +29,7 @@ from core.workflow.nodes.tool.entities import ToolNodeData from events.app_event import app_model_config_was_updated, app_was_created from extensions.ext_redis import redis_client from factories import variable_factory +from libs.datetime_utils import naive_utc_now from models import Account, App, AppMode from models.model import AppModelConfig from models.workflow import Workflow @@ -439,6 +440,7 @@ class AppDslService: app.icon = icon app.icon_background = icon_background or app_data.get("icon_background", app.icon_background) app.updated_by = account.id + app.updated_at = naive_utc_now() else: if account.current_tenant_id is None: raise ValueError("Current tenant is not set") @@ -494,7 +496,7 @@ class AppDslService: unique_hash = None graph = workflow_data.get("graph", {}) for node in graph.get("nodes", []): - if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL.value: + if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL: dataset_ids = node["data"].get("dataset_ids", []) node["data"]["dataset_ids"] = [ decrypted_id @@ -584,17 +586,17 @@ class AppDslService: if not node_data: continue data_type = node_data.get("type", "") - if data_type == NodeType.KNOWLEDGE_RETRIEVAL.value: + if data_type == NodeType.KNOWLEDGE_RETRIEVAL: dataset_ids = node_data.get("dataset_ids", []) node_data["dataset_ids"] = [ cls.encrypt_dataset_id(dataset_id=dataset_id, tenant_id=app_model.tenant_id) for dataset_id in dataset_ids ] # filter credential id from tool node - if not include_secret and data_type == NodeType.TOOL.value: + if not include_secret and data_type == NodeType.TOOL: node_data.pop("credential_id", None) # filter credential id from agent node - if not include_secret and data_type == NodeType.AGENT.value: + if not include_secret and data_type == NodeType.AGENT: for tool in node_data.get("agent_parameters", {}).get("tools", {}).get("value", []): tool.pop("credential_id", None) @@ -658,32 +660,32 @@ class 
AppDslService: try: typ = node.get("data", {}).get("type") match typ: - case NodeType.TOOL.value: - tool_entity = ToolNodeData(**node["data"]) + case NodeType.TOOL: + tool_entity = ToolNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id), ) - case NodeType.LLM.value: - llm_entity = LLMNodeData(**node["data"]) + case NodeType.LLM: + llm_entity = LLMNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider), ) - case NodeType.QUESTION_CLASSIFIER.value: - question_classifier_entity = QuestionClassifierNodeData(**node["data"]) + case NodeType.QUESTION_CLASSIFIER: + question_classifier_entity = QuestionClassifierNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency( question_classifier_entity.model.provider ), ) - case NodeType.PARAMETER_EXTRACTOR.value: - parameter_extractor_entity = ParameterExtractorNodeData(**node["data"]) + case NodeType.PARAMETER_EXTRACTOR: + parameter_extractor_entity = ParameterExtractorNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency( parameter_extractor_entity.model.provider ), ) - case NodeType.KNOWLEDGE_RETRIEVAL.value: - knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"]) + case NodeType.KNOWLEDGE_RETRIEVAL: + knowledge_retrieval_entity = KnowledgeRetrievalNodeData.model_validate(node["data"]) if knowledge_retrieval_entity.retrieval_mode == "multiple": if knowledge_retrieval_entity.multiple_retrieval_config: if ( @@ -773,7 +775,7 @@ class AppDslService: """ Returns the leaked dependencies in current workspace """ - dependencies = [PluginDependency(**dep) for dep in dsl_dependencies] + dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies] if not dependencies: return [] diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 8911da4728..b462ddf236 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -2,8 +2,6 @@ import uuid from collections.abc import Generator, Mapping from typing import Any, Union -from openai._exceptions import RateLimitError - from configs import dify_config from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator @@ -122,8 +120,6 @@ class AppGenerateService: ) else: raise ValueError(f"Invalid app mode {app_model.mode}") - except RateLimitError as e: - raise InvokeRateLimitError(str(e)) except Exception: rate_limit.exit(request_id) raise diff --git a/api/services/app_service.py b/api/services/app_service.py index 4fc6cf2494..5f8c5089c9 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -18,7 +18,7 @@ from events.app_event import app_was_created from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from libs.login import current_user -from models.account import Account +from models import Account from models.model import App, AppMode, AppModelConfig, Site from models.tools import ApiToolProvider from services.billing_service import BillingService diff --git a/api/services/auth/api_key_auth_service.py b/api/services/auth/api_key_auth_service.py index 055cf65816..56aaf407ee 100644 --- a/api/services/auth/api_key_auth_service.py +++ 
b/api/services/auth/api_key_auth_service.py @@ -26,10 +26,9 @@ class ApiKeyAuthService: api_key = encrypter.encrypt_token(tenant_id, args["credentials"]["config"]["api_key"]) args["credentials"]["config"]["api_key"] = api_key - data_source_api_key_binding = DataSourceApiKeyAuthBinding() - data_source_api_key_binding.tenant_id = tenant_id - data_source_api_key_binding.category = args["category"] - data_source_api_key_binding.provider = args["provider"] + data_source_api_key_binding = DataSourceApiKeyAuthBinding( + tenant_id=tenant_id, category=args["category"], provider=args["provider"] + ) data_source_api_key_binding.credentials = json.dumps(args["credentials"], ensure_ascii=False) db.session.add(data_source_api_key_binding) db.session.commit() @@ -48,6 +47,8 @@ class ApiKeyAuthService: ) if not data_source_api_key_bindings: return None + if not data_source_api_key_bindings.credentials: + return None credentials = json.loads(data_source_api_key_bindings.credentials) return credentials diff --git a/api/services/billing_service.py b/api/services/billing_service.py index 9d6c5b4b31..a6851d2638 100644 --- a/api/services/billing_service.py +++ b/api/services/billing_service.py @@ -7,7 +7,7 @@ from tenacity import retry, retry_if_exception_type, stop_before_delay, wait_fix from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.helper import RateLimiter -from models.account import Account, TenantAccountJoin, TenantAccountRole +from models import Account, TenantAccountJoin, TenantAccountRole class BillingService: diff --git a/api/services/conversation_service.py b/api/services/conversation_service.py index a8e51a426d..39d6c81621 100644 --- a/api/services/conversation_service.py +++ b/api/services/conversation_service.py @@ -14,8 +14,7 @@ from extensions.ext_database import db from factories import variable_factory from libs.datetime_utils import naive_utc_now from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models import ConversationVariable -from models.account import Account +from models import Account, ConversationVariable from models.model import App, Conversation, EndUser, Message from services.errors.conversation import ( ConversationNotExistsError, diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index c9dd78ddd1..f4047da6b8 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -29,7 +29,7 @@ from extensions.ext_redis import redis_client from libs import helper from libs.datetime_utils import naive_utc_now from libs.login import current_user -from models.account import Account, TenantAccountRole +from models import Account, TenantAccountRole from models.dataset import ( AppDatasetJoin, ChildChunk, @@ -93,7 +93,7 @@ logger = logging.getLogger(__name__) class DatasetService: @staticmethod def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None, include_all=False): - query = select(Dataset).where(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc()) + query = select(Dataset).where(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc(), Dataset.id) if user: # get permitted dataset ids @@ -1470,7 +1470,7 @@ class DocumentService: dataset.collection_binding_id = dataset_collection_binding.id if not dataset.retrieval_model: default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": 
{"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 4, @@ -1752,7 +1752,7 @@ class DocumentService: # dataset.collection_binding_id = dataset_collection_binding.id # if not dataset.retrieval_model: # default_retrieval_model = { - # "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + # "search_method": RetrievalMethod.SEMANTIC_SEARCH, # "reranking_enable": False, # "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, # "top_k": 2, @@ -2205,7 +2205,7 @@ class DocumentService: retrieval_model = knowledge_config.retrieval_model else: retrieval_model = RetrievalModel( - search_method=RetrievalMethod.SEMANTIC_SEARCH.value, + search_method=RetrievalMethod.SEMANTIC_SEARCH, reranking_enable=False, reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""), top_k=4, diff --git a/api/services/datasource_provider_service.py b/api/services/datasource_provider_service.py index 89a5d89f61..fcb6ab1d40 100644 --- a/api/services/datasource_provider_service.py +++ b/api/services/datasource_provider_service.py @@ -3,7 +3,6 @@ import time from collections.abc import Mapping from typing import Any -from flask_login import current_user from sqlalchemy.orm import Session from configs import dify_config @@ -18,6 +17,7 @@ from core.tools.entities.tool_entities import CredentialType from core.tools.utils.encryption import ProviderConfigCache, ProviderConfigEncrypter, create_provider_encrypter from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.login import current_account_with_tenant from models.oauth import DatasourceOauthParamConfig, DatasourceOauthTenantParamConfig, DatasourceProvider from models.provider_ids import DatasourceProviderID from services.plugin.plugin_service import PluginService @@ -93,6 +93,8 @@ class DatasourceProviderService: """ get credential by id """ + current_user, _ = current_account_with_tenant() + with Session(db.engine) as session: if credential_id: datasource_provider = ( @@ -157,6 +159,8 @@ class DatasourceProviderService: """ get all datasource credentials by provider """ + current_user, _ = current_account_with_tenant() + with Session(db.engine) as session: datasource_providers = ( session.query(DatasourceProvider) @@ -604,6 +608,8 @@ class DatasourceProviderService: """ provider_name = provider_id.provider_name plugin_id = provider_id.plugin_id + current_user, _ = current_account_with_tenant() + with Session(db.engine) as session: lock = f"datasource_provider_create_lock:{tenant_id}_{provider_id}_{CredentialType.API_KEY}" with redis_client.lock(lock, timeout=20): @@ -646,7 +652,7 @@ class DatasourceProviderService: name=db_provider_name, provider=provider_name, plugin_id=plugin_id, - auth_type=CredentialType.API_KEY.value, + auth_type=CredentialType.API_KEY, encrypted_credentials=credentials, ) session.add(datasource_provider) @@ -674,7 +680,7 @@ class DatasourceProviderService: secret_input_form_variables = [] for credential_form_schema in credential_form_schemas: - if credential_form_schema.type.value == FormType.SECRET_INPUT.value: + if credential_form_schema.type.value == FormType.SECRET_INPUT: secret_input_form_variables.append(credential_form_schema.name) return secret_input_form_variables @@ -901,6 +907,8 @@ class DatasourceProviderService: """ update datasource credentials. 
""" + current_user, _ = current_account_with_tenant() + with Session(db.engine) as session: datasource_provider = ( session.query(DatasourceProvider) diff --git a/api/services/enterprise/base.py b/api/services/enterprise/base.py index edb76408e8..bdc960aa2d 100644 --- a/api/services/enterprise/base.py +++ b/api/services/enterprise/base.py @@ -1,10 +1,12 @@ import os +from collections.abc import Mapping +from typing import Any -import requests +import httpx class BaseRequest: - proxies = { + proxies: Mapping[str, str] | None = { "http": "", "https": "", } @@ -13,10 +15,31 @@ class BaseRequest: secret_key_header = "" @classmethod - def send_request(cls, method, endpoint, json=None, params=None): + def _build_mounts(cls) -> dict[str, httpx.BaseTransport] | None: + if not cls.proxies: + return None + + mounts: dict[str, httpx.BaseTransport] = {} + for scheme, value in cls.proxies.items(): + if not value: + continue + key = f"{scheme}://" if not scheme.endswith("://") else scheme + mounts[key] = httpx.HTTPTransport(proxy=value) + return mounts or None + + @classmethod + def send_request( + cls, + method: str, + endpoint: str, + json: Any | None = None, + params: Mapping[str, Any] | None = None, + ) -> Any: headers = {"Content-Type": "application/json", cls.secret_key_header: cls.secret_key} url = f"{cls.base_url}{endpoint}" - response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies) + mounts = cls._build_mounts() + with httpx.Client(mounts=mounts) as client: + response = client.request(method, url, json=json, params=params, headers=headers) return response.json() diff --git a/api/services/enterprise/enterprise_service.py b/api/services/enterprise/enterprise_service.py index f8612456d6..4fbf33fd6f 100644 --- a/api/services/enterprise/enterprise_service.py +++ b/api/services/enterprise/enterprise_service.py @@ -70,7 +70,7 @@ class EnterpriseService: data = EnterpriseRequest.send_request("GET", "/webapp/access-mode/id", params=params) if not data: raise ValueError("No data found.") - return WebAppSettings(**data) + return WebAppSettings.model_validate(data) @classmethod def batch_get_app_access_mode_by_id(cls, app_ids: list[str]) -> dict[str, WebAppSettings]: @@ -100,7 +100,7 @@ class EnterpriseService: data = EnterpriseRequest.send_request("GET", "/webapp/access-mode/code", params=params) if not data: raise ValueError("No data found.") - return WebAppSettings(**data) + return WebAppSettings.model_validate(data) @classmethod def update_app_access_mode(cls, app_id: str, access_mode: str): diff --git a/api/services/entities/knowledge_entities/knowledge_entities.py b/api/services/entities/knowledge_entities/knowledge_entities.py index 33f65bde58..b9a210740d 100644 --- a/api/services/entities/knowledge_entities/knowledge_entities.py +++ b/api/services/entities/knowledge_entities/knowledge_entities.py @@ -3,6 +3,8 @@ from typing import Literal from pydantic import BaseModel +from core.rag.retrieval.retrieval_methods import RetrievalMethod + class ParentMode(StrEnum): FULL_DOC = "full-doc" @@ -95,7 +97,7 @@ class WeightModel(BaseModel): class RetrievalModel(BaseModel): - search_method: Literal["hybrid_search", "semantic_search", "full_text_search", "keyword_search"] + search_method: RetrievalMethod reranking_enable: bool reranking_model: RerankingModel | None = None reranking_mode: str | None = None diff --git a/api/services/entities/knowledge_entities/rag_pipeline_entities.py b/api/services/entities/knowledge_entities/rag_pipeline_entities.py index 
860bfde401..a97ccab914 100644 --- a/api/services/entities/knowledge_entities/rag_pipeline_entities.py +++ b/api/services/entities/knowledge_entities/rag_pipeline_entities.py @@ -2,6 +2,8 @@ from typing import Literal from pydantic import BaseModel, field_validator +from core.rag.retrieval.retrieval_methods import RetrievalMethod + class IconInfo(BaseModel): icon: str @@ -83,7 +85,7 @@ class RetrievalSetting(BaseModel): Retrieval Setting. """ - search_method: Literal["semantic_search", "full_text_search", "keyword_search", "hybrid_search"] + search_method: RetrievalMethod top_k: int score_threshold: float | None = 0.5 score_threshold_enabled: bool = False diff --git a/api/services/entities/model_provider_entities.py b/api/services/entities/model_provider_entities.py index 49d48f044c..d07badefa7 100644 --- a/api/services/entities/model_provider_entities.py +++ b/api/services/entities/model_provider_entities.py @@ -1,6 +1,7 @@ -from enum import Enum +from collections.abc import Sequence +from enum import StrEnum -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict, model_validator from configs import dify_config from core.entities.model_entities import ( @@ -26,7 +27,7 @@ from core.model_runtime.entities.provider_entities import ( from models.provider import ProviderType -class CustomConfigurationStatus(Enum): +class CustomConfigurationStatus(StrEnum): """ Enum class for custom configuration status. """ @@ -71,7 +72,7 @@ class ProviderResponse(BaseModel): icon_large: I18nObject | None = None background: str | None = None help: ProviderHelpEntity | None = None - supported_model_types: list[ModelType] + supported_model_types: Sequence[ModelType] configurate_methods: list[ConfigurateMethod] provider_credential_schema: ProviderCredentialSchema | None = None model_credential_schema: ModelCredentialSchema | None = None @@ -82,9 +83,8 @@ class ProviderResponse(BaseModel): # pydantic configs model_config = ConfigDict(protected_namespaces=()) - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): url_prefix = ( dify_config.CONSOLE_API_URL + f"/console/api/workspaces/{self.tenant_id}/model-providers/{self.provider}" ) @@ -97,6 +97,7 @@ class ProviderResponse(BaseModel): self.icon_large = I18nObject( en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" ) + return self class ProviderWithModelsResponse(BaseModel): @@ -112,9 +113,8 @@ class ProviderWithModelsResponse(BaseModel): status: CustomConfigurationStatus models: list[ProviderModelWithStatusEntity] - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): url_prefix = ( dify_config.CONSOLE_API_URL + f"/console/api/workspaces/{self.tenant_id}/model-providers/{self.provider}" ) @@ -127,6 +127,7 @@ class ProviderWithModelsResponse(BaseModel): self.icon_large = I18nObject( en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" ) + return self class SimpleProviderEntityResponse(SimpleProviderEntity): @@ -136,9 +137,8 @@ class SimpleProviderEntityResponse(SimpleProviderEntity): tenant_id: str - def __init__(self, **data): - super().__init__(**data) - + @model_validator(mode="after") + def _(self): url_prefix = ( dify_config.CONSOLE_API_URL + f"/console/api/workspaces/{self.tenant_id}/model-providers/{self.provider}" ) @@ -151,6 +151,7 @@ class SimpleProviderEntityResponse(SimpleProviderEntity): self.icon_large = I18nObject( en_US=f"{url_prefix}/icon_large/en_US", 
zh_Hans=f"{url_prefix}/icon_large/zh_Hans" ) + return self class DefaultModelResponse(BaseModel): diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index b6ba3bafea..5cd3b471f9 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -88,9 +88,9 @@ class ExternalDatasetService: else: raise ValueError(f"invalid endpoint: {endpoint}") try: - response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"}) - except Exception: - raise ValueError(f"failed to connect to the endpoint: {endpoint}") + response = ssrf_proxy.post(endpoint, headers={"Authorization": f"Bearer {api_key}"}) + except Exception as e: + raise ValueError(f"failed to connect to the endpoint: {endpoint}") from e if response.status_code == 502: raise ValueError(f"Bad Gateway: failed to connect to the endpoint: {endpoint}") if response.status_code == 404: diff --git a/api/services/file_service.py b/api/services/file_service.py index f0bb68766d..dd6a829ea2 100644 --- a/api/services/file_service.py +++ b/api/services/file_service.py @@ -19,7 +19,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from extensions.ext_storage import storage from libs.datetime_utils import naive_utc_now from libs.helper import extract_tenant_id -from models.account import Account +from models import Account from models.enums import CreatorUserRole from models.model import EndUser, UploadFile diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py index 00ec3babf3..7fa82c6d22 100644 --- a/api/services/hit_testing_service.py +++ b/api/services/hit_testing_service.py @@ -9,13 +9,13 @@ from core.rag.models.document import Document from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db -from models.account import Account +from models import Account from models.dataset import Dataset, DatasetQuery logger = logging.getLogger(__name__) default_retrieval_model = { - "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, + "search_method": RetrievalMethod.SEMANTIC_SEARCH, "reranking_enable": False, "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""}, "top_k": 4, @@ -46,7 +46,7 @@ class HitTestingService: from core.app.app_config.entities import MetadataFilteringCondition - metadata_filtering_conditions = MetadataFilteringCondition(**metadata_filtering_conditions) + metadata_filtering_conditions = MetadataFilteringCondition.model_validate(metadata_filtering_conditions) metadata_filter_document_ids, metadata_condition = dataset_retrieval.get_metadata_filter_condition( dataset_ids=[dataset.id], @@ -63,7 +63,7 @@ class HitTestingService: if metadata_condition and not document_ids_filter: return cls.compact_retrieve_response(query, []) all_documents = RetrievalService.retrieve( - retrieval_method=retrieval_model.get("search_method", "semantic_search"), + retrieval_method=RetrievalMethod(retrieval_model.get("search_method", RetrievalMethod.SEMANTIC_SEARCH)), dataset_id=dataset.id, query=query, top_k=retrieval_model.get("top_k", 4), diff --git a/api/services/message_service.py b/api/services/message_service.py index 5e356bf925..9fdff18622 100644 --- a/api/services/message_service.py +++ b/api/services/message_service.py @@ -12,7 +12,7 @@ from core.ops.ops_trace_manager import TraceQueueManager, TraceTask from core.ops.utils import measure_time from 
extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, AppMode, AppModelConfig, EndUser, Message, MessageFeedback from services.conversation_service import ConversationService from services.errors.message import ( diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py index 6add830813..5f280c9e57 100644 --- a/api/services/metadata_service.py +++ b/api/services/metadata_service.py @@ -1,12 +1,11 @@ import copy import logging -from flask_login import current_user - from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now +from libs.login import current_account_with_tenant from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding from services.dataset_service import DocumentService from services.entities.knowledge_entities.knowledge_entities import ( @@ -23,11 +22,11 @@ class MetadataService: # check if metadata name is too long if len(metadata_args.name) > 255: raise ValueError("Metadata name cannot exceed 255 characters.") - + current_user, current_tenant_id = current_account_with_tenant() # check if metadata name already exists if ( db.session.query(DatasetMetadata) - .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=metadata_args.name) + .filter_by(tenant_id=current_tenant_id, dataset_id=dataset_id, name=metadata_args.name) .first() ): raise ValueError("Metadata name already exists.") @@ -35,7 +34,7 @@ class MetadataService: if field.value == metadata_args.name: raise ValueError("Metadata name already exists in Built-in fields.") metadata = DatasetMetadata( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, dataset_id=dataset_id, type=metadata_args.type, name=metadata_args.name, @@ -53,9 +52,10 @@ class MetadataService: lock_key = f"dataset_metadata_lock_{dataset_id}" # check if metadata name already exists + current_user, current_tenant_id = current_account_with_tenant() if ( db.session.query(DatasetMetadata) - .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=name) + .filter_by(tenant_id=current_tenant_id, dataset_id=dataset_id, name=name) .first() ): raise ValueError("Metadata name already exists.") @@ -220,9 +220,10 @@ class MetadataService: db.session.commit() # deal metadata binding db.session.query(DatasetMetadataBinding).filter_by(document_id=operation.document_id).delete() + current_user, current_tenant_id = current_account_with_tenant() for metadata_value in operation.metadata_list: dataset_metadata_binding = DatasetMetadataBinding( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, dataset_id=dataset.id, document_id=operation.document_id, metadata_id=metadata_value.id, diff --git a/api/services/oauth_server.py b/api/services/oauth_server.py index b722dbee22..b05b43d76e 100644 --- a/api/services/oauth_server.py +++ b/api/services/oauth_server.py @@ -7,7 +7,7 @@ from werkzeug.exceptions import BadRequest from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account +from models import Account from models.model import OAuthProviderApp from services.account_service import AccountService diff --git a/api/services/ops_service.py b/api/services/ops_service.py index 
c214640653..e490b7ed3c 100644 --- a/api/services/ops_service.py +++ b/api/services/ops_service.py @@ -102,6 +102,15 @@ class OpsService: except Exception: new_decrypt_tracing_config.update({"project_url": "https://arms.console.aliyun.com/"}) + if tracing_provider == "tencent" and ( + "project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url") + ): + try: + project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider) + new_decrypt_tracing_config.update({"project_url": project_url}) + except Exception: + new_decrypt_tracing_config.update({"project_url": "https://console.cloud.tencent.com/apm"}) + trace_config_data.tracing_config = new_decrypt_tracing_config return trace_config_data.to_dict() @@ -123,7 +132,7 @@ class OpsService: config_class: type[BaseTracingConfig] = provider_config["config_class"] other_keys: list[str] = provider_config["other_keys"] - default_config_instance: BaseTracingConfig = config_class(**tracing_config) + default_config_instance = config_class.model_validate(tracing_config) for key in other_keys: if key in tracing_config and tracing_config[key] == "": tracing_config[key] = getattr(default_config_instance, key, None) @@ -144,7 +153,7 @@ class OpsService: project_url = f"{tracing_config.get('host')}/project/{project_key}" except Exception: project_url = None - elif tracing_provider in ("langsmith", "opik"): + elif tracing_provider in ("langsmith", "opik", "tencent"): try: project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) except Exception: diff --git a/api/services/plugin/plugin_migration.py b/api/services/plugin/plugin_migration.py index 99946d8fa9..dec92a6faa 100644 --- a/api/services/plugin/plugin_migration.py +++ b/api/services/plugin/plugin_migration.py @@ -242,7 +242,7 @@ class PluginMigration: if data.get("type") == "tool": provider_name = data.get("provider_name") provider_type = data.get("provider_type") - if provider_name not in excluded_providers and provider_type == ToolProviderType.BUILT_IN.value: + if provider_name not in excluded_providers and provider_type == ToolProviderType.BUILT_IN: result.append(ToolProviderID(provider_name).plugin_id) return result @@ -269,9 +269,9 @@ class PluginMigration: for tool in agent_config["tools"]: if isinstance(tool, dict): try: - tool_entity = AgentToolEntity(**tool) + tool_entity = AgentToolEntity.model_validate(tool) if ( - tool_entity.provider_type == ToolProviderType.BUILT_IN.value + tool_entity.provider_type == ToolProviderType.BUILT_IN and tool_entity.provider_id not in excluded_providers ): result.append(ToolProviderID(tool_entity.provider_id).plugin_id) diff --git a/api/services/plugin/plugin_service.py b/api/services/plugin/plugin_service.py index 604adeb7b5..525ccc9417 100644 --- a/api/services/plugin/plugin_service.py +++ b/api/services/plugin/plugin_service.py @@ -336,6 +336,8 @@ class PluginService: pkg, verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, ) + PluginService._check_plugin_installation_scope(response.verification) + return response @staticmethod @@ -358,6 +360,8 @@ class PluginService: pkg, verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, ) + PluginService._check_plugin_installation_scope(response.verification) + return response @staticmethod @@ -377,6 +381,10 @@ class PluginService: manager = PluginInstaller() + for plugin_unique_identifier in plugin_unique_identifiers: + resp = 
manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier) + PluginService._check_plugin_installation_scope(resp.verification) + return manager.install_from_identifiers( tenant_id, plugin_unique_identifiers, @@ -393,6 +401,9 @@ class PluginService: PluginService._check_marketplace_only_permission() manager = PluginInstaller() + plugin_decode_response = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier) + PluginService._check_plugin_installation_scope(plugin_decode_response.verification) + return manager.install_from_identifiers( tenant_id, [plugin_unique_identifier], diff --git a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py index ca871bcaa1..4ac2e0792b 100644 --- a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py +++ b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py @@ -1,7 +1,7 @@ import yaml -from flask_login import current_user from extensions.ext_database import db +from libs.login import current_account_with_tenant from models.dataset import PipelineCustomizedTemplate from services.rag_pipeline.pipeline_template.pipeline_template_base import PipelineTemplateRetrievalBase from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType @@ -13,9 +13,8 @@ class CustomizedPipelineTemplateRetrieval(PipelineTemplateRetrievalBase): """ def get_pipeline_templates(self, language: str) -> dict: - result = self.fetch_pipeline_templates_from_customized( - tenant_id=current_user.current_tenant_id, language=language - ) + _, current_tenant_id = current_account_with_tenant() + result = self.fetch_pipeline_templates_from_customized(tenant_id=current_tenant_id, language=language) return result def get_pipeline_template_detail(self, template_id: str): diff --git a/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py b/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py index 8f96842337..571ca6c7a6 100644 --- a/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py +++ b/api/services/rag_pipeline/pipeline_template/remote/remote_retrieval.py @@ -1,6 +1,6 @@ import logging -import requests +import httpx from configs import dify_config from services.rag_pipeline.pipeline_template.database.database_retrieval import DatabasePipelineTemplateRetrieval @@ -43,7 +43,7 @@ class RemotePipelineTemplateRetrieval(PipelineTemplateRetrievalBase): """ domain = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/pipeline-templates/{template_id}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: return None data: dict = response.json() @@ -58,7 +58,7 @@ class RemotePipelineTemplateRetrieval(PipelineTemplateRetrievalBase): """ domain = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/pipeline-templates?language={language}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: raise ValueError(f"fetch pipeline templates failed, status code: {response.status_code}") diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py index fdaaa73bcc..d2ba462a37 100644 --- a/api/services/rag_pipeline/rag_pipeline.py +++ 
b/api/services/rag_pipeline/rag_pipeline.py @@ -54,7 +54,7 @@ from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.dataset import ( # type: ignore Dataset, Document, @@ -358,7 +358,7 @@ class RagPipelineService: for node in nodes: if node.get("data", {}).get("type") == "knowledge-index": knowledge_configuration = node.get("data", {}) - knowledge_configuration = KnowledgeConfiguration(**knowledge_configuration) + knowledge_configuration = KnowledgeConfiguration.model_validate(knowledge_configuration) # update dataset dataset = pipeline.retrieve_dataset(session=session) @@ -873,7 +873,7 @@ class RagPipelineService: variable_pool = node_instance.graph_runtime_state.variable_pool invoke_from = variable_pool.get(["sys", SystemVariableKey.INVOKE_FROM]) if invoke_from: - if invoke_from.value == InvokeFrom.PUBLISHED.value: + if invoke_from.value == InvokeFrom.PUBLISHED: document_id = variable_pool.get(["sys", SystemVariableKey.DOCUMENT_ID]) if document_id: document = db.session.query(Document).where(Document.id == document_id.value).first() diff --git a/api/services/rag_pipeline/rag_pipeline_dsl_service.py b/api/services/rag_pipeline/rag_pipeline_dsl_service.py index f74de1bcab..c02fad4dc6 100644 --- a/api/services/rag_pipeline/rag_pipeline_dsl_service.py +++ b/api/services/rag_pipeline/rag_pipeline_dsl_service.py @@ -288,7 +288,7 @@ class RagPipelineDslService: dataset_id = None for node in nodes: if node.get("data", {}).get("type") == "knowledge-index": - knowledge_configuration = KnowledgeConfiguration(**node.get("data", {})) + knowledge_configuration = KnowledgeConfiguration.model_validate(node.get("data", {})) if ( dataset and pipeline.is_published @@ -426,7 +426,7 @@ class RagPipelineDslService: dataset_id = None for node in nodes: if node.get("data", {}).get("type") == "knowledge-index": - knowledge_configuration = KnowledgeConfiguration(**node.get("data", {})) + knowledge_configuration = KnowledgeConfiguration.model_validate(node.get("data", {})) if not dataset: dataset = Dataset( tenant_id=account.current_tenant_id, @@ -556,7 +556,7 @@ class RagPipelineDslService: graph = workflow_data.get("graph", {}) for node in graph.get("nodes", []): - if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL.value: + if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL: dataset_ids = node["data"].get("dataset_ids", []) node["data"]["dataset_ids"] = [ decrypted_id @@ -613,7 +613,7 @@ class RagPipelineDslService: tenant_id=pipeline.tenant_id, app_id=pipeline.id, features="{}", - type=WorkflowType.RAG_PIPELINE.value, + type=WorkflowType.RAG_PIPELINE, version="draft", graph=json.dumps(graph), created_by=account.id, @@ -689,17 +689,17 @@ class RagPipelineDslService: if not node_data: continue data_type = node_data.get("type", "") - if data_type == NodeType.KNOWLEDGE_RETRIEVAL.value: + if data_type == NodeType.KNOWLEDGE_RETRIEVAL: dataset_ids = node_data.get("dataset_ids", []) node["data"]["dataset_ids"] = [ self.encrypt_dataset_id(dataset_id=dataset_id, tenant_id=pipeline.tenant_id) for dataset_id in dataset_ids ] # filter credential id from tool node - if not include_secret and data_type == NodeType.TOOL.value: + if not include_secret and data_type == NodeType.TOOL: node_data.pop("credential_id", None) # filter credential id from 
agent node - if not include_secret and data_type == NodeType.AGENT.value: + if not include_secret and data_type == NodeType.AGENT: for tool in node_data.get("agent_parameters", {}).get("tools", {}).get("value", []): tool.pop("credential_id", None) @@ -733,36 +733,36 @@ class RagPipelineDslService: try: typ = node.get("data", {}).get("type") match typ: - case NodeType.TOOL.value: - tool_entity = ToolNodeData(**node["data"]) + case NodeType.TOOL: + tool_entity = ToolNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id), ) - case NodeType.DATASOURCE.value: - datasource_entity = DatasourceNodeData(**node["data"]) + case NodeType.DATASOURCE: + datasource_entity = DatasourceNodeData.model_validate(node["data"]) if datasource_entity.provider_type != "local_file": dependencies.append(datasource_entity.plugin_id) - case NodeType.LLM.value: - llm_entity = LLMNodeData(**node["data"]) + case NodeType.LLM: + llm_entity = LLMNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider), ) - case NodeType.QUESTION_CLASSIFIER.value: - question_classifier_entity = QuestionClassifierNodeData(**node["data"]) + case NodeType.QUESTION_CLASSIFIER: + question_classifier_entity = QuestionClassifierNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency( question_classifier_entity.model.provider ), ) - case NodeType.PARAMETER_EXTRACTOR.value: - parameter_extractor_entity = ParameterExtractorNodeData(**node["data"]) + case NodeType.PARAMETER_EXTRACTOR: + parameter_extractor_entity = ParameterExtractorNodeData.model_validate(node["data"]) dependencies.append( DependenciesAnalysisService.analyze_model_provider_dependency( parameter_extractor_entity.model.provider ), ) - case NodeType.KNOWLEDGE_INDEX.value: - knowledge_index_entity = KnowledgeConfiguration(**node["data"]) + case NodeType.KNOWLEDGE_INDEX: + knowledge_index_entity = KnowledgeConfiguration.model_validate(node["data"]) if knowledge_index_entity.indexing_technique == "high_quality": if knowledge_index_entity.embedding_model_provider: dependencies.append( @@ -782,8 +782,8 @@ class RagPipelineDslService: knowledge_index_entity.retrieval_model.reranking_model.reranking_provider_name ), ) - case NodeType.KNOWLEDGE_RETRIEVAL.value: - knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"]) + case NodeType.KNOWLEDGE_RETRIEVAL: + knowledge_retrieval_entity = KnowledgeRetrievalNodeData.model_validate(node["data"]) if knowledge_retrieval_entity.retrieval_mode == "multiple": if knowledge_retrieval_entity.multiple_retrieval_config: if ( @@ -873,7 +873,7 @@ class RagPipelineDslService: """ Returns the leaked dependencies in current workspace """ - dependencies = [PluginDependency(**dep) for dep in dsl_dependencies] + dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies] if not dependencies: return [] @@ -927,7 +927,7 @@ class RagPipelineDslService: account = cast(Account, current_user) rag_pipeline_import_info: RagPipelineImportInfo = self.import_rag_pipeline( account=account, - import_mode=ImportMode.YAML_CONTENT.value, + import_mode=ImportMode.YAML_CONTENT, yaml_content=rag_pipeline_dataset_create_entity.yaml_content, dataset=None, dataset_name=rag_pipeline_dataset_create_entity.name, diff --git a/api/services/rag_pipeline/rag_pipeline_transform_service.py 
b/api/services/rag_pipeline/rag_pipeline_transform_service.py index db9508824b..d79ab71668 100644 --- a/api/services/rag_pipeline/rag_pipeline_transform_service.py +++ b/api/services/rag_pipeline/rag_pipeline_transform_service.py @@ -9,6 +9,7 @@ from flask_login import current_user from constants import DOCUMENT_EXTENSIONS from core.plugin.impl.plugin import PluginInstaller +from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from factories import variable_factory from models.dataset import Dataset, Document, DocumentPipelineExecutionLog, Pipeline @@ -149,23 +150,22 @@ class RagPipelineTransformService: file_extensions = node.get("data", {}).get("fileExtensions", []) if not file_extensions: return node - file_extensions = [file_extension.lower() for file_extension in file_extensions] - node["data"]["fileExtensions"] = DOCUMENT_EXTENSIONS + node["data"]["fileExtensions"] = [ext.lower() for ext in file_extensions if ext in DOCUMENT_EXTENSIONS] return node def _deal_knowledge_index( self, dataset: Dataset, doc_form: str, indexing_technique: str | None, retrieval_model: dict, node: dict ): knowledge_configuration_dict = node.get("data", {}) - knowledge_configuration = KnowledgeConfiguration(**knowledge_configuration_dict) + knowledge_configuration = KnowledgeConfiguration.model_validate(knowledge_configuration_dict) if indexing_technique == "high_quality": knowledge_configuration.embedding_model = dataset.embedding_model knowledge_configuration.embedding_model_provider = dataset.embedding_model_provider if retrieval_model: - retrieval_setting = RetrievalSetting(**retrieval_model) + retrieval_setting = RetrievalSetting.model_validate(retrieval_model) if indexing_technique == "economy": - retrieval_setting.search_method = "keyword_search" + retrieval_setting.search_method = RetrievalMethod.KEYWORD_SEARCH knowledge_configuration.retrieval_model = retrieval_setting else: dataset.retrieval_model = knowledge_configuration.retrieval_model.model_dump() @@ -215,7 +215,7 @@ class RagPipelineTransformService: tenant_id=pipeline.tenant_id, app_id=pipeline.id, features="{}", - type=WorkflowType.RAG_PIPELINE.value, + type=WorkflowType.RAG_PIPELINE, version="draft", graph=json.dumps(graph), created_by=current_user.id, @@ -227,7 +227,7 @@ class RagPipelineTransformService: tenant_id=pipeline.tenant_id, app_id=pipeline.id, features="{}", - type=WorkflowType.RAG_PIPELINE.value, + type=WorkflowType.RAG_PIPELINE, version=str(datetime.now(UTC).replace(tzinfo=None)), graph=json.dumps(graph), created_by=current_user.id, diff --git a/api/services/recommend_app/remote/remote_retrieval.py b/api/services/recommend_app/remote/remote_retrieval.py index 2d57769f63..b217c9026a 100644 --- a/api/services/recommend_app/remote/remote_retrieval.py +++ b/api/services/recommend_app/remote/remote_retrieval.py @@ -1,6 +1,6 @@ import logging -import requests +import httpx from configs import dify_config from services.recommend_app.buildin.buildin_retrieval import BuildInRecommendAppRetrieval @@ -43,7 +43,7 @@ class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase): """ domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/apps/{app_id}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: return None data: dict = response.json() @@ -58,7 +58,7 @@ class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase): """ domain = 
dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN url = f"{domain}/apps?language={language}" - response = requests.get(url, timeout=(3, 10)) + response = httpx.get(url, timeout=httpx.Timeout(10.0, connect=3.0)) if response.status_code != 200: raise ValueError(f"fetch recommended apps failed, status code: {response.status_code}") diff --git a/api/services/saved_message_service.py b/api/services/saved_message_service.py index 67a0106bbd..4dd6c8107b 100644 --- a/api/services/saved_message_service.py +++ b/api/services/saved_message_service.py @@ -2,7 +2,7 @@ from typing import Union from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, EndUser from models.web import SavedMessage from services.message_service import MessageService diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index f86d7e51bf..bb024cc846 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -148,7 +148,7 @@ class ApiToolManageService: description=extra_info.get("description", ""), schema_type_str=schema_type, tools_str=json.dumps(jsonable_encoder(tool_bundles)), - credentials_str={}, + credentials_str="{}", privacy_policy=privacy_policy, custom_disclaimer=custom_disclaimer, ) @@ -277,7 +277,7 @@ class ApiToolManageService: provider.icon = json.dumps(icon) provider.schema = schema provider.description = extra_info.get("description", "") - provider.schema_type_str = ApiProviderSchemaType.OPENAPI.value + provider.schema_type_str = ApiProviderSchemaType.OPENAPI provider.tools_str = json.dumps(jsonable_encoder(tool_bundles)) provider.privacy_policy = privacy_policy provider.custom_disclaimer = custom_disclaimer @@ -393,7 +393,7 @@ class ApiToolManageService: icon="", schema=schema, description="", - schema_type_str=ApiProviderSchemaType.OPENAPI.value, + schema_type_str=ApiProviderSchemaType.OPENAPI, tools_str=json.dumps(jsonable_encoder(tool_bundles)), credentials_str=json.dumps(credentials), ) diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index 6b0b6b0f0e..b5dcec17d0 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -349,14 +349,10 @@ class BuiltinToolManageService: provider_controller = ToolManager.get_builtin_provider(default_provider.provider, tenant_id) credentials: list[ToolProviderCredentialApiEntity] = [] - encrypters = {} for provider in providers: - credential_type = provider.credential_type - if credential_type not in encrypters: - encrypters[credential_type] = BuiltinToolManageService.create_tool_encrypter( - tenant_id, provider, provider.provider, provider_controller - )[0] - encrypter = encrypters[credential_type] + encrypter, _ = BuiltinToolManageService.create_tool_encrypter( + tenant_id, provider, provider.provider, provider_controller + ) decrypt_credential = encrypter.mask_tool_credentials(encrypter.decrypt(provider.credentials)) credential_entity = ToolTransformService.convert_builtin_provider_to_credential_entity( provider=provider, @@ -687,7 +683,7 @@ class BuiltinToolManageService: cache=NoOpProviderCredentialCache(), ) original_params = encrypter.decrypt(custom_client_params.oauth_params) - new_params: dict = { + new_params = { key: value if value != HIDDEN_VALUE else original_params.get(key, 
UNKNOWN_VALUE) for key, value in client_params.items() } diff --git a/api/services/tools/mcp_tools_manage_service.py b/api/services/tools/mcp_tools_manage_service.py index dd626dd615..54133d3801 100644 --- a/api/services/tools/mcp_tools_manage_service.py +++ b/api/services/tools/mcp_tools_manage_service.py @@ -1,7 +1,7 @@ import hashlib import json from datetime import datetime -from typing import Any, cast +from typing import Any from sqlalchemy import or_ from sqlalchemy.exc import IntegrityError @@ -55,7 +55,7 @@ class MCPToolManageService: cache=NoOpProviderCredentialCache(), ) - return cast(dict[str, str], encrypter_instance.encrypt(headers)) + return encrypter_instance.encrypt(headers) @staticmethod def get_mcp_provider_by_provider_id(provider_id: str, tenant_id: str) -> MCPToolProvider: @@ -188,6 +188,8 @@ class MCPToolManageService: raise user = mcp_provider.load_user() + if not mcp_provider.icon: + raise ValueError("MCP provider icon is required") return ToolProviderApiEntity( id=mcp_provider.id, name=mcp_provider.name, diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 6b36ed0eb7..b7850ea150 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -50,16 +50,16 @@ class ToolTransformService: URL(dify_config.CONSOLE_API_URL or "/") / "console" / "api" / "workspaces" / "current" / "tool-provider" ) - if provider_type == ToolProviderType.BUILT_IN.value: + if provider_type == ToolProviderType.BUILT_IN: return str(url_prefix / "builtin" / provider_name / "icon") - elif provider_type in {ToolProviderType.API.value, ToolProviderType.WORKFLOW.value}: + elif provider_type in {ToolProviderType.API, ToolProviderType.WORKFLOW}: try: if isinstance(icon, str): return json.loads(icon) return icon except Exception: return {"background": "#252525", "content": "\ud83d\ude01"} - elif provider_type == ToolProviderType.MCP.value: + elif provider_type == ToolProviderType.MCP: return icon return "" @@ -152,7 +152,8 @@ class ToolTransformService: if decrypt_credentials: credentials = db_provider.credentials - + if not db_provider.tenant_id: + raise ValueError(f"Required tenant_id is missing for BuiltinToolProvider with id {db_provider.id}") # init tool configuration encrypter, _ = create_provider_encrypter( tenant_id=db_provider.tenant_id, @@ -242,7 +243,7 @@ class ToolTransformService: is_team_authorization=db_provider.authed, server_url=db_provider.masked_server_url, tools=ToolTransformService.mcp_tool_to_user_tool( - db_provider, [MCPTool(**tool) for tool in json.loads(db_provider.tools)] + db_provider, [MCPTool.model_validate(tool) for tool in json.loads(db_provider.tools)] ), updated_at=int(db_provider.updated_at.timestamp()), label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name), @@ -387,6 +388,7 @@ class ToolTransformService: labels=labels or [], ) else: + assert tool.operation_id return ToolApiEntity( author=tool.author, name=tool.operation_id or "", diff --git a/api/services/vector_service.py b/api/services/vector_service.py index 1c559f2c2b..abc92a0181 100644 --- a/api/services/vector_service.py +++ b/api/services/vector_service.py @@ -134,7 +134,7 @@ class VectorService: ) # use full doc mode to generate segment's child chunk processing_rule_dict = processing_rule.to_dict() - processing_rule_dict["rules"]["parent_mode"] = ParentMode.FULL_DOC.value + processing_rule_dict["rules"]["parent_mode"] = ParentMode.FULL_DOC documents = index_processor.transform( 
[document], embedding_model_instance=embedding_model_instance, diff --git a/api/services/web_conversation_service.py b/api/services/web_conversation_service.py index 0f54e838f3..560aec2330 100644 --- a/api/services/web_conversation_service.py +++ b/api/services/web_conversation_service.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, EndUser from models.web import PinnedConversation from services.conversation_service import ConversationService diff --git a/api/services/webapp_auth_service.py b/api/services/webapp_auth_service.py index 066dc9d741..693bfb95b6 100644 --- a/api/services/webapp_auth_service.py +++ b/api/services/webapp_auth_service.py @@ -10,7 +10,7 @@ from extensions.ext_database import db from libs.helper import TokenManager from libs.passport import PassportService from libs.password import compare_password -from models.account import Account, AccountStatus +from models import Account, AccountStatus from models.model import App, EndUser, Site from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -36,7 +36,7 @@ class WebAppAuthService: if not account: raise AccountNotFoundError() - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise AccountLoginError("Account is banned.") if account.password is None or not compare_password(password, account.password, account.password_salt): @@ -56,7 +56,7 @@ class WebAppAuthService: if not account: return None - if account.status == AccountStatus.BANNED.value: + if account.status == AccountStatus.BANNED: raise Unauthorized("Account is banned.") return account diff --git a/api/services/website_service.py b/api/services/website_service.py index 37588d6ba5..a23f01ec71 100644 --- a/api/services/website_service.py +++ b/api/services/website_service.py @@ -23,6 +23,7 @@ class CrawlOptions: only_main_content: bool = False includes: str | None = None excludes: str | None = None + prompt: str | None = None max_depth: int | None = None use_sitemap: bool = True @@ -70,6 +71,7 @@ class WebsiteCrawlApiRequest: only_main_content=self.options.get("only_main_content", False), includes=self.options.get("includes"), excludes=self.options.get("excludes"), + prompt=self.options.get("prompt"), max_depth=self.options.get("max_depth"), use_sitemap=self.options.get("use_sitemap", True), ) @@ -174,6 +176,7 @@ class WebsiteService: def _crawl_with_firecrawl(cls, request: CrawlRequest, api_key: str, config: dict) -> dict[str, Any]: firecrawl_app = FirecrawlApp(api_key=api_key, base_url=config.get("base_url")) + params: dict[str, Any] if not request.options.crawl_sub_pages: params = { "includePaths": [], @@ -188,8 +191,10 @@ class WebsiteService: "limit": request.options.limit, "scrapeOptions": {"onlyMainContent": request.options.only_main_content}, } - if request.options.max_depth: - params["maxDepth"] = request.options.max_depth + + # Add optional prompt for Firecrawl v2 crawl-params compatibility + if request.options.prompt: + params["prompt"] = request.options.prompt job_id = firecrawl_app.crawl_url(request.url, params) website_crawl_time_cache_key = f"website_crawl_{job_id}" diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py index 
dccd891981..e70b2b5c95 100644 --- a/api/services/workflow/workflow_converter.py +++ b/api/services/workflow/workflow_converter.py @@ -22,7 +22,7 @@ from core.prompt.utils.prompt_template_parser import PromptTemplateParser from core.workflow.nodes import NodeType from events.app_event import app_was_created from extensions.ext_database import db -from models.account import Account +from models import Account from models.api_based_extension import APIBasedExtension, APIBasedExtensionPoint from models.model import App, AppMode, AppModelConfig from models.workflow import Workflow, WorkflowType @@ -79,7 +79,6 @@ class WorkflowConverter: new_app.updated_by = account.id db.session.add(new_app) db.session.flush() - db.session.commit() workflow.app_id = new_app.id db.session.commit() @@ -229,7 +228,7 @@ class WorkflowConverter: "position": None, "data": { "title": "START", - "type": NodeType.START.value, + "type": NodeType.START, "variables": [jsonable_encoder(v) for v in variables], }, } @@ -274,7 +273,7 @@ class WorkflowConverter: inputs[v.variable] = "{{#start." + v.variable + "#}}" request_body = { - "point": APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY.value, + "point": APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY, "params": { "app_id": app_model.id, "tool_variable": tool_variable, @@ -291,7 +290,7 @@ class WorkflowConverter: "position": None, "data": { "title": f"HTTP REQUEST {api_based_extension.name}", - "type": NodeType.HTTP_REQUEST.value, + "type": NodeType.HTTP_REQUEST, "method": "post", "url": api_based_extension.api_endpoint, "authorization": {"type": "api-key", "config": {"type": "bearer", "api_key": api_key}}, @@ -309,7 +308,7 @@ class WorkflowConverter: "position": None, "data": { "title": f"Parse {api_based_extension.name} Response", - "type": NodeType.CODE.value, + "type": NodeType.CODE, "variables": [{"variable": "response_json", "value_selector": [http_request_node["id"], "body"]}], "code_language": "python3", "code": "import json\n\ndef main(response_json: str) -> str:\n response_body = json.loads(" @@ -349,7 +348,7 @@ class WorkflowConverter: "position": None, "data": { "title": "KNOWLEDGE RETRIEVAL", - "type": NodeType.KNOWLEDGE_RETRIEVAL.value, + "type": NodeType.KNOWLEDGE_RETRIEVAL, "query_variable_selector": query_variable_selector, "dataset_ids": dataset_config.dataset_ids, "retrieval_mode": retrieve_config.retrieve_strategy.value, @@ -397,16 +396,16 @@ class WorkflowConverter: :param external_data_variable_node_mapping: external data variable node mapping """ # fetch start and knowledge retrieval node - start_node = next(filter(lambda n: n["data"]["type"] == NodeType.START.value, graph["nodes"])) + start_node = next(filter(lambda n: n["data"]["type"] == NodeType.START, graph["nodes"])) knowledge_retrieval_node = next( - filter(lambda n: n["data"]["type"] == NodeType.KNOWLEDGE_RETRIEVAL.value, graph["nodes"]), None + filter(lambda n: n["data"]["type"] == NodeType.KNOWLEDGE_RETRIEVAL, graph["nodes"]), None ) role_prefix = None prompts: Any | None = None # Chat Model - if model_config.mode == LLMMode.CHAT.value: + if model_config.mode == LLMMode.CHAT: if prompt_template.prompt_type == PromptTemplateEntity.PromptType.SIMPLE: if not prompt_template.simple_prompt_template: raise ValueError("Simple prompt template is required") @@ -518,7 +517,7 @@ class WorkflowConverter: "position": None, "data": { "title": "LLM", - "type": NodeType.LLM.value, + "type": NodeType.LLM, "model": { "provider": model_config.provider, "name": model_config.model, @@ -573,7 +572,7 @@ 
class WorkflowConverter: "position": None, "data": { "title": "END", - "type": NodeType.END.value, + "type": NodeType.END, "outputs": [{"variable": "result", "value_selector": ["llm", "text"]}], }, } @@ -587,7 +586,7 @@ class WorkflowConverter: return { "id": "answer", "position": None, - "data": {"title": "ANSWER", "type": NodeType.ANSWER.value, "answer": "{{#llm.text#}}"}, + "data": {"title": "ANSWER", "type": NodeType.ANSWER, "answer": "{{#llm.text#}}"}, } def _create_edge(self, source: str, target: str): diff --git a/api/services/workflow_app_service.py b/api/services/workflow_app_service.py index ced6dca324..23dd436675 100644 --- a/api/services/workflow_app_service.py +++ b/api/services/workflow_app_service.py @@ -86,12 +86,16 @@ class WorkflowAppService: ), ) if created_by_account: + account = session.scalar(select(Account).where(Account.email == created_by_account)) + if not account: + raise ValueError(f"Account not found: {created_by_account}") + stmt = stmt.join( Account, and_( WorkflowAppLog.created_by == Account.id, WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT, - Account.email == created_by_account, + Account.id == account.id, ), ) diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index 1378c20128..5e63a83bb1 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -32,8 +32,7 @@ from factories.file_factory import StorageKeyLoader from factories.variable_factory import build_segment, segment_to_variable from libs.datetime_utils import naive_utc_now from libs.uuid_utils import uuidv7 -from models import App, Conversation -from models.account import Account +from models import Account, App, Conversation from models.enums import DraftVariableType from models.workflow import Workflow, WorkflowDraftVariable, WorkflowDraftVariableFile, is_system_variable_editable from repositories.factory import DifyAPIRepositoryFactory @@ -569,7 +568,7 @@ class WorkflowDraftVariableService: system_instruction="", system_instruction_tokens=0, status="normal", - invoke_from=InvokeFrom.DEBUGGER.value, + invoke_from=InvokeFrom.DEBUGGER, from_source="console", from_end_user_id=None, from_account_id=account_id, diff --git a/api/services/workflow_run_service.py b/api/services/workflow_run_service.py index 79d91cab4c..6a2edd912a 100644 --- a/api/services/workflow_run_service.py +++ b/api/services/workflow_run_service.py @@ -74,7 +74,7 @@ class WorkflowRunService: return self._workflow_run_repo.get_paginated_workflow_runs( tenant_id=app_model.tenant_id, app_id=app_model.id, - triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, limit=limit, last_id=last_id, ) diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 359fdb85fd..f765c229ab 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -30,7 +30,7 @@ from extensions.ext_database import db from extensions.ext_storage import storage from factories.file_factory import build_from_mapping, build_from_mappings from libs.datetime_utils import naive_utc_now -from models.account import Account +from models import Account from models.model import App, AppMode from models.tools import WorkflowToolProvider from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType @@ -1006,7 +1006,7 @@ def _setup_variable_pool( ) # Only add chatflow-specific variables for 
non-workflow types - if workflow.type != WorkflowType.WORKFLOW.value: + if workflow.type != WorkflowType.WORKFLOW: system_variable.query = query system_variable.conversation_id = conversation_id system_variable.dialogue_count = 1 diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index 951b9e5653..b528728364 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -8,7 +8,6 @@ import click import pandas as pd from celery import shared_task from sqlalchemy import func -from sqlalchemy.orm import Session from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType @@ -50,54 +49,48 @@ def batch_create_segment_to_index_task( indexing_cache_key = f"segment_batch_import_{job_id}" try: - with Session(db.engine) as session: - dataset = session.get(Dataset, dataset_id) - if not dataset: - raise ValueError("Dataset not exist.") + dataset = db.session.get(Dataset, dataset_id) + if not dataset: + raise ValueError("Dataset not exist.") - dataset_document = session.get(Document, document_id) - if not dataset_document: - raise ValueError("Document not exist.") + dataset_document = db.session.get(Document, document_id) + if not dataset_document: + raise ValueError("Document not exist.") - if ( - not dataset_document.enabled - or dataset_document.archived - or dataset_document.indexing_status != "completed" - ): - raise ValueError("Document is not available.") + if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": + raise ValueError("Document is not available.") - upload_file = session.get(UploadFile, upload_file_id) - if not upload_file: - raise ValueError("UploadFile not found.") + upload_file = db.session.get(UploadFile, upload_file_id) + if not upload_file: + raise ValueError("UploadFile not found.") - with tempfile.TemporaryDirectory() as temp_dir: - suffix = Path(upload_file.key).suffix - # FIXME mypy: Cannot determine type of 'tempfile._get_candidate_names' better not use it here - file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore - storage.download(upload_file.key, file_path) + with tempfile.TemporaryDirectory() as temp_dir: + suffix = Path(upload_file.key).suffix + file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore + storage.download(upload_file.key, file_path) - # Skip the first row - df = pd.read_csv(file_path) - content = [] - for _, row in df.iterrows(): - if dataset_document.doc_form == "qa_model": - data = {"content": row.iloc[0], "answer": row.iloc[1]} - else: - data = {"content": row.iloc[0]} - content.append(data) - if len(content) == 0: - raise ValueError("The CSV file is empty.") + df = pd.read_csv(file_path) + content = [] + for _, row in df.iterrows(): + if dataset_document.doc_form == "qa_model": + data = {"content": row.iloc[0], "answer": row.iloc[1]} + else: + data = {"content": row.iloc[0]} + content.append(data) + if len(content) == 0: + raise ValueError("The CSV file is empty.") + + document_segments = [] + embedding_model = None + if dataset.indexing_technique == "high_quality": + model_manager = ModelManager() + embedding_model = model_manager.get_model_instance( + tenant_id=dataset.tenant_id, + provider=dataset.embedding_model_provider, + model_type=ModelType.TEXT_EMBEDDING, + model=dataset.embedding_model, + ) - document_segments = [] - embedding_model = None - if 
dataset.indexing_technique == "high_quality": - model_manager = ModelManager() - embedding_model = model_manager.get_model_instance( - tenant_id=dataset.tenant_id, - provider=dataset.embedding_model_provider, - model_type=ModelType.TEXT_EMBEDDING, - model=dataset.embedding_model, - ) word_count_change = 0 if embedding_model: tokens_list = embedding_model.get_text_embedding_num_tokens( @@ -105,6 +98,7 @@ def batch_create_segment_to_index_task( ) else: tokens_list = [0] * len(content) + for segment, tokens in zip(content, tokens_list): content = segment["content"] doc_id = str(uuid.uuid4()) @@ -135,11 +129,11 @@ def batch_create_segment_to_index_task( word_count_change += segment_document.word_count db.session.add(segment_document) document_segments.append(segment_document) - # update document word count + assert dataset_document.word_count is not None dataset_document.word_count += word_count_change db.session.add(dataset_document) - # add index to db + VectorService.create_segments_vector(None, document_segments, dataset, dataset_document.doc_form) db.session.commit() redis_client.setex(indexing_cache_key, 600, "completed") diff --git a/api/tasks/delete_account_task.py b/api/tasks/delete_account_task.py index 611aef86ad..fb5eb1d691 100644 --- a/api/tasks/delete_account_task.py +++ b/api/tasks/delete_account_task.py @@ -3,7 +3,7 @@ import logging from celery import shared_task from extensions.ext_database import db -from models.account import Account +from models import Account from services.billing_service import BillingService from tasks.mail_account_deletion_task import send_deletion_success_task diff --git a/api/tasks/ops_trace_task.py b/api/tasks/ops_trace_task.py index 7b254ac3b5..72e3b42ca7 100644 --- a/api/tasks/ops_trace_task.py +++ b/api/tasks/ops_trace_task.py @@ -36,7 +36,7 @@ def process_trace_tasks(file_info): if trace_info.get("workflow_data"): trace_info["workflow_data"] = WorkflowRun.from_dict(data=trace_info["workflow_data"]) if trace_info.get("documents"): - trace_info["documents"] = [Document(**doc) for doc in trace_info["documents"]] + trace_info["documents"] = [Document.model_validate(doc) for doc in trace_info["documents"]] try: if trace_instance: diff --git a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py index bae8f1c4db..124971e8e2 100644 --- a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py +++ b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py @@ -1,5 +1,5 @@ +import json import operator -import traceback import typing import click @@ -9,38 +9,106 @@ from core.helper import marketplace from core.helper.marketplace import MarketplacePluginDeclaration from core.plugin.entities.plugin import PluginInstallationSource from core.plugin.impl.plugin import PluginInstaller +from extensions.ext_redis import redis_client from models.account import TenantPluginAutoUpgradeStrategy RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3 +CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:" +CACHE_REDIS_TTL = 60 * 15 # 15 minutes -cached_plugin_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {} +def _get_redis_cache_key(plugin_id: str) -> str: + """Generate Redis cache key for plugin manifest.""" + return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}" + + +def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]: + """ + Get cached plugin manifest from Redis. 
+ Returns: + - MarketplacePluginDeclaration: if found in cache + - None: if cached as not found (marketplace returned no result) + - False: if not in cache at all + """ + try: + key = _get_redis_cache_key(plugin_id) + cached_data = redis_client.get(key) + if cached_data is None: + return False + + cached_json = json.loads(cached_data) + if cached_json is None: + return None + + return MarketplacePluginDeclaration.model_validate(cached_json) + except Exception: + return False + + +def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None: + """ + Cache plugin manifest in Redis. + Args: + plugin_id: The plugin ID + manifest: The manifest to cache, or None if not found in marketplace + """ + try: + key = _get_redis_cache_key(plugin_id) + if manifest is None: + # Cache the fact that this plugin was not found + redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None)) + else: + # Cache the manifest data + redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json()) + except Exception: + # If Redis fails, continue without caching + # traceback.print_exc() + pass def marketplace_batch_fetch_plugin_manifests( plugin_ids_plain_list: list[str], ) -> list[MarketplacePluginDeclaration]: - global cached_plugin_manifests - # return marketplace.batch_fetch_plugin_manifests(plugin_ids_plain_list) - not_included_plugin_ids = [ - plugin_id for plugin_id in plugin_ids_plain_list if plugin_id not in cached_plugin_manifests - ] - if not_included_plugin_ids: - manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_included_plugin_ids) + """Fetch plugin manifests with Redis caching support.""" + cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {} + not_cached_plugin_ids: list[str] = [] + + # Check Redis cache for each plugin + for plugin_id in plugin_ids_plain_list: + cached_result = _get_cached_manifest(plugin_id) + if cached_result is False: + # Not in cache, need to fetch + not_cached_plugin_ids.append(plugin_id) + else: + # Either found manifest or cached as None (not found in marketplace) + # At this point, cached_result is either MarketplacePluginDeclaration or None + if isinstance(cached_result, bool): + # This should never happen due to the if condition above, but for type safety + continue + cached_manifests[plugin_id] = cached_result + + # Fetch uncached plugins from marketplace + if not_cached_plugin_ids: + manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids) + + # Cache the fetched manifests for manifest in manifests: - cached_plugin_manifests[manifest.plugin_id] = manifest + cached_manifests[manifest.plugin_id] = manifest + _set_cached_manifest(manifest.plugin_id, manifest) - if ( - len(manifests) == 0 - ): # this indicates that the plugin not found in marketplace, should set None in cache to prevent future check - for plugin_id in not_included_plugin_ids: - cached_plugin_manifests[plugin_id] = None + # Cache plugins that were not found in marketplace + fetched_plugin_ids = {manifest.plugin_id for manifest in manifests} + for plugin_id in not_cached_plugin_ids: + if plugin_id not in fetched_plugin_ids: + cached_manifests[plugin_id] = None + _set_cached_manifest(plugin_id, None) + # Build result list from cached manifests result: list[MarketplacePluginDeclaration] = [] for plugin_id in plugin_ids_plain_list: - final_manifest = cached_plugin_manifests.get(plugin_id) - if final_manifest is not None: - result.append(final_manifest) 
+ cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id) + if cached_manifest is not None: + result.append(cached_manifest) return result @@ -157,10 +225,10 @@ def process_tenant_plugin_autoupgrade_check_task( ) except Exception as e: click.echo(click.style(f"Error when upgrading plugin: {e}", fg="red")) - traceback.print_exc() + # traceback.print_exc() break except Exception as e: click.echo(click.style(f"Error when checking upgradable plugin: {e}", fg="red")) - traceback.print_exc() + # traceback.print_exc() return diff --git a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py index 028f635188..6de95a3b85 100644 --- a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py @@ -16,7 +16,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom, RagPipelineGenerat from core.app.entities.rag_pipeline_invoke_entities import RagPipelineInvokeEntity from core.repositories.factory import DifyCoreRepositoryFactory from extensions.ext_database import db -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Pipeline from models.enums import WorkflowRunTriggeredFrom from models.workflow import Workflow, WorkflowNodeExecutionTriggeredFrom @@ -29,23 +29,10 @@ def priority_rag_pipeline_run_task( tenant_id: str, ): """ - Async Run rag pipeline - :param rag_pipeline_invoke_entities: Rag pipeline invoke entities - rag_pipeline_invoke_entities include: - :param pipeline_id: Pipeline ID - :param user_id: User ID - :param tenant_id: Tenant ID - :param workflow_id: Workflow ID - :param invoke_from: Invoke source (debugger, published, etc.) - :param streaming: Whether to stream results - :param datasource_type: Type of datasource - :param datasource_info: Datasource information dict - :param batch: Batch identifier - :param document_id: Document ID (optional) - :param start_node_id: Starting node ID - :param inputs: Input parameters dict - :param workflow_execution_id: Workflow execution ID - :param workflow_thread_pool_id: Thread pool ID for workflow execution + Async Run rag pipeline task using high priority queue. 
+ + :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities + :param tenant_id: Tenant ID for the pipeline execution """ # run with threading, thread pool size is 10 @@ -92,7 +79,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], # Create Flask application context for this thread with flask_app.app_context(): try: - rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity(**rag_pipeline_invoke_entity) + rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity.model_validate(rag_pipeline_invoke_entity) user_id = rag_pipeline_invoke_entity_model.user_id tenant_id = rag_pipeline_invoke_entity_model.tenant_id pipeline_id = rag_pipeline_invoke_entity_model.pipeline_id @@ -125,7 +112,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_execution_id = str(uuid.uuid4()) # Create application generate entity from dict - entity = RagPipelineGenerateEntity(**application_generate_entity) + entity = RagPipelineGenerateEntity.model_validate(application_generate_entity) # Create workflow repositories session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) diff --git a/api/tasks/rag_pipeline/rag_pipeline_run_task.py b/api/tasks/rag_pipeline/rag_pipeline_run_task.py index ee904c4649..f4a092d97e 100644 --- a/api/tasks/rag_pipeline/rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/rag_pipeline_run_task.py @@ -17,7 +17,7 @@ from core.app.entities.rag_pipeline_invoke_entities import RagPipelineInvokeEnti from core.repositories.factory import DifyCoreRepositoryFactory from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Pipeline from models.enums import WorkflowRunTriggeredFrom from models.workflow import Workflow, WorkflowNodeExecutionTriggeredFrom @@ -30,23 +30,10 @@ def rag_pipeline_run_task( tenant_id: str, ): """ - Async Run rag pipeline - :param rag_pipeline_invoke_entities: Rag pipeline invoke entities - rag_pipeline_invoke_entities include: - :param pipeline_id: Pipeline ID - :param user_id: User ID - :param tenant_id: Tenant ID - :param workflow_id: Workflow ID - :param invoke_from: Invoke source (debugger, published, etc.) - :param streaming: Whether to stream results - :param datasource_type: Type of datasource - :param datasource_info: Datasource information dict - :param batch: Batch identifier - :param document_id: Document ID (optional) - :param start_node_id: Starting node ID - :param inputs: Input parameters dict - :param workflow_execution_id: Workflow execution ID - :param workflow_thread_pool_id: Thread pool ID for workflow execution + Async Run rag pipeline task using regular priority queue. 
+ + :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities + :param tenant_id: Tenant ID for the pipeline execution """ # run with threading, thread pool size is 10 @@ -113,7 +100,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], # Create Flask application context for this thread with flask_app.app_context(): try: - rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity(**rag_pipeline_invoke_entity) + rag_pipeline_invoke_entity_model = RagPipelineInvokeEntity.model_validate(rag_pipeline_invoke_entity) user_id = rag_pipeline_invoke_entity_model.user_id tenant_id = rag_pipeline_invoke_entity_model.tenant_id pipeline_id = rag_pipeline_invoke_entity_model.pipeline_id @@ -146,7 +133,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_execution_id = str(uuid.uuid4()) # Create application generate entity from dict - entity = RagPipelineGenerateEntity(**application_generate_entity) + entity = RagPipelineGenerateEntity.model_validate(application_generate_entity) # Create workflow repositories session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) diff --git a/api/tasks/retry_document_indexing_task.py b/api/tasks/retry_document_indexing_task.py index 9c12696824..9d208647e6 100644 --- a/api/tasks/retry_document_indexing_task.py +++ b/api/tasks/retry_document_indexing_task.py @@ -10,7 +10,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFacto from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Dataset, Document, DocumentSegment from services.feature_service import FeatureService from services.rag_pipeline.rag_pipeline import RagPipelineService diff --git a/api/tasks/workflow_draft_var_tasks.py b/api/tasks/workflow_draft_var_tasks.py index 457d46a9d8..fcb98ec39e 100644 --- a/api/tasks/workflow_draft_var_tasks.py +++ b/api/tasks/workflow_draft_var_tasks.py @@ -5,15 +5,10 @@ These tasks provide asynchronous storage capabilities for workflow execution dat improving performance by offloading storage operations to background workers. """ -import logging - from celery import shared_task # type: ignore[import-untyped] from sqlalchemy.orm import Session from extensions.ext_database import db - -_logger = logging.getLogger(__name__) - from services.workflow_draft_variable_service import DraftVarFileDeletion, WorkflowDraftVariableService diff --git a/api/templates/without-brand/change_mail_confirm_new_template_en-US.html b/api/templates/without-brand/change_mail_confirm_new_template_en-US.html index 69a8978f42..861b1bcdb6 100644 --- a/api/templates/without-brand/change_mail_confirm_new_template_en-US.html +++ b/api/templates/without-brand/change_mail_confirm_new_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

Confirm Your New Email Address

-

You’re updating the email address linked to your Dify account.

+

You're updating the email address linked to your account.

To confirm this action, please use the verification code below.

This code will only be valid for the next 5 minutes:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html b/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html index e3e9e7c45a..e411680e89 100644 --- a/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html +++ b/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

确认您的邮箱地址变更

-

您正在更新与您的 Dify 账户关联的邮箱地址。

+

您正在更新与您的账户关联的邮箱地址。

为了确认此操作,请使用以下验证码。

此验证码仅在接下来的5分钟内有效:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_old_template_en-US.html b/api/templates/without-brand/change_mail_confirm_old_template_en-US.html index 9d79fa7ff9..9fe52255a5 100644 --- a/api/templates/without-brand/change_mail_confirm_old_template_en-US.html +++ b/api/templates/without-brand/change_mail_confirm_old_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

Verify Your Request to Change Email

-

We received a request to change the email address associated with your Dify account.

+

We received a request to change the email address associated with your account.

To confirm this action, please use the verification code below.

This code will only be valid for the next 5 minutes:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html b/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html index 41f0839190..98cbd2f0c6 100644 --- a/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html +++ b/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

验证您的邮箱变更请求

-

我们收到了一个变更您 Dify 账户关联邮箱地址的请求。

+

我们收到了一个变更您账户关联邮箱地址的请求。

此验证码仅在接下来的5分钟内有效:

@@ -117,5 +120,4 @@
- - + \ No newline at end of file diff --git a/api/templates/without-brand/invite_member_mail_template_en-US.html b/api/templates/without-brand/invite_member_mail_template_en-US.html index fc7f3679ba..f9157284fa 100644 --- a/api/templates/without-brand/invite_member_mail_template_en-US.html +++ b/api/templates/without-brand/invite_member_mail_template_en-US.html @@ -1,5 +1,6 @@ + +
-
- - Dify Logo -
+

Dear {{ to }},

-

{{ inviter_name }} is pleased to invite you to join our workspace on {{application_title}}, a platform specifically designed for LLM application development. On {{application_title}}, you can explore, create, and collaborate to build and operate AI applications.

+

{{ inviter_name }} is pleased to invite you to join our workspace on {{application_title}}, a + platform specifically designed for LLM application development. On {{application_title}}, you can explore, + create, and collaborate to build and operate AI applications.

Click the button below to log in to {{application_title}} and join the workspace.

-

Login Here

+

Login Here

Best regards,

{{application_title}} Team
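Editorial note: after de-branding, the invite template above keeps the same placeholder names. As a hedged illustration of how such a template could be filled in — assuming a Jinja2-style engine, which the `{{ ... }}` syntax suggests but this diff does not confirm, and with made-up substitution values — a minimal sketch:

```python
from jinja2 import Template

# Illustrative only: the values are placeholders, and only the variable names
# visible in the template above are substituted.
with open("api/templates/without-brand/invite_member_mail_template_en-US.html") as f:
    template = Template(f.read())

html = template.render(
    to="alice@example.com",
    inviter_name="Bob",
    application_title="Example Platform",
)
print(html[:200])  # preview the rendered invite email
```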

- + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html index a5758a2184..659c285324 100644 --- a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html +++ b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -80,10 +82,9 @@

You have been assigned as the new owner of the workspace "{{WorkspaceName}}".

As the new owner, you now have full administrative privileges for this workspace.

-

If you have any questions, please contact support@dify.ai.

+

If you have any questions, please contact support.

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html index 53bab92552..f710dbb289 100644 --- a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html +++ b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -80,10 +82,9 @@

您已被分配为工作空间“{{WorkspaceName}}”的新所有者。

作为新所有者,您现在对该工作空间拥有完全的管理权限。

-

如果您有任何问题,请联系support@dify.ai。

+

如果您有任何问题,请联系支持团队。

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html index 3e7faeb01e..149ec77aea 100644 --- a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html +++ b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -97,7 +99,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -108,12 +111,14 @@

Workspace ownership has been transferred

-

You have successfully transferred ownership of the workspace "{{WorkspaceName}}" to {{NewOwnerEmail}}.

-

You no longer have owner privileges for this workspace. Your access level has been changed to Admin.

-

If you did not initiate this transfer or have concerns about this change, please contact support@dify.ai immediately.

+

You have successfully transferred ownership of the workspace "{{WorkspaceName}}" to + {{NewOwnerEmail}}.

+

You no longer have owner privileges for this workspace. Your access level has been changed to + Admin.

+

If you did not initiate this transfer or have concerns about this change, please contact + support immediately.

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html index 31e3c23140..d7aed40068 100644 --- a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html +++ b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -97,7 +99,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -110,10 +113,9 @@

您已成功将工作空间“{{WorkspaceName}}”的所有权转移给{{NewOwnerEmail}}。

您不再拥有此工作空间的拥有者权限。您的访问级别已更改为管理员。

-

如果您没有发起此转移或对此变更有任何疑问,请立即联系support@dify.ai。

+

如果您没有发起此转移或对此变更有任何疑问,请立即联系支持团队。

- - + \ No newline at end of file diff --git a/api/tests/fixtures/workflow/test-answer-order.yml b/api/tests/fixtures/workflow/test-answer-order.yml new file mode 100644 index 0000000000..3c6631aebb --- /dev/null +++ b/api/tests/fixtures/workflow/test-answer-order.yml @@ -0,0 +1,222 @@ +app: + description: 'this is a chatflow with 2 answer nodes. + + + it''s outouts should like: + + + ``` + + --- answer 1 --- + + + foo + + --- answer 2 --- + + + + + ```' + icon: 🤖 + icon_background: '#FFEAD5' + mode: advanced-chat + name: test-answer-order + use_icon_as_answer_icon: false +dependencies: +- current_identifier: null + type: marketplace + value: + marketplace_plugin_unique_identifier: langgenius/openai:0.2.6@e2665624a156f52160927bceac9e169bd7e5ae6b936ae82575e14c90af390e6e + version: null +kind: app +version: 0.4.0 +workflow: + conversation_variables: [] + environment_variables: [] + features: + file_upload: + allowed_file_extensions: + - .JPG + - .JPEG + - .PNG + - .GIF + - .WEBP + - .SVG + allowed_file_types: + - image + allowed_file_upload_methods: + - local_file + - remote_url + enabled: false + fileUploadConfig: + audio_file_size_limit: 50 + batch_count_limit: 5 + file_size_limit: 15 + image_file_size_limit: 10 + video_file_size_limit: 100 + workflow_file_upload_limit: 10 + image: + enabled: false + number_limits: 3 + transfer_methods: + - local_file + - remote_url + number_limits: 3 + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + language: '' + voice: '' + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: answer + targetType: answer + id: 1759052466526-source-1759052469368-target + source: '1759052466526' + sourceHandle: source + target: '1759052469368' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: llm + id: 1759052439553-source-1759052580454-target + source: '1759052439553' + sourceHandle: source + target: '1759052580454' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: llm + targetType: answer + id: 1759052580454-source-1759052466526-target + source: '1759052580454' + sourceHandle: source + target: '1759052466526' + targetHandle: target + type: custom + zIndex: 0 + nodes: + - data: + selected: false + title: Start + type: start + variables: [] + height: 52 + id: '1759052439553' + position: + x: 30 + y: 242 + positionAbsolute: + x: 30 + y: 242 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + answer: '--- answer 1 --- + + + foo + + ' + selected: false + title: Answer + type: answer + variables: [] + height: 100 + id: '1759052466526' + position: + x: 632 + y: 242 + positionAbsolute: + x: 632 + y: 242 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + answer: '--- answer 2 --- + + + {{#1759052580454.text#}} + + ' + selected: false + title: Answer 2 + type: answer + variables: [] + height: 103 + id: '1759052469368' + position: + x: 934 + y: 242 + positionAbsolute: + x: 934 + y: 242 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + context: + enabled: false + variable_selector: [] + model: + completion_params: + temperature: 0.7 + mode: chat + name: gpt-4o + 
provider: langgenius/openai/openai + prompt_template: + - id: 5c1d873b-06b2-4dce-939e-672882bbd7c0 + role: system + text: '' + - role: user + text: '{{#sys.query#}}' + selected: false + title: LLM + type: llm + vision: + enabled: false + height: 88 + id: '1759052580454' + position: + x: 332 + y: 242 + positionAbsolute: + x: 332 + y: 242 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + viewport: + x: 126.2797574512839 + y: 289.55932160537446 + zoom: 1.0743222672006216 + rag_pipeline_variables: [] diff --git a/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml b/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml new file mode 100644 index 0000000000..ffc6eb9120 --- /dev/null +++ b/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml @@ -0,0 +1,316 @@ +app: + description: 'This chatflow receives a sys.query, writes it into the `answer` variable, + and then outputs the `answer` variable. + + + `answer` is a conversation variable with a blank default value; it will be updated + in an iteration node. + + + if this chatflow works correctly, it will output the `sys.query` as the same.' + icon: 🤖 + icon_background: '#FFEAD5' + mode: advanced-chat + name: update-conversation-variable-in-iteration + use_icon_as_answer_icon: false +dependencies: [] +kind: app +version: 0.4.0 +workflow: + conversation_variables: + - description: '' + id: c30af82d-b2ec-417d-a861-4dd78584faa4 + name: answer + selector: + - conversation + - answer + value: '' + value_type: string + environment_variables: [] + features: + file_upload: + allowed_file_extensions: + - .JPG + - .JPEG + - .PNG + - .GIF + - .WEBP + - .SVG + allowed_file_types: + - image + allowed_file_upload_methods: + - local_file + - remote_url + enabled: false + fileUploadConfig: + audio_file_size_limit: 50 + batch_count_limit: 5 + file_size_limit: 15 + image_file_size_limit: 10 + video_file_size_limit: 100 + workflow_file_upload_limit: 10 + image: + enabled: false + number_limits: 3 + transfer_methods: + - local_file + - remote_url + number_limits: 3 + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + language: '' + voice: '' + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: code + id: 1759032354471-source-1759032363865-target + source: '1759032354471' + sourceHandle: source + target: '1759032363865' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: code + targetType: iteration + id: 1759032363865-source-1759032379989-target + source: '1759032363865' + sourceHandle: source + target: '1759032379989' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + sourceType: iteration-start + targetType: assigner + id: 1759032379989start-source-1759032394460-target + source: 1759032379989start + sourceHandle: source + target: '1759032394460' + targetHandle: target + type: custom + zIndex: 1002 + - data: + isInIteration: false + isInLoop: false + sourceType: iteration + targetType: answer + id: 1759032379989-source-1759032410331-target + source: '1759032379989' + sourceHandle: source + target: '1759032410331' + targetHandle: target + type: custom + zIndex: 0 + - 
data: + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + sourceType: assigner + targetType: code + id: 1759032394460-source-1759032476318-target + source: '1759032394460' + sourceHandle: source + target: '1759032476318' + targetHandle: target + type: custom + zIndex: 1002 + nodes: + - data: + selected: false + title: Start + type: start + variables: [] + height: 52 + id: '1759032354471' + position: + x: 30 + y: 302 + positionAbsolute: + x: 30 + y: 302 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + code: "\ndef main():\n return {\n \"result\": [1],\n }\n" + code_language: python3 + outputs: + result: + children: null + type: array[number] + selected: false + title: Code + type: code + variables: [] + height: 52 + id: '1759032363865' + position: + x: 332 + y: 302 + positionAbsolute: + x: 332 + y: 302 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + error_handle_mode: terminated + height: 204 + is_parallel: false + iterator_input_type: array[number] + iterator_selector: + - '1759032363865' + - result + output_selector: + - '1759032476318' + - result + output_type: array[string] + parallel_nums: 10 + selected: false + start_node_id: 1759032379989start + title: Iteration + type: iteration + width: 808 + height: 204 + id: '1759032379989' + position: + x: 634 + y: 302 + positionAbsolute: + x: 634 + y: 302 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 808 + zIndex: 1 + - data: + desc: '' + isInIteration: true + selected: false + title: '' + type: iteration-start + draggable: false + height: 48 + id: 1759032379989start + parentId: '1759032379989' + position: + x: 60 + y: 78 + positionAbsolute: + x: 694 + y: 380 + selectable: false + sourcePosition: right + targetPosition: left + type: custom-iteration-start + width: 44 + zIndex: 1002 + - data: + isInIteration: true + isInLoop: false + items: + - input_type: variable + operation: over-write + value: + - sys + - query + variable_selector: + - conversation + - answer + write_mode: over-write + iteration_id: '1759032379989' + selected: false + title: Variable Assigner + type: assigner + version: '2' + height: 84 + id: '1759032394460' + parentId: '1759032379989' + position: + x: 204 + y: 60 + positionAbsolute: + x: 838 + y: 362 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + zIndex: 1002 + - data: + answer: '{{#conversation.answer#}}' + selected: false + title: Answer + type: answer + variables: [] + height: 104 + id: '1759032410331' + position: + x: 1502 + y: 302 + positionAbsolute: + x: 1502 + y: 302 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + code: "\ndef main():\n return {\n \"result\": '',\n }\n" + code_language: python3 + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + outputs: + result: + children: null + type: string + selected: false + title: Code 2 + type: code + variables: [] + height: 52 + id: '1759032476318' + parentId: '1759032379989' + position: + x: 506 + y: 76 + positionAbsolute: + x: 1140 + y: 378 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + zIndex: 1002 + viewport: + x: 120.39999999999998 + y: 85.20000000000005 + zoom: 0.7 + rag_pipeline_variables: [] diff --git a/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py b/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py 
index c8d353ad0a..498ac56d5d 100644 --- a/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py +++ b/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py @@ -11,8 +11,8 @@ from controllers.console.app import completion as completion_api from controllers.console.app import message as message_api from controllers.console.app import wraps from libs.datetime_utils import naive_utc_now -from models import Account, App, Tenant -from models.account import TenantAccountRole +from models import App, Tenant +from models.account import Account, TenantAccountJoin, TenantAccountRole from models.model import AppMode from services.app_generate_service import AppGenerateService @@ -25,29 +25,42 @@ class TestChatMessageApiPermissions: """Create a mock App model for testing.""" app = App() app.id = str(uuid.uuid4()) - app.mode = AppMode.CHAT.value + app.mode = AppMode.CHAT app.tenant_id = str(uuid.uuid4()) app.status = "normal" return app @pytest.fixture - def mock_account(self): + def mock_account(self, monkeypatch: pytest.MonkeyPatch): """Create a mock Account for testing.""" - account = Account() - account.id = str(uuid.uuid4()) - account.name = "Test User" - account.email = "test@example.com" + account = Account( + name="Test User", + email="test@example.com", + ) account.last_active_at = naive_utc_now() account.created_at = naive_utc_now() account.updated_at = naive_utc_now() + account.id = str(uuid.uuid4()) # Create mock tenant - tenant = Tenant() + tenant = Tenant(name="Test Tenant") tenant.id = str(uuid.uuid4()) - tenant.name = "Test Tenant" - account._current_tenant = tenant + mock_session_instance = mock.Mock() + + mock_tenant_join = TenantAccountJoin(role=TenantAccountRole.OWNER) + monkeypatch.setattr(mock_session_instance, "scalar", mock.Mock(return_value=mock_tenant_join)) + + mock_scalars_result = mock.Mock() + mock_scalars_result.one.return_value = tenant + monkeypatch.setattr(mock_session_instance, "scalars", mock.Mock(return_value=mock_scalars_result)) + + mock_session_context = mock.Mock() + mock_session_context.__enter__.return_value = mock_session_instance + monkeypatch.setattr("models.account.Session", lambda _, expire_on_commit: mock_session_context) + + account.current_tenant = tenant return account @pytest.mark.parametrize( diff --git a/api/tests/integration_tests/controllers/console/app/test_description_validation.py b/api/tests/integration_tests/controllers/console/app/test_description_validation.py index 2d0ceac760..8160807e48 100644 --- a/api/tests/integration_tests/controllers/console/app/test_description_validation.py +++ b/api/tests/integration_tests/controllers/console/app/test_description_validation.py @@ -18,124 +18,87 @@ class TestAppDescriptionValidationUnit: """Unit tests for description validation function""" def test_validate_description_length_function(self): - """Test the _validate_description_length function directly""" - from controllers.console.app.app import _validate_description_length + """Test the validate_description_length function directly""" + from libs.validators import validate_description_length # Test valid descriptions - assert _validate_description_length("") == "" - assert _validate_description_length("x" * 400) == "x" * 400 - assert _validate_description_length(None) is None + assert validate_description_length("") == "" + assert validate_description_length("x" * 400) == "x" * 400 + assert validate_description_length(None) is None # Test invalid descriptions with 
pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 401) + validate_description_length("x" * 401) assert "Description cannot exceed 400 characters." in str(exc_info.value) with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 500) + validate_description_length("x" * 500) assert "Description cannot exceed 400 characters." in str(exc_info.value) with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 1000) + validate_description_length("x" * 1000) assert "Description cannot exceed 400 characters." in str(exc_info.value) - def test_validation_consistency_with_dataset(self): - """Test that App and Dataset validation functions are consistent""" - from controllers.console.app.app import _validate_description_length as app_validate - from controllers.console.datasets.datasets import _validate_description_length as dataset_validate - from controllers.service_api.dataset.dataset import _validate_description_length as service_dataset_validate - - # Test same valid inputs - valid_desc = "x" * 400 - assert app_validate(valid_desc) == dataset_validate(valid_desc) == service_dataset_validate(valid_desc) - assert app_validate("") == dataset_validate("") == service_dataset_validate("") - assert app_validate(None) == dataset_validate(None) == service_dataset_validate(None) - - # Test same invalid inputs produce same error - invalid_desc = "x" * 401 - - app_error = None - dataset_error = None - service_dataset_error = None - - try: - app_validate(invalid_desc) - except ValueError as e: - app_error = str(e) - - try: - dataset_validate(invalid_desc) - except ValueError as e: - dataset_error = str(e) - - try: - service_dataset_validate(invalid_desc) - except ValueError as e: - service_dataset_error = str(e) - - assert app_error == dataset_error == service_dataset_error - assert app_error == "Description cannot exceed 400 characters." 
- def test_boundary_values(self): """Test boundary values for description validation""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test exact boundary exactly_400 = "x" * 400 - assert _validate_description_length(exactly_400) == exactly_400 + assert validate_description_length(exactly_400) == exactly_400 # Test just over boundary just_over_400 = "x" * 401 with pytest.raises(ValueError): - _validate_description_length(just_over_400) + validate_description_length(just_over_400) # Test just under boundary just_under_400 = "x" * 399 - assert _validate_description_length(just_under_400) == just_under_400 + assert validate_description_length(just_under_400) == just_under_400 def test_edge_cases(self): """Test edge cases for description validation""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test None input - assert _validate_description_length(None) is None + assert validate_description_length(None) is None # Test empty string - assert _validate_description_length("") == "" + assert validate_description_length("") == "" # Test single character - assert _validate_description_length("a") == "a" + assert validate_description_length("a") == "a" # Test unicode characters unicode_desc = "测试" * 200 # 400 characters in Chinese - assert _validate_description_length(unicode_desc) == unicode_desc + assert validate_description_length(unicode_desc) == unicode_desc # Test unicode over limit unicode_over = "测试" * 201 # 402 characters with pytest.raises(ValueError): - _validate_description_length(unicode_over) + validate_description_length(unicode_over) def test_whitespace_handling(self): """Test how validation handles whitespace""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test description with spaces spaces_400 = " " * 400 - assert _validate_description_length(spaces_400) == spaces_400 + assert validate_description_length(spaces_400) == spaces_400 # Test description with spaces over limit spaces_401 = " " * 401 with pytest.raises(ValueError): - _validate_description_length(spaces_401) + validate_description_length(spaces_401) # Test mixed content mixed_400 = "a" * 200 + " " * 200 - assert _validate_description_length(mixed_400) == mixed_400 + assert validate_description_length(mixed_400) == mixed_400 # Test mixed over limit mixed_401 = "a" * 200 + " " * 201 with pytest.raises(ValueError): - _validate_description_length(mixed_401) + validate_description_length(mixed_401) if __name__ == "__main__": diff --git a/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py b/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py index ca4d452963..04945e57a0 100644 --- a/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py +++ b/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py @@ -9,8 +9,8 @@ from flask.testing import FlaskClient from controllers.console.app import model_config as model_config_api from controllers.console.app import wraps from libs.datetime_utils import naive_utc_now -from models import Account, App, Tenant -from models.account import TenantAccountRole +from models import App, Tenant +from models.account import Account, TenantAccountJoin, TenantAccountRole from models.model import AppMode from 
services.app_model_config_service import AppModelConfigService @@ -23,30 +23,40 @@ class TestModelConfigResourcePermissions: """Create a mock App model for testing.""" app = App() app.id = str(uuid.uuid4()) - app.mode = AppMode.CHAT.value + app.mode = AppMode.CHAT app.tenant_id = str(uuid.uuid4()) app.status = "normal" app.app_model_config_id = str(uuid.uuid4()) return app @pytest.fixture - def mock_account(self): + def mock_account(self, monkeypatch: pytest.MonkeyPatch): """Create a mock Account for testing.""" - account = Account() + account = Account(name="Test User", email="test@example.com") account.id = str(uuid.uuid4()) - account.name = "Test User" - account.email = "test@example.com" account.last_active_at = naive_utc_now() account.created_at = naive_utc_now() account.updated_at = naive_utc_now() # Create mock tenant - tenant = Tenant() + tenant = Tenant(name="Test Tenant") tenant.id = str(uuid.uuid4()) - tenant.name = "Test Tenant" - account._current_tenant = tenant + mock_session_instance = mock.Mock() + + mock_tenant_join = TenantAccountJoin(role=TenantAccountRole.OWNER) + monkeypatch.setattr(mock_session_instance, "scalar", mock.Mock(return_value=mock_tenant_join)) + + mock_scalars_result = mock.Mock() + mock_scalars_result.one.return_value = tenant + monkeypatch.setattr(mock_session_instance, "scalars", mock.Mock(return_value=mock_scalars_result)) + + mock_session_context = mock.Mock() + mock_session_context.__enter__.return_value = mock_session_instance + monkeypatch.setattr("models.account.Session", lambda _, expire_on_commit: mock_session_context) + + account.current_tenant = tenant return account @pytest.mark.parametrize( diff --git a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py index aeee882750..f3a5ba0d11 100644 --- a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py +++ b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py @@ -542,7 +542,7 @@ class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase): index=1, node_execution_id=str(uuid.uuid4()), node_id=self._node_id, - node_type=NodeType.LLM.value, + node_type=NodeType.LLM, title="Test Node", inputs='{"input": "test input"}', process_data='{"test_var": "process_value", "other_var": "other_process"}', diff --git a/api/tests/integration_tests/tools/api_tool/test_api_tool.py b/api/tests/integration_tests/tools/api_tool/test_api_tool.py index 7c1a200c8f..e637530265 100644 --- a/api/tests/integration_tests/tools/api_tool/test_api_tool.py +++ b/api/tests/integration_tests/tools/api_tool/test_api_tool.py @@ -36,7 +36,7 @@ def test_api_tool(setup_http_mock): entity=ToolEntity( identity=ToolIdentity(provider="", author="", name="", label=I18nObject(en_US="test tool")), ), - api_bundle=ApiToolBundle(**tool_bundle), + api_bundle=ApiToolBundle.model_validate(tool_bundle), runtime=ToolRuntime(tenant_id="", credentials={"auth_type": "none"}), provider_id="test_tool", ) diff --git a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py index 6d2aff5197..8a43d03a43 100644 --- a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py +++ b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py @@ -1,5 +1,6 @@ import os from collections import UserDict +from typing import Any from unittest.mock import MagicMock import pytest @@ -9,7 +10,6 @@ from pymochow.model.database import Database # type: ignore 
from pymochow.model.enum import IndexState, IndexType, MetricType, ReadConsistency, TableState # type: ignore from pymochow.model.schema import HNSWParams, VectorIndex # type: ignore from pymochow.model.table import Table # type: ignore -from requests.adapters import HTTPAdapter class AttrDict(UserDict): @@ -21,7 +21,7 @@ class MockBaiduVectorDBClass: def mock_vector_db_client( self, config=None, - adapter: HTTPAdapter | None = None, + adapter: Any | None = None, ): self.conn = MagicMock() self._config = MagicMock() diff --git a/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py b/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py index 9706c52455..9e24672317 100644 --- a/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py +++ b/api/tests/integration_tests/vdb/__mock/huaweicloudvectordb.py @@ -44,25 +44,25 @@ class MockClient: "hits": [ { "_source": { - Field.CONTENT_KEY.value: "abcdef", - Field.VECTOR.value: [1, 2], - Field.METADATA_KEY.value: {}, + Field.CONTENT_KEY: "abcdef", + Field.VECTOR: [1, 2], + Field.METADATA_KEY: {}, }, "_score": 1.0, }, { "_source": { - Field.CONTENT_KEY.value: "123456", - Field.VECTOR.value: [2, 2], - Field.METADATA_KEY.value: {}, + Field.CONTENT_KEY: "123456", + Field.VECTOR: [2, 2], + Field.METADATA_KEY: {}, }, "_score": 0.9, }, { "_source": { - Field.CONTENT_KEY.value: "a1b2c3", - Field.VECTOR.value: [3, 2], - Field.METADATA_KEY.value: {}, + Field.CONTENT_KEY: "a1b2c3", + Field.VECTOR: [3, 2], + Field.METADATA_KEY: {}, }, "_score": 0.8, }, diff --git a/api/tests/integration_tests/vdb/__mock/tcvectordb.py b/api/tests/integration_tests/vdb/__mock/tcvectordb.py index e0b908cece..5130fcfe17 100644 --- a/api/tests/integration_tests/vdb/__mock/tcvectordb.py +++ b/api/tests/integration_tests/vdb/__mock/tcvectordb.py @@ -1,9 +1,8 @@ import os -from typing import Union +from typing import Any, Union import pytest from _pytest.monkeypatch import MonkeyPatch -from requests.adapters import HTTPAdapter from tcvectordb import RPCVectorDBClient # type: ignore from tcvectordb.model import enum from tcvectordb.model.collection import FilterIndexConfig @@ -23,7 +22,7 @@ class MockTcvectordbClass: key="", read_consistency: ReadConsistency = ReadConsistency.EVENTUAL_CONSISTENCY, timeout=10, - adapter: HTTPAdapter | None = None, + adapter: Any | None = None, pool_size: int = 2, proxies: dict | None = None, password: str | None = None, diff --git a/api/tests/integration_tests/vdb/__mock/vikingdb.py b/api/tests/integration_tests/vdb/__mock/vikingdb.py index 3ad72e5550..f351df8d5b 100644 --- a/api/tests/integration_tests/vdb/__mock/vikingdb.py +++ b/api/tests/integration_tests/vdb/__mock/vikingdb.py @@ -40,13 +40,13 @@ class MockVikingDBClass: collection_name=collection_name, description="Collection For Dify", viking_db_service=self._viking_db_service, - primary_key=vdb_Field.PRIMARY_KEY.value, + primary_key=vdb_Field.PRIMARY_KEY, fields=[ - Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True), - Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String), - Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text), - Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=768), + Field(field_name=vdb_Field.PRIMARY_KEY, field_type=FieldType.String, is_primary_key=True), + Field(field_name=vdb_Field.METADATA_KEY, field_type=FieldType.String), + Field(field_name=vdb_Field.GROUP_KEY, 
field_type=FieldType.String), + Field(field_name=vdb_Field.CONTENT_KEY, field_type=FieldType.Text), + Field(field_name=vdb_Field.VECTOR, field_type=FieldType.Vector, dim=768), ], indexes=[ Index( @@ -71,7 +71,7 @@ class MockVikingDBClass: return Collection( collection_name=collection_name, description=description, - primary_key=vdb_Field.PRIMARY_KEY.value, + primary_key=vdb_Field.PRIMARY_KEY, viking_db_service=self._viking_db_service, fields=fields, ) @@ -126,11 +126,11 @@ class MockVikingDBClass: def fetch_data(self, id: Union[str, list[str], int, list[int]]): return Data( fields={ - vdb_Field.GROUP_KEY.value: "test_group", - vdb_Field.METADATA_KEY.value: "{}", - vdb_Field.CONTENT_KEY.value: "content", - vdb_Field.PRIMARY_KEY.value: id, - vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + vdb_Field.GROUP_KEY: "test_group", + vdb_Field.METADATA_KEY: "{}", + vdb_Field.CONTENT_KEY: "content", + vdb_Field.PRIMARY_KEY: id, + vdb_Field.VECTOR: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], }, id=id, ) @@ -151,16 +151,16 @@ class MockVikingDBClass: return [ Data( fields={ - vdb_Field.GROUP_KEY.value: "test_group", - vdb_Field.METADATA_KEY.value: '\ + vdb_Field.GROUP_KEY: "test_group", + vdb_Field.METADATA_KEY: '\ {"source": "/var/folders/ml/xxx/xxx.txt", \ "document_id": "test_document_id", \ "dataset_id": "test_dataset_id", \ "doc_id": "test_id", \ "doc_hash": "test_hash"}', - vdb_Field.CONTENT_KEY.value: "content", - vdb_Field.PRIMARY_KEY.value: "test_id", - vdb_Field.VECTOR.value: vector, + vdb_Field.CONTENT_KEY: "content", + vdb_Field.PRIMARY_KEY: "test_id", + vdb_Field.VECTOR: vector, }, id="test_id", score=0.10, @@ -173,16 +173,16 @@ class MockVikingDBClass: return [ Data( fields={ - vdb_Field.GROUP_KEY.value: "test_group", - vdb_Field.METADATA_KEY.value: '\ + vdb_Field.GROUP_KEY: "test_group", + vdb_Field.METADATA_KEY: '\ {"source": "/var/folders/ml/xxx/xxx.txt", \ "document_id": "test_document_id", \ "dataset_id": "test_dataset_id", \ "doc_id": "test_id", \ "doc_hash": "test_hash"}', - vdb_Field.CONTENT_KEY.value: "content", - vdb_Field.PRIMARY_KEY.value: "test_id", - vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + vdb_Field.CONTENT_KEY: "content", + vdb_Field.PRIMARY_KEY: "test_id", + vdb_Field.VECTOR: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], }, id="test_id", score=0.10, diff --git a/api/tests/integration_tests/vdb/opensearch/test_opensearch.py b/api/tests/integration_tests/vdb/opensearch/test_opensearch.py index 2d44dd2924..192c995ce5 100644 --- a/api/tests/integration_tests/vdb/opensearch/test_opensearch.py +++ b/api/tests/integration_tests/vdb/opensearch/test_opensearch.py @@ -129,8 +129,8 @@ class TestOpenSearchVector: "hits": [ { "_source": { - Field.CONTENT_KEY.value: get_example_text(), - Field.METADATA_KEY.value: {"document_id": self.example_doc_id}, + Field.CONTENT_KEY: get_example_text(), + Field.METADATA_KEY: {"document_id": self.example_doc_id}, }, "_score": 1.0, } diff --git a/api/tests/integration_tests/workflow/nodes/test_code.py b/api/tests/integration_tests/workflow/nodes/test_code.py index e2f3a74bf9..b62d8aa544 100644 --- a/api/tests/integration_tests/workflow/nodes/test_code.py +++ b/api/tests/integration_tests/workflow/nodes/test_code.py @@ -1,9 +1,9 @@ import time import uuid -from os import getenv import pytest +from configs import dify_config from core.app.entities.app_invoke_entities import InvokeFrom from 
core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool from core.workflow.enums import WorkflowNodeExecutionStatus @@ -15,7 +15,7 @@ from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock -CODE_MAX_STRING_LENGTH = int(getenv("CODE_MAX_STRING_LENGTH", "10000")) +CODE_MAX_STRING_LENGTH = dify_config.CODE_MAX_STRING_LENGTH def init_code_node(code_config: dict): diff --git a/api/tests/test_containers_integration_tests/conftest.py b/api/tests/test_containers_integration_tests/conftest.py index 243c8d1d62..180ee1c963 100644 --- a/api/tests/test_containers_integration_tests/conftest.py +++ b/api/tests/test_containers_integration_tests/conftest.py @@ -18,6 +18,7 @@ from flask.testing import FlaskClient from sqlalchemy import Engine, text from sqlalchemy.orm import Session from testcontainers.core.container import DockerContainer +from testcontainers.core.network import Network from testcontainers.core.waiting_utils import wait_for_logs from testcontainers.postgres import PostgresContainer from testcontainers.redis import RedisContainer @@ -41,6 +42,7 @@ class DifyTestContainers: def __init__(self): """Initialize container management with default configurations.""" + self.network: Network | None = None self.postgres: PostgresContainer | None = None self.redis: RedisContainer | None = None self.dify_sandbox: DockerContainer | None = None @@ -62,12 +64,18 @@ class DifyTestContainers: logger.info("Starting test containers for Dify integration tests...") + # Create Docker network for container communication + logger.info("Creating Docker network for container communication...") + self.network = Network() + self.network.create() + logger.info("Docker network created successfully with name: %s", self.network.name) + # Start PostgreSQL container for main application database # PostgreSQL is used for storing user data, workflows, and application state logger.info("Initializing PostgreSQL container...") self.postgres = PostgresContainer( image="postgres:14-alpine", - ) + ).with_network(self.network) self.postgres.start() db_host = self.postgres.get_container_host_ip() db_port = self.postgres.get_exposed_port(5432) @@ -137,7 +145,7 @@ class DifyTestContainers: # Start Redis container for caching and session management # Redis is used for storing session data, cache entries, and temporary data logger.info("Initializing Redis container...") - self.redis = RedisContainer(image="redis:6-alpine", port=6379) + self.redis = RedisContainer(image="redis:6-alpine", port=6379).with_network(self.network) self.redis.start() redis_host = self.redis.get_container_host_ip() redis_port = self.redis.get_exposed_port(6379) @@ -153,7 +161,7 @@ class DifyTestContainers: # Start Dify Sandbox container for code execution environment # Dify Sandbox provides a secure environment for executing user code logger.info("Initializing Dify Sandbox container...") - self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest") + self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest").with_network(self.network) self.dify_sandbox.with_exposed_ports(8194) self.dify_sandbox.env = { "API_KEY": "test_api_key", @@ -173,22 +181,28 @@ class DifyTestContainers: # Start Dify Plugin Daemon container for plugin management # Dify Plugin Daemon provides plugin lifecycle management and execution logger.info("Initializing Dify Plugin Daemon container...") - 
self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local") + self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local").with_network( + self.network + ) self.dify_plugin_daemon.with_exposed_ports(5002) + # Get container internal network addresses + postgres_container_name = self.postgres.get_wrapped_container().name + redis_container_name = self.redis.get_wrapped_container().name + self.dify_plugin_daemon.env = { - "DB_HOST": db_host, - "DB_PORT": str(db_port), + "DB_HOST": postgres_container_name, # Use container name for internal network communication + "DB_PORT": "5432", # Use internal port "DB_USERNAME": self.postgres.username, "DB_PASSWORD": self.postgres.password, "DB_DATABASE": "dify_plugin", - "REDIS_HOST": redis_host, - "REDIS_PORT": str(redis_port), + "REDIS_HOST": redis_container_name, # Use container name for internal network communication + "REDIS_PORT": "6379", # Use internal port "REDIS_PASSWORD": "", "SERVER_PORT": "5002", "SERVER_KEY": "test_plugin_daemon_key", "MAX_PLUGIN_PACKAGE_SIZE": "52428800", "PPROF_ENABLED": "false", - "DIFY_INNER_API_URL": f"http://{db_host}:5001", + "DIFY_INNER_API_URL": f"http://{postgres_container_name}:5001", "DIFY_INNER_API_KEY": "test_inner_api_key", "PLUGIN_REMOTE_INSTALLING_HOST": "0.0.0.0", "PLUGIN_REMOTE_INSTALLING_PORT": "5003", @@ -253,6 +267,15 @@ class DifyTestContainers: # Log error but don't fail the test cleanup logger.warning("Failed to stop container %s: %s", container, e) + # Stop and remove the network + if self.network: + try: + logger.info("Removing Docker network...") + self.network.remove() + logger.info("Successfully removed Docker network") + except Exception as e: + logger.warning("Failed to remove Docker network: %s", e) + self._containers_started = False logger.info("All test containers stopped and cleaned up successfully") diff --git a/api/tests/test_containers_integration_tests/services/test_account_service.py b/api/tests/test_containers_integration_tests/services/test_account_service.py index c98406d845..c59fc50f08 100644 --- a/api/tests/test_containers_integration_tests/services/test_account_service.py +++ b/api/tests/test_containers_integration_tests/services/test_account_service.py @@ -8,7 +8,7 @@ from werkzeug.exceptions import Unauthorized from configs import dify_config from controllers.console.error import AccountNotFound, NotAllowedCreateWorkspace -from models.account import AccountStatus, TenantAccountJoin +from models import AccountStatus, TenantAccountJoin from services.account_service import AccountService, RegisterService, TenantService, TokenPair from services.errors.account import ( AccountAlreadyInTenantError, @@ -16,6 +16,7 @@ from services.errors.account import ( AccountPasswordError, AccountRegisterError, CurrentPasswordIncorrectError, + TenantNotFoundError, ) from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkspacesLimitExceededError @@ -63,7 +64,7 @@ class TestAccountService: password=password, ) assert account.email == email - assert account.status == AccountStatus.ACTIVE.value + assert account.status == AccountStatus.ACTIVE # Login with correct password logged_in = AccountService.authenticate(email, password) @@ -184,7 +185,7 @@ class TestAccountService: ) # Ban the account - account.status = AccountStatus.BANNED.value + account.status = AccountStatus.BANNED from extensions.ext_database import db db.session.commit() @@ -268,14 +269,14 @@ class TestAccountService: interface_language="en-US", 
password=password, ) - account.status = AccountStatus.PENDING.value + account.status = AccountStatus.PENDING from extensions.ext_database import db db.session.commit() # Authenticate should activate the account authenticated_account = AccountService.authenticate(email, password) - assert authenticated_account.status == AccountStatus.ACTIVE.value + assert authenticated_account.status == AccountStatus.ACTIVE assert authenticated_account.initialized_at is not None def test_update_account_password_success(self, db_session_with_containers, mock_external_service_dependencies): @@ -469,7 +470,7 @@ class TestAccountService: # Verify integration was created from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider="new-google").first() assert integration is not None @@ -504,7 +505,7 @@ class TestAccountService: # Verify integration was updated from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = ( db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider="exists-google").first() @@ -538,7 +539,7 @@ class TestAccountService: from extensions.ext_database import db db.session.refresh(account) - assert account.status == AccountStatus.CLOSED.value + assert account.status == AccountStatus.CLOSED def test_update_account_fields(self, db_session_with_containers, mock_external_service_dependencies): """ @@ -678,7 +679,7 @@ class TestAccountService: interface_language="en-US", password=password, ) - account.status = AccountStatus.PENDING.value + account.status = AccountStatus.PENDING from extensions.ext_database import db db.session.commit() @@ -687,7 +688,7 @@ class TestAccountService: token_pair = AccountService.login(account) db.session.refresh(account) - assert account.status == AccountStatus.ACTIVE.value + assert account.status == AccountStatus.ACTIVE def test_logout(self, db_session_with_containers, mock_external_service_dependencies): """ @@ -859,7 +860,7 @@ class TestAccountService: ) # Ban the account - account.status = AccountStatus.BANNED.value + account.status = AccountStatus.BANNED from extensions.ext_database import db db.session.commit() @@ -989,7 +990,7 @@ class TestAccountService: ) # Ban the account - account.status = AccountStatus.BANNED.value + account.status = AccountStatus.BANNED from extensions.ext_database import db db.session.commit() @@ -1414,7 +1415,7 @@ class TestTenantService: ) # Try to get current tenant (should fail) - with pytest.raises(AttributeError): + with pytest.raises((AttributeError, TenantNotFoundError)): TenantService.get_current_tenant_by_account(account) def test_switch_tenant_success(self, db_session_with_containers, mock_external_service_dependencies): @@ -2302,7 +2303,7 @@ class TestRegisterService: # Verify account was created from extensions.ext_database import db - from models.account import Account + from models import Account from models.model import DifySetup account = db.session.query(Account).filter_by(email=admin_email).first() @@ -2351,7 +2352,7 @@ class TestRegisterService: # Verify no entities were created (rollback worked) from extensions.ext_database import db - from models.account import Account, Tenant, TenantAccountJoin + from models import Account, Tenant, TenantAccountJoin from models.model import DifySetup account = db.session.query(Account).filter_by(email=admin_email).first() @@ -2445,7 
+2446,7 @@ class TestRegisterService: # Verify OAuth integration was created from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider=provider).first() assert integration is not None @@ -2471,7 +2472,7 @@ class TestRegisterService: mock_external_service_dependencies["billing_service"].is_email_in_freeze.return_value = False # Execute registration with pending status - from models.account import AccountStatus + from models import AccountStatus account = RegisterService.register( email=email, @@ -2660,7 +2661,7 @@ class TestRegisterService: # Verify new account was created with pending status from extensions.ext_database import db - from models.account import Account, TenantAccountJoin + from models import Account, TenantAccountJoin new_account = db.session.query(Account).filter_by(email=new_member_email).first() assert new_account is not None diff --git a/api/tests/test_containers_integration_tests/services/test_agent_service.py b/api/tests/test_containers_integration_tests/services/test_agent_service.py index c572ddc925..ca513319b2 100644 --- a/api/tests/test_containers_integration_tests/services/test_agent_service.py +++ b/api/tests/test_containers_integration_tests/services/test_agent_service.py @@ -5,7 +5,7 @@ import pytest from faker import Faker from core.plugin.impl.exc import PluginDaemonClientSideError -from models.account import Account +from models import Account from models.model import AppModelConfig, Conversation, EndUser, Message, MessageAgentThought from services.account_service import AccountService, TenantService from services.agent_service import AgentService diff --git a/api/tests/test_containers_integration_tests/services/test_annotation_service.py b/api/tests/test_containers_integration_tests/services/test_annotation_service.py index 3cb7424df8..2b03ec1c26 100644 --- a/api/tests/test_containers_integration_tests/services/test_annotation_service.py +++ b/api/tests/test_containers_integration_tests/services/test_annotation_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from werkzeug.exceptions import NotFound -from models.account import Account +from models import Account from models.model import MessageAnnotation from services.annotation_service import AppAnnotationService from services.app_service import AppService @@ -25,9 +25,7 @@ class TestAnnotationService: patch("services.annotation_service.enable_annotation_reply_task") as mock_enable_task, patch("services.annotation_service.disable_annotation_reply_task") as mock_disable_task, patch("services.annotation_service.batch_import_annotations_task") as mock_batch_import_task, - patch( - "services.annotation_service.current_user", create_autospec(Account, instance=True) - ) as mock_current_user, + patch("services.annotation_service.current_account_with_tenant") as mock_current_account_with_tenant, ): # Setup default mock returns mock_account_feature_service.get_features.return_value.billing.enabled = False @@ -38,6 +36,9 @@ class TestAnnotationService: mock_disable_task.delay.return_value = None mock_batch_import_task.delay.return_value = None + # Create mock user that will be returned by current_account_with_tenant + mock_user = create_autospec(Account, instance=True) + yield { "account_feature_service": mock_account_feature_service, "feature_service": mock_feature_service, @@ -47,7 +48,8 @@ class TestAnnotationService: "enable_task": 
mock_enable_task, "disable_task": mock_disable_task, "batch_import_task": mock_batch_import_task, - "current_user": mock_current_user, + "current_account_with_tenant": mock_current_account_with_tenant, + "current_user": mock_user, } def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies): @@ -107,6 +109,11 @@ class TestAnnotationService: """ mock_external_service_dependencies["current_user"].id = account_id mock_external_service_dependencies["current_user"].current_tenant_id = tenant_id + # Configure current_account_with_tenant to return (user, tenant_id) + mock_external_service_dependencies["current_account_with_tenant"].return_value = ( + mock_external_service_dependencies["current_user"], + tenant_id, + ) def _create_test_conversation(self, app, account, fake): """ diff --git a/api/tests/test_containers_integration_tests/services/test_app_generate_service.py b/api/tests/test_containers_integration_tests/services/test_app_generate_service.py index ca0f309fd4..9386687a04 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_generate_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_generate_service.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker -from openai._exceptions import RateLimitError from core.app.entities.app_invoke_entities import InvokeFrom from models.model import EndUser @@ -484,36 +483,6 @@ class TestAppGenerateService: # Verify error message assert "Rate limit exceeded" in str(exc_info.value) - def test_generate_with_rate_limit_error_from_openai( - self, db_session_with_containers, mock_external_service_dependencies - ): - """ - Test generation when OpenAI rate limit error occurs. - """ - fake = Faker() - app, account = self._create_test_app_and_account( - db_session_with_containers, mock_external_service_dependencies, mode="completion" - ) - - # Setup completion generator to raise RateLimitError - mock_response = MagicMock() - mock_response.request = MagicMock() - mock_external_service_dependencies["completion_generator"].return_value.generate.side_effect = RateLimitError( - "Rate limit exceeded", response=mock_response, body=None - ) - - # Setup test arguments - args = {"inputs": {"query": fake.text(max_nb_chars=50)}, "response_mode": "streaming"} - - # Execute the method under test and expect rate limit error - with pytest.raises(InvokeRateLimitError) as exc_info: - AppGenerateService.generate( - app_model=app, user=account, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=True - ) - - # Verify error message - assert "Rate limit exceeded" in str(exc_info.value) - def test_generate_with_invalid_app_mode(self, db_session_with_containers, mock_external_service_dependencies): """ Test generation with invalid app mode. 
diff --git a/api/tests/test_containers_integration_tests/services/test_app_service.py b/api/tests/test_containers_integration_tests/services/test_app_service.py index cbbbbddb21..e53392bcef 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from constants.model_template import default_app_templates -from models.account import Account +from models import Account from models.model import App, Site from services.account_service import AccountService, TenantService from services.app_service import AppService diff --git a/api/tests/test_containers_integration_tests/services/test_file_service.py b/api/tests/test_containers_integration_tests/services/test_file_service.py index 5598c5bc0c..4c94e42f3e 100644 --- a/api/tests/test_containers_integration_tests/services/test_file_service.py +++ b/api/tests/test_containers_integration_tests/services/test_file_service.py @@ -8,7 +8,7 @@ from sqlalchemy import Engine from werkzeug.exceptions import NotFound from configs import dify_config -from models.account import Account, Tenant +from models import Account, Tenant from models.enums import CreatorUserRole from models.model import EndUser, UploadFile from services.errors.file import FileTooLargeError, UnsupportedFileTypeError @@ -86,7 +86,7 @@ class TestFileService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -187,7 +187,7 @@ class TestFileService: assert upload_file.extension == "pdf" assert upload_file.mime_type == mimetype assert upload_file.created_by == account.id - assert upload_file.created_by_role == CreatorUserRole.ACCOUNT.value + assert upload_file.created_by_role == CreatorUserRole.ACCOUNT assert upload_file.used is False assert upload_file.hash == hashlib.sha3_256(content).hexdigest() @@ -216,7 +216,7 @@ class TestFileService: assert upload_file is not None assert upload_file.created_by == end_user.id - assert upload_file.created_by_role == CreatorUserRole.END_USER.value + assert upload_file.created_by_role == CreatorUserRole.END_USER def test_upload_file_with_datasets_source( self, db_session_with_containers, engine, mock_external_service_dependencies diff --git a/api/tests/test_containers_integration_tests/services/test_metadata_service.py b/api/tests/test_containers_integration_tests/services/test_metadata_service.py index d0f7e945f1..c8ced3f3a5 100644 --- a/api/tests/test_containers_integration_tests/services/test_metadata_service.py +++ b/api/tests/test_containers_integration_tests/services/test_metadata_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from core.rag.index_processor.constant.built_in_field import BuiltInField -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding, Document from services.entities.knowledge_entities.knowledge_entities import MetadataArgs from services.metadata_service import MetadataService @@ -17,9 +17,7 @@ class TestMetadataService: def mock_external_service_dependencies(self): """Mock setup for external service dependencies.""" with ( - patch( - "services.metadata_service.current_user", create_autospec(Account, instance=True) - ) as mock_current_user, + 
patch("libs.login.current_user", create_autospec(Account, instance=True)) as mock_current_user, patch("services.metadata_service.redis_client") as mock_redis_client, patch("services.dataset_service.DocumentService") as mock_document_service, ): @@ -72,7 +70,7 @@ class TestMetadataService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py index 66527dd506..8a72331425 100644 --- a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py +++ b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py @@ -103,7 +103,7 @@ class TestModelLoadBalancingService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py index 2196da8b3e..8cb3572c47 100644 --- a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py +++ b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py @@ -5,7 +5,7 @@ from faker import Faker from core.entities.model_entities import ModelStatus from core.model_runtime.entities.model_entities import FetchFrom, ModelType -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.provider import Provider, ProviderModel, ProviderModelSetting, ProviderType from services.model_provider_service import ModelProviderService @@ -67,7 +67,7 @@ class TestModelProviderService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_tag_service.py b/api/tests/test_containers_integration_tests/services/test_tag_service.py index 04cff397b2..6732b8d558 100644 --- a/api/tests/test_containers_integration_tests/services/test_tag_service.py +++ b/api/tests/test_containers_integration_tests/services/test_tag_service.py @@ -5,7 +5,7 @@ from faker import Faker from sqlalchemy import select from werkzeug.exceptions import NotFound -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset from models.model import App, Tag, TagBinding from services.tag_service import TagService @@ -66,7 +66,7 @@ class TestTagService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py index c9ace46c55..bbbf48ede9 100644 --- a/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py +++ 
b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py @@ -5,7 +5,7 @@ from faker import Faker from sqlalchemy import select from core.app.entities.app_invoke_entities import InvokeFrom -from models.account import Account +from models import Account from models.model import Conversation, EndUser from models.web import PinnedConversation from services.account_service import AccountService, TenantService @@ -144,7 +144,7 @@ class TestWebConversationService: system_instruction=fake.text(max_nb_chars=300), system_instruction_tokens=50, status="normal", - invoke_from=InvokeFrom.WEB_APP.value, + invoke_from=InvokeFrom.WEB_APP, from_source="console" if isinstance(user, Account) else "api", from_end_user_id=user.id if isinstance(user, EndUser) else None, from_account_id=user.id if isinstance(user, Account) else None, diff --git a/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py index 316cfe1674..9fc16d9eb7 100644 --- a/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py +++ b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py @@ -7,7 +7,7 @@ from faker import Faker from werkzeug.exceptions import NotFound, Unauthorized from libs.password import hash_password -from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole from models.model import App, Site from services.errors.account import AccountLoginError, AccountNotFoundError, AccountPasswordError from services.webapp_auth_service import WebAppAuthService, WebAppAuthType @@ -87,7 +87,7 @@ class TestWebAppAuthService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -150,7 +150,7 @@ class TestWebAppAuthService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -232,7 +232,7 @@ class TestWebAppAuthService: assert result.id == account.id assert result.email == account.email assert result.name == account.name - assert result.status == AccountStatus.ACTIVE.value + assert result.status == AccountStatus.ACTIVE # Verify database state from extensions.ext_database import db @@ -280,7 +280,7 @@ class TestWebAppAuthService: email=fake.email(), name=fake.name(), interface_language="en-US", - status=AccountStatus.BANNED.value, + status=AccountStatus.BANNED, ) # Hash password @@ -411,7 +411,7 @@ class TestWebAppAuthService: assert result.id == account.id assert result.email == account.email assert result.name == account.name - assert result.status == AccountStatus.ACTIVE.value + assert result.status == AccountStatus.ACTIVE # Verify database state from extensions.ext_database import db @@ -455,7 +455,7 @@ class TestWebAppAuthService: email=unique_email, name=fake.name(), interface_language="en-US", - status=AccountStatus.BANNED.value, + status=AccountStatus.BANNED, ) from extensions.ext_database import db diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py index 2e18184aea..66bd4d3cd9 100644 --- 
a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py @@ -199,7 +199,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), finished_at=datetime.now(UTC), @@ -215,7 +215,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -356,7 +356,7 @@ class TestWorkflowAppService: elapsed_time=1.0 + i, total_tokens=100 + i * 10, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status != "running" else None, @@ -371,7 +371,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -464,7 +464,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=timestamp, finished_at=timestamp + timedelta(minutes=1), @@ -479,7 +479,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=timestamp, ) @@ -571,7 +571,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1), @@ -586,7 +586,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -701,7 +701,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1), @@ -716,7 +716,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -743,7 +743,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.END_USER.value, + created_by_role=CreatorUserRole.END_USER, created_by=end_user.id, created_at=datetime.now(UTC) + timedelta(minutes=i + 10), finished_at=datetime.now(UTC) + 
timedelta(minutes=i + 11), @@ -758,7 +758,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="web-app", - created_by_role=CreatorUserRole.END_USER.value, + created_by_role=CreatorUserRole.END_USER, created_by=end_user.id, created_at=datetime.now(UTC) + timedelta(minutes=i + 10), ) @@ -780,14 +780,39 @@ class TestWorkflowAppService: limit=20, ) assert result_session_filter["total"] == 2 - assert all(log.created_by_role == CreatorUserRole.END_USER.value for log in result_session_filter["data"]) + assert all(log.created_by_role == CreatorUserRole.END_USER for log in result_session_filter["data"]) # Test filtering by account email result_account_filter = service.get_paginate_workflow_app_logs( session=db_session_with_containers, app_model=app, created_by_account=account.email, page=1, limit=20 ) assert result_account_filter["total"] == 3 - assert all(log.created_by_role == CreatorUserRole.ACCOUNT.value for log in result_account_filter["data"]) + assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_account_filter["data"]) + + # Test filtering after the account email has been changed + original_email = account.email + new_email = "changed@example.com" + account.email = new_email + db_session_with_containers.commit() + + assert account.email == new_email + + # Results for the new email are expected to match those previously returned for the original email + result_with_new_email = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, created_by_account=new_email, page=1, limit=20 + ) + assert result_with_new_email["total"] == 3 + assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_with_new_email["data"]) + + # The old email is no longer bound to any account, so it is unexpected input and should raise ValueError + with pytest.raises(ValueError) as exc_info: + service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, created_by_account=original_email, page=1, limit=20 + ) + assert "Account not found" in str(exc_info.value) + + account.email = original_email + db_session_with_containers.commit() # Test filtering by non-existent session ID result_no_session = service.get_paginate_workflow_app_logs( @@ -799,15 +824,16 @@ class TestWorkflowAppService: ) assert result_no_session["total"] == 0 - # Test filtering by non-existent account email - result_no_account = service.get_paginate_workflow_app_logs( - session=db_session_with_containers, - app_model=app, - created_by_account="nonexistent@example.com", - page=1, - limit=20, - ) - assert result_no_account["total"] == 0 + # Filtering by a non-existent account email is unexpected input and should raise ValueError + with pytest.raises(ValueError) as exc_info: + service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_account="nonexistent@example.com", + page=1, + limit=20, + ) + assert "Account not found" in str(exc_info.value) def test_get_paginate_workflow_app_logs_with_uuid_keyword_search( self, db_session_with_containers, mock_external_service_dependencies ) @@ -853,7 +879,7 @@ class TestWorkflowAppService: elapsed_time=1.0, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), finished_at=datetime.now(UTC) + timedelta(minutes=1), @@ -869,7 +895,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", -
created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -943,7 +969,7 @@ class TestWorkflowAppService: elapsed_time=0.0, # Edge case: 0 elapsed time total_tokens=0, # Edge case: 0 tokens total_steps=0, # Edge case: 0 steps - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), finished_at=datetime.now(UTC), @@ -959,7 +985,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -1057,15 +1083,15 @@ class TestWorkflowAppService: assert len(result_no_session["data"]) == 0 # Test with account email that doesn't exist - result_no_account = service.get_paginate_workflow_app_logs( - session=db_session_with_containers, - app_model=app, - created_by_account="nonexistent@example.com", - page=1, - limit=20, - ) - assert result_no_account["total"] == 0 - assert len(result_no_account["data"]) == 0 + with pytest.raises(ValueError) as exc_info: + service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_account="nonexistent@example.com", + page=1, + limit=20, + ) + assert "Account not found" in str(exc_info.value) def test_get_paginate_workflow_app_logs_with_complex_query_combinations( self, db_session_with_containers, mock_external_service_dependencies @@ -1098,7 +1124,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status == "succeeded" else None, @@ -1113,7 +1139,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -1198,7 +1224,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status != "running" else None, @@ -1213,7 +1239,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i), ) @@ -1300,7 +1326,7 @@ class TestWorkflowAppService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j), finished_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j + 1), @@ -1315,7 +1341,7 @@ class TestWorkflowAppService: workflow_id=workflow.id, workflow_run_id=workflow_run.id, created_from="service-api", - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, 
created_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j), ) diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py index 4cb21ef6bd..23c4eeb82f 100644 --- a/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py @@ -130,7 +130,7 @@ class TestWorkflowRunService: elapsed_time=1.5, total_tokens=100, total_steps=3, - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=created_time, finished_at=created_time, @@ -167,7 +167,7 @@ class TestWorkflowRunService: inputs={}, status="normal", mode="chat", - from_source=CreatorUserRole.ACCOUNT.value, + from_source=CreatorUserRole.ACCOUNT, from_account_id=account.id, ) db.session.add(conversation) @@ -188,7 +188,7 @@ class TestWorkflowRunService: message.answer_price_unit = 0.001 message.currency = "USD" message.status = "normal" - message.from_source = CreatorUserRole.ACCOUNT.value + message.from_source = CreatorUserRole.ACCOUNT message.from_account_id = account.id message.workflow_run_id = workflow_run.id message.inputs = {"input": "test input"} @@ -458,7 +458,7 @@ class TestWorkflowRunService: status="succeeded", elapsed_time=0.5, execution_metadata=json.dumps({"tokens": 50}), - created_by_role=CreatorUserRole.ACCOUNT.value, + created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, created_at=datetime.now(UTC), ) @@ -689,7 +689,7 @@ class TestWorkflowRunService: status="succeeded", elapsed_time=0.5, execution_metadata=json.dumps({"tokens": 50}), - created_by_role=CreatorUserRole.END_USER.value, + created_by_role=CreatorUserRole.END_USER, created_by=end_user.id, created_at=datetime.now(UTC), ) @@ -710,4 +710,4 @@ class TestWorkflowRunService: assert node_exec.app_id == app.id assert node_exec.workflow_run_id == workflow_run.id assert node_exec.created_by == end_user.id - assert node_exec.created_by_role == CreatorUserRole.END_USER.value + assert node_exec.created_by_role == CreatorUserRole.END_USER diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_service.py index 60150667ed..4741eba1f5 100644 --- a/api/tests/test_containers_integration_tests/services/test_workflow_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_service.py @@ -44,27 +44,26 @@ class TestWorkflowService: Account: Created test account instance """ fake = fake or Faker() - account = Account() - account.id = fake.uuid4() - account.email = fake.email() - account.name = fake.name() - account.avatar_url = fake.url() - account.tenant_id = fake.uuid4() - account.status = "active" - account.type = "normal" - account.role = "owner" - account.interface_language = "en-US" # Set interface language for Site creation + account = Account( + email=fake.email(), + name=fake.name(), + avatar=fake.url(), + status="active", + interface_language="en-US", # Set interface language for Site creation + ) account.created_at = fake.date_time_this_year() + account.id = fake.uuid4() account.updated_at = account.created_at # Create a tenant for the account from models.account import Tenant - tenant = Tenant() - tenant.id = account.tenant_id - tenant.name = f"Test Tenant {fake.company()}" - tenant.plan = "basic" - tenant.status = "active" + tenant = Tenant( + 
name=f"Test Tenant {fake.company()}", + plan="basic", + status="active", + ) + tenant.id = account.current_tenant_id tenant.created_at = fake.date_time_this_year() tenant.updated_at = tenant.created_at @@ -91,20 +90,21 @@ class TestWorkflowService: App: Created test app instance """ fake = fake or Faker() - app = App() - app.id = fake.uuid4() - app.tenant_id = fake.uuid4() - app.name = fake.company() - app.description = fake.text() - app.mode = AppMode.WORKFLOW - app.icon_type = "emoji" - app.icon = "🤖" - app.icon_background = "#FFEAD5" - app.enable_site = True - app.enable_api = True - app.created_by = fake.uuid4() + app = App( + id=fake.uuid4(), + tenant_id=fake.uuid4(), + name=fake.company(), + description=fake.text(), + mode=AppMode.WORKFLOW, + icon_type="emoji", + icon="🤖", + icon_background="#FFEAD5", + enable_site=True, + enable_api=True, + created_by=fake.uuid4(), + workflow_id=None, # Will be set when workflow is created + ) app.updated_by = app.created_by - app.workflow_id = None # Will be set when workflow is created from extensions.ext_database import db @@ -126,19 +126,20 @@ class TestWorkflowService: Workflow: Created test workflow instance """ fake = fake or Faker() - workflow = Workflow() - workflow.id = fake.uuid4() - workflow.tenant_id = app.tenant_id - workflow.app_id = app.id - workflow.type = WorkflowType.WORKFLOW.value - workflow.version = Workflow.VERSION_DRAFT - workflow.graph = json.dumps({"nodes": [], "edges": []}) - workflow.features = json.dumps({"features": []}) - # unique_hash is a computed property based on graph and features - workflow.created_by = account.id - workflow.updated_by = account.id - workflow.environment_variables = [] - workflow.conversation_variables = [] + workflow = Workflow( + id=fake.uuid4(), + tenant_id=app.tenant_id, + app_id=app.id, + type=WorkflowType.WORKFLOW, + version=Workflow.VERSION_DRAFT, + graph=json.dumps({"nodes": [], "edges": []}), + features=json.dumps({"features": []}), + # unique_hash is a computed property based on graph and features + created_by=account.id, + updated_by=account.id, + environment_variables=[], + conversation_variables=[], + ) from extensions.ext_database import db @@ -175,7 +176,7 @@ class TestWorkflowService: node_execution.node_type = "test_node" node_execution.title = "Test Node" # Required field node_execution.status = "succeeded" - node_execution.created_by_role = CreatorUserRole.ACCOUNT.value # Required field + node_execution.created_by_role = CreatorUserRole.ACCOUNT # Required field node_execution.created_by = account.id # Required field node_execution.created_at = fake.date_time_this_year() diff --git a/api/tests/test_containers_integration_tests/services/test_workspace_service.py b/api/tests/test_containers_integration_tests/services/test_workspace_service.py index 3fd439256d..4249642bc9 100644 --- a/api/tests/test_containers_integration_tests/services/test_workspace_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workspace_service.py @@ -3,7 +3,7 @@ from unittest.mock import patch import pytest from faker import Faker -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from services.workspace_service import WorkspaceService @@ -69,7 +69,7 @@ class TestWorkspaceService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -111,7 +111,7 @@ class 
TestWorkspaceService: assert result["name"] == tenant.name assert result["plan"] == tenant.plan assert result["status"] == tenant.status - assert result["role"] == TenantAccountRole.OWNER.value + assert result["role"] == TenantAccountRole.OWNER assert result["created_at"] == tenant.created_at assert result["trial_end_reason"] is None @@ -159,7 +159,7 @@ class TestWorkspaceService: assert result["name"] == tenant.name assert result["plan"] == tenant.plan assert result["status"] == tenant.status - assert result["role"] == TenantAccountRole.OWNER.value + assert result["role"] == TenantAccountRole.OWNER assert result["created_at"] == tenant.created_at assert result["trial_end_reason"] is None @@ -194,7 +194,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.NORMAL.value + join.role = TenantAccountRole.NORMAL db.session.commit() # Setup mocks for feature service @@ -212,7 +212,7 @@ class TestWorkspaceService: assert result["name"] == tenant.name assert result["plan"] == tenant.plan assert result["status"] == tenant.status - assert result["role"] == TenantAccountRole.NORMAL.value + assert result["role"] == TenantAccountRole.NORMAL assert result["created_at"] == tenant.created_at assert result["trial_end_reason"] is None @@ -245,7 +245,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.ADMIN.value + join.role = TenantAccountRole.ADMIN db.session.commit() # Setup mocks for feature service and tenant service @@ -260,7 +260,7 @@ class TestWorkspaceService: # Assert: Verify the expected outcomes assert result is not None - assert result["role"] == TenantAccountRole.ADMIN.value + assert result["role"] == TenantAccountRole.ADMIN # Verify custom config is included for admin users assert "custom_config" in result @@ -378,7 +378,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.EDITOR.value + join.role = TenantAccountRole.EDITOR db.session.commit() # Setup mocks for feature service and tenant service @@ -394,7 +394,7 @@ class TestWorkspaceService: # Assert: Verify the expected outcomes assert result is not None - assert result["role"] == TenantAccountRole.EDITOR.value + assert result["role"] == TenantAccountRole.EDITOR # Verify custom config is not included for editor users without admin privileges assert "custom_config" not in result @@ -425,7 +425,7 @@ class TestWorkspaceService: from extensions.ext_database import db join = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() - join.role = TenantAccountRole.DATASET_OPERATOR.value + join.role = TenantAccountRole.DATASET_OPERATOR db.session.commit() # Setup mocks for feature service and tenant service @@ -441,7 +441,7 @@ class TestWorkspaceService: # Assert: Verify the expected outcomes assert result is not None - assert result["role"] == TenantAccountRole.DATASET_OPERATOR.value + assert result["role"] == TenantAccountRole.DATASET_OPERATOR # Verify custom config is not included for dataset operators without admin privileges assert "custom_config" not in result diff --git 
a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py index a412bdccf8..0871467a05 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py @@ -3,7 +3,7 @@ from unittest.mock import patch import pytest from faker import Faker -from models.account import Account, Tenant +from models import Account, Tenant from models.tools import ApiToolProvider from services.tools.api_tools_manage_service import ApiToolManageService @@ -72,7 +72,7 @@ class TestApiToolManageService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py index dd22dcbfd1..71d55c3ade 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from core.tools.entities.tool_entities import ToolProviderType -from models.account import Account, Tenant +from models import Account, Tenant from models.tools import MCPToolProvider from services.tools.mcp_tools_manage_service import UNCHANGED_SERVER_URL_PLACEHOLDER, MCPToolManageService @@ -72,7 +72,7 @@ class TestMCPToolManageService: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py index 827f9c010e..ae0c7b7a6b 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py @@ -168,7 +168,7 @@ class TestToolTransformService: """ # Arrange: Setup test data fake = Faker() - provider_type = ToolProviderType.BUILT_IN.value + provider_type = ToolProviderType.BUILT_IN provider_name = fake.company() icon = "🔧" @@ -206,7 +206,7 @@ class TestToolTransformService: """ # Arrange: Setup test data fake = Faker() - provider_type = ToolProviderType.API.value + provider_type = ToolProviderType.API provider_name = fake.company() icon = '{"background": "#FF6B6B", "content": "🔧"}' @@ -231,7 +231,7 @@ class TestToolTransformService: """ # Arrange: Setup test data with invalid JSON fake = Faker() - provider_type = ToolProviderType.API.value + provider_type = ToolProviderType.API provider_name = fake.company() icon = '{"invalid": json}' @@ -257,7 +257,7 @@ class TestToolTransformService: """ # Arrange: Setup test data fake = Faker() - provider_type = ToolProviderType.WORKFLOW.value + provider_type = ToolProviderType.WORKFLOW provider_name = fake.company() icon = {"background": "#FF6B6B", "content": "🔧"} @@ -282,7 +282,7 @@ class TestToolTransformService: """ # Arrange: Setup test data fake = Faker() - provider_type = ToolProviderType.MCP.value + provider_type = 
ToolProviderType.MCP provider_name = fake.company() icon = {"background": "#FF6B6B", "content": "🔧"} @@ -329,7 +329,7 @@ class TestToolTransformService: # Arrange: Setup test data fake = Faker() tenant_id = fake.uuid4() - provider = {"type": ToolProviderType.BUILT_IN.value, "name": fake.company(), "icon": "🔧"} + provider = {"type": ToolProviderType.BUILT_IN, "name": fake.company(), "icon": "🔧"} # Act: Execute the method under test ToolTransformService.repack_provider(tenant_id, provider) diff --git a/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py index 18ab4bb73c..2c5e719a58 100644 --- a/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py +++ b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py @@ -15,7 +15,7 @@ from core.app.app_config.entities import ( ) from core.model_runtime.entities.llm_entities import LLMMode from core.prompt.utils.prompt_template_parser import PromptTemplateParser -from models.account import Account, Tenant +from models import Account, Tenant from models.api_based_extension import APIBasedExtension from models.model import App, AppMode, AppModelConfig from models.workflow import Workflow @@ -66,7 +66,7 @@ class TestWorkflowConverter: mock_config.model = ModelConfigEntity( provider="openai", model="gpt-4", - mode=LLMMode.CHAT.value, + mode=LLMMode.CHAT, parameters={}, stop=[], ) @@ -120,7 +120,7 @@ class TestWorkflowConverter: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -150,7 +150,7 @@ class TestWorkflowConverter: app = App( tenant_id=tenant.id, name=fake.company(), - mode=AppMode.CHAT.value, + mode=AppMode.CHAT, icon_type="emoji", icon="🤖", icon_background="#FF6B6B", @@ -218,7 +218,7 @@ class TestWorkflowConverter: # Assert: Verify the expected outcomes assert new_app is not None assert new_app.name == "Test Workflow App" - assert new_app.mode == AppMode.ADVANCED_CHAT.value + assert new_app.mode == AppMode.ADVANCED_CHAT assert new_app.icon_type == "emoji" assert new_app.icon == "🚀" assert new_app.icon_background == "#4CAF50" @@ -257,7 +257,7 @@ class TestWorkflowConverter: app = App( tenant_id=tenant.id, name=fake.company(), - mode=AppMode.CHAT.value, + mode=AppMode.CHAT, icon_type="emoji", icon="🤖", icon_background="#FF6B6B", @@ -522,7 +522,7 @@ class TestWorkflowConverter: model_config = ModelConfigEntity( provider="openai", model="gpt-4", - mode=LLMMode.CHAT.value, + mode=LLMMode.CHAT, parameters={"temperature": 0.7}, stop=[], ) diff --git a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py index 4600f2addb..68e485107c 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py @@ -6,7 +6,7 @@ from faker import Faker from core.rag.index_processor.constant.index_type import IndexType from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, DatasetAutoDisableLog, 
Document, DocumentSegment from tasks.add_document_to_index_task import add_document_to_index_task @@ -63,7 +63,7 @@ class TestAddDocumentToIndexTask: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py index 3d17a8ac9d..f94c5b19e6 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py @@ -14,7 +14,7 @@ from faker import Faker from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment from models.model import UploadFile from tasks.batch_clean_document_task import batch_clean_document_task @@ -84,7 +84,7 @@ class TestBatchCleanDocumentTask: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py index fcae93c669..1b844d6357 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py @@ -18,7 +18,7 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment from models.enums import CreatorUserRole from models.model import UploadFile @@ -112,7 +112,7 @@ class TestBatchCreateSegmentToIndexTask: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py index e0c2da63b9..45eb9d4f78 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py @@ -17,7 +17,7 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import ( AppDatasetJoin, Dataset, @@ -784,133 +784,6 @@ class TestCleanDatasetTask: print(f"Total cleanup time: {cleanup_duration:.3f} seconds") print(f"Average time per document: {cleanup_duration / len(documents):.3f} seconds") - def test_clean_dataset_task_concurrent_cleanup_scenarios( - self, db_session_with_containers, mock_external_service_dependencies - ): - """ - Test dataset cleanup with concurrent cleanup scenarios and 
race conditions. - - This test verifies that the task can properly: - 1. Handle multiple cleanup operations on the same dataset - 2. Prevent data corruption during concurrent access - 3. Maintain data consistency across multiple cleanup attempts - 4. Handle race conditions gracefully - 5. Ensure idempotent cleanup operations - """ - # Create test data - account, tenant = self._create_test_account_and_tenant(db_session_with_containers) - dataset = self._create_test_dataset(db_session_with_containers, account, tenant) - document = self._create_test_document(db_session_with_containers, account, tenant, dataset) - segment = self._create_test_segment(db_session_with_containers, account, tenant, dataset, document) - upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant) - - # Update document with file reference - import json - - document.data_source_info = json.dumps({"upload_file_id": upload_file.id}) - from extensions.ext_database import db - - db.session.commit() - - # Save IDs for verification - dataset_id = dataset.id - tenant_id = tenant.id - upload_file_id = upload_file.id - - # Mock storage to simulate slow operations - mock_storage = mock_external_service_dependencies["storage"] - original_delete = mock_storage.delete - - def slow_delete(key): - import time - - time.sleep(0.1) # Simulate slow storage operation - return original_delete(key) - - mock_storage.delete.side_effect = slow_delete - - # Execute multiple cleanup operations concurrently - import threading - - cleanup_results = [] - cleanup_errors = [] - - def run_cleanup(): - try: - clean_dataset_task( - dataset_id=dataset_id, - tenant_id=tenant_id, - indexing_technique="high_quality", - index_struct='{"type": "paragraph"}', - collection_binding_id=str(uuid.uuid4()), - doc_form="paragraph_index", - ) - cleanup_results.append("success") - except Exception as e: - cleanup_errors.append(str(e)) - - # Start multiple cleanup threads - threads = [] - for i in range(3): - thread = threading.Thread(target=run_cleanup) - threads.append(thread) - thread.start() - - # Wait for all threads to complete - for thread in threads: - thread.join() - - # Verify results - # Check that all documents were deleted (only once) - remaining_documents = db.session.query(Document).filter_by(dataset_id=dataset_id).all() - assert len(remaining_documents) == 0 - - # Check that all segments were deleted (only once) - remaining_segments = db.session.query(DocumentSegment).filter_by(dataset_id=dataset_id).all() - assert len(remaining_segments) == 0 - - # Check that upload file was deleted (only once) - # Note: In concurrent scenarios, the first thread deletes documents and segments, - # subsequent threads may not find the related data to clean up upload files - # This demonstrates the idempotent nature of the cleanup process - remaining_files = db.session.query(UploadFile).filter_by(id=upload_file_id).all() - # The upload file should be deleted by the first successful cleanup operation - # However, in concurrent scenarios, this may not always happen due to race conditions - # This test demonstrates the idempotent nature of the cleanup process - if len(remaining_files) > 0: - print(f"Warning: Upload file {upload_file_id} was not deleted in concurrent scenario") - print("This is expected behavior demonstrating the idempotent nature of cleanup") - # We don't assert here as the behavior depends on timing and race conditions - - # Verify that storage.delete was called (may be called multiple times in concurrent scenarios) - # In 
concurrent scenarios, storage operations may be called multiple times due to race conditions - assert mock_storage.delete.call_count > 0 - - # Verify that index processor was called (may be called multiple times in concurrent scenarios) - mock_index_processor = mock_external_service_dependencies["index_processor"] - assert mock_index_processor.clean.call_count > 0 - - # Check cleanup results - assert len(cleanup_results) == 3, "All cleanup operations should complete" - assert len(cleanup_errors) == 0, "No cleanup errors should occur" - - # Verify idempotency by running cleanup again on the same dataset - # This should not perform any additional operations since data is already cleaned - clean_dataset_task( - dataset_id=dataset_id, - tenant_id=tenant_id, - indexing_technique="high_quality", - index_struct='{"type": "paragraph"}', - collection_binding_id=str(uuid.uuid4()), - doc_form="paragraph_index", - ) - - # Verify that no additional storage operations were performed - # Note: In concurrent scenarios, the exact count may vary due to race conditions - print(f"Final storage delete calls: {mock_storage.delete.call_count}") - print(f"Final index processor calls: {mock_index_processor.clean.call_count}") - print("Note: Multiple calls in concurrent scenarios are expected due to race conditions") - def test_clean_dataset_task_storage_exception_handling( self, db_session_with_containers, mock_external_service_dependencies ): diff --git a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py index de81295100..8004175b2d 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py @@ -13,7 +13,7 @@ import pytest from faker import Faker from extensions.ext_redis import redis_client -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment from tasks.create_segment_to_index_task import create_segment_to_index_task @@ -91,7 +91,7 @@ class TestCreateSegmentToIndexTask: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py index 7af4f238be..94e9b76965 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_delete_segment_from_index_task.py @@ -48,11 +48,8 @@ class TestDeleteSegmentFromIndexTask: Tenant: Created test tenant instance """ fake = fake or Faker() - tenant = Tenant() + tenant = Tenant(name=f"Test Tenant {fake.company()}", plan="basic", status="active") tenant.id = fake.uuid4() - tenant.name = f"Test Tenant {fake.company()}" - tenant.plan = "basic" - tenant.status = "active" tenant.created_at = fake.date_time_this_year() tenant.updated_at = tenant.created_at @@ -73,16 +70,14 @@ class TestDeleteSegmentFromIndexTask: Account: Created test account instance """ fake = fake or Faker() - account = Account() + account = Account( + name=fake.name(), + email=fake.email(), + 
avatar=fake.url(), + status="active", + interface_language="en-US", + ) account.id = fake.uuid4() - account.email = fake.email() - account.name = fake.name() - account.avatar_url = fake.url() - account.tenant_id = tenant.id - account.status = "active" - account.type = "normal" - account.role = "owner" - account.interface_language = "en-US" account.created_at = fake.date_time_this_year() account.updated_at = account.created_at diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py index e1d63e993b..8785c948d1 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py @@ -16,7 +16,7 @@ from faker import Faker from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment from tasks.disable_segment_from_index_task import disable_segment_from_index_task @@ -69,7 +69,7 @@ class TestDisableSegmentFromIndexTask: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py index 5fdb8c617c..0b36e0914a 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py @@ -43,27 +43,30 @@ class TestDisableSegmentsFromIndexTask: Account: Created test account instance """ fake = fake or Faker() - account = Account() + account = Account( + email=fake.email(), + name=fake.name(), + avatar=fake.url(), + status="active", + interface_language="en-US", + ) account.id = fake.uuid4() - account.email = fake.email() - account.name = fake.name() - account.avatar_url = fake.url() + # monkey-patch attributes for test setup account.tenant_id = fake.uuid4() - account.status = "active" account.type = "normal" account.role = "owner" - account.interface_language = "en-US" account.created_at = fake.date_time_this_year() account.updated_at = account.created_at # Create a tenant for the account from models.account import Tenant - tenant = Tenant() + tenant = Tenant( + name=f"Test Tenant {fake.company()}", + plan="basic", + status="active", + ) tenant.id = account.tenant_id - tenant.name = f"Test Tenant {fake.company()}" - tenant.plan = "basic" - tenant.status = "active" tenant.created_at = fake.date_time_this_year() tenant.updated_at = tenant.created_at @@ -91,20 +94,21 @@ class TestDisableSegmentsFromIndexTask: Dataset: Created test dataset instance """ fake = fake or Faker() - dataset = Dataset() - dataset.id = fake.uuid4() - dataset.tenant_id = account.tenant_id - dataset.name = f"Test Dataset {fake.word()}" - dataset.description = fake.text(max_nb_chars=200) - dataset.provider = "vendor" - dataset.permission = "only_me" - dataset.data_source_type = "upload_file" - dataset.indexing_technique = "high_quality" - dataset.created_by = account.id - dataset.updated_by = 
account.id - dataset.embedding_model = "text-embedding-ada-002" - dataset.embedding_model_provider = "openai" - dataset.built_in_field_enabled = False + dataset = Dataset( + id=fake.uuid4(), + tenant_id=account.tenant_id, + name=f"Test Dataset {fake.word()}", + description=fake.text(max_nb_chars=200), + provider="vendor", + permission="only_me", + data_source_type="upload_file", + indexing_technique="high_quality", + created_by=account.id, + updated_by=account.id, + embedding_model="text-embedding-ada-002", + embedding_model_provider="openai", + built_in_field_enabled=False, + ) from extensions.ext_database import db @@ -128,6 +132,7 @@ class TestDisableSegmentsFromIndexTask: """ fake = fake or Faker() document = DatasetDocument() + document.id = fake.uuid4() document.tenant_id = dataset.tenant_id document.dataset_id = dataset.id @@ -153,7 +158,6 @@ class TestDisableSegmentsFromIndexTask: document.archived = False document.doc_form = "text_model" # Use text_model form for testing document.doc_language = "en" - from extensions.ext_database import db db.session.add(document) diff --git a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py index f75dcf06e1..448f6da5ec 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from extensions.ext_database import db -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document from tasks.document_indexing_task import document_indexing_task @@ -72,7 +72,7 @@ class TestDocumentIndexingTask: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) @@ -154,7 +154,7 @@ class TestDocumentIndexingTask: join = TenantAccountJoin( tenant_id=tenant.id, account_id=account.id, - role=TenantAccountRole.OWNER.value, + role=TenantAccountRole.OWNER, current=True, ) db.session.add(join) diff --git a/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py new file mode 100644 index 0000000000..798fe091ab --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py @@ -0,0 +1,450 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from core.rag.index_processor.constant.index_type import IndexType +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models.dataset import Dataset, Document, DocumentSegment +from tasks.enable_segments_to_index_task import enable_segments_to_index_task + + +class TestEnableSegmentsToIndexTask: + """Integration tests for enable_segments_to_index_task using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.enable_segments_to_index_task.IndexProcessorFactory") as mock_index_processor_factory, + ): + # Setup mock index processor + mock_processor = MagicMock() + 
mock_index_processor_factory.return_value.init_index_processor.return_value = mock_processor + + yield { + "index_processor_factory": mock_index_processor_factory, + "index_processor": mock_processor, + } + + def _create_test_dataset_and_document(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test dataset and document for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (dataset, document) - Created dataset and document instances + """ + fake = Faker() + + # Create account and tenant + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db.session.add(account) + db.session.commit() + + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + current=True, + ) + db.session.add(join) + db.session.commit() + + # Create dataset + dataset = Dataset( + id=fake.uuid4(), + tenant_id=tenant.id, + name=fake.company(), + description=fake.text(max_nb_chars=100), + data_source_type="upload_file", + indexing_technique="high_quality", + created_by=account.id, + ) + db.session.add(dataset) + db.session.commit() + + # Create document + document = Document( + id=fake.uuid4(), + tenant_id=tenant.id, + dataset_id=dataset.id, + position=1, + data_source_type="upload_file", + batch="test_batch", + name=fake.file_name(), + created_from="upload_file", + created_by=account.id, + indexing_status="completed", + enabled=True, + doc_form=IndexType.PARAGRAPH_INDEX, + ) + db.session.add(document) + db.session.commit() + + # Refresh dataset to ensure doc_form property works correctly + db.session.refresh(dataset) + + return dataset, document + + def _create_test_segments( + self, db_session_with_containers, document, dataset, count=3, enabled=False, status="completed" + ): + """ + Helper method to create test document segments. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + document: Document instance + dataset: Dataset instance + count: Number of segments to create + enabled: Whether segments should be enabled + status: Status of the segments + + Returns: + list: List of created DocumentSegment instances + """ + fake = Faker() + segments = [] + + for i in range(count): + text = fake.text(max_nb_chars=200) + segment = DocumentSegment( + id=fake.uuid4(), + tenant_id=document.tenant_id, + dataset_id=dataset.id, + document_id=document.id, + position=i, + content=text, + word_count=len(text.split()), + tokens=len(text.split()) * 2, + index_node_id=f"node_{i}", + index_node_hash=f"hash_{i}", + enabled=enabled, + status=status, + created_by=document.created_by, + ) + db.session.add(segment) + segments.append(segment) + + db.session.commit() + return segments + + def test_enable_segments_to_index_with_different_index_type( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test segments indexing with different index types. 
+ + This test verifies: + - Proper handling of different index types + - Index processor factory integration + - Document processing with various configurations + - Redis cache key deletion + """ + # Arrange: Create test data with different index type + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Update document to use different index type + document.doc_form = IndexType.QA_INDEX + db.session.commit() + + # Refresh dataset to ensure doc_form property reflects the updated document + db.session.refresh(dataset) + + # Create segments + segments = self._create_test_segments(db_session_with_containers, document, dataset) + + # Set up Redis cache keys + segment_ids = [segment.id for segment in segments] + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + redis_client.set(indexing_cache_key, "processing", ex=300) + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify different index type handling + mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.QA_INDEX) + mock_external_service_dependencies["index_processor"].load.assert_called_once() + + # Verify the load method was called with correct parameters + call_args = mock_external_service_dependencies["index_processor"].load.call_args + assert call_args is not None + documents = call_args[0][1] # Second argument should be documents list + assert len(documents) == 3 + + # Verify Redis cache keys were deleted + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 0 + + def test_enable_segments_to_index_dataset_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling of non-existent dataset. + + This test verifies: + - Proper error handling for missing datasets + - Early return without processing + - Database session cleanup + - No unnecessary index processor calls + """ + # Arrange: Use non-existent dataset ID + fake = Faker() + non_existent_dataset_id = fake.uuid4() + non_existent_document_id = fake.uuid4() + segment_ids = [fake.uuid4()] + + # Act: Execute the task with non-existent dataset + enable_segments_to_index_task(segment_ids, non_existent_dataset_id, non_existent_document_id) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["index_processor_factory"].assert_not_called() + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + def test_enable_segments_to_index_document_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling of non-existent document. 
+ + This test verifies: + - Proper error handling for missing documents + - Early return without processing + - Database session cleanup + - No unnecessary index processor calls + """ + # Arrange: Create dataset but use non-existent document ID + dataset, _ = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + fake = Faker() + non_existent_document_id = fake.uuid4() + segment_ids = [fake.uuid4()] + + # Act: Execute the task with non-existent document + enable_segments_to_index_task(segment_ids, dataset.id, non_existent_document_id) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["index_processor_factory"].assert_not_called() + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + def test_enable_segments_to_index_invalid_document_status( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling of document with invalid status. + + This test verifies: + - Early return when document is disabled, archived, or not completed + - No index processing for documents not ready for indexing + - Proper database session cleanup + - No unnecessary external service calls + """ + # Arrange: Create test data with invalid document status + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Test different invalid statuses + invalid_statuses = [ + ("disabled", {"enabled": False}), + ("archived", {"archived": True}), + ("not_completed", {"indexing_status": "processing"}), + ] + + for _, status_attrs in invalid_statuses: + # Reset document status + document.enabled = True + document.archived = False + document.indexing_status = "completed" + db.session.commit() + + # Set invalid status + for attr, value in status_attrs.items(): + setattr(document, attr, value) + db.session.commit() + + # Create segments + segments = self._create_test_segments(db_session_with_containers, document, dataset) + segment_ids = [segment.id for segment in segments] + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["index_processor_factory"].assert_not_called() + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + # Clean up segments for next iteration + for segment in segments: + db.session.delete(segment) + db.session.commit() + + def test_enable_segments_to_index_segments_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling when no segments are found. 
+ + This test verifies: + - Proper handling when segments don't exist + - Early return without processing + - Database session cleanup + - Index processor is created but load is not called + """ + # Arrange: Create test data + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Use non-existent segment IDs + fake = Faker() + non_existent_segment_ids = [fake.uuid4() for _ in range(3)] + + # Act: Execute the task with non-existent segments + enable_segments_to_index_task(non_existent_segment_ids, dataset.id, document.id) + + # Assert: Verify index processor was created but load was not called + mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX) + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + def test_enable_segments_to_index_with_parent_child_structure( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test segments indexing with parent-child structure. + + This test verifies: + - Proper handling of PARENT_CHILD_INDEX type + - Child document creation from segments + - Correct document structure for parent-child indexing + - Index processor receives properly structured documents + - Redis cache key deletion + """ + # Arrange: Create test data with parent-child index type + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Update document to use parent-child index type + document.doc_form = IndexType.PARENT_CHILD_INDEX + db.session.commit() + + # Refresh dataset to ensure doc_form property reflects the updated document + db.session.refresh(dataset) + + # Create segments with mock child chunks + segments = self._create_test_segments(db_session_with_containers, document, dataset) + + # Set up Redis cache keys + segment_ids = [segment.id for segment in segments] + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + redis_client.set(indexing_cache_key, "processing", ex=300) + + # Mock the get_child_chunks method for each segment + with patch.object(DocumentSegment, "get_child_chunks") as mock_get_child_chunks: + # Setup mock to return child chunks for each segment + mock_child_chunks = [] + for i in range(2): # Each segment has 2 child chunks + mock_child = MagicMock() + mock_child.content = f"child_content_{i}" + mock_child.index_node_id = f"child_node_{i}" + mock_child.index_node_hash = f"child_hash_{i}" + mock_child_chunks.append(mock_child) + + mock_get_child_chunks.return_value = mock_child_chunks + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify parent-child index processing + mock_external_service_dependencies["index_processor_factory"].assert_called_once_with( + IndexType.PARENT_CHILD_INDEX + ) + mock_external_service_dependencies["index_processor"].load.assert_called_once() + + # Verify the load method was called with correct parameters + call_args = mock_external_service_dependencies["index_processor"].load.call_args + assert call_args is not None + documents = call_args[0][1] # Second argument should be documents list + assert len(documents) == 3 # 3 segments + + # Verify each document has children + for doc in documents: + assert hasattr(doc, "children") + assert len(doc.children) == 2 # Each document has 2 children + + # Verify Redis cache keys were deleted + for segment in segments: + 
indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 0 + + def test_enable_segments_to_index_general_exception_handling( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test general exception handling during indexing process. + + This test verifies: + - Exceptions are properly caught and handled + - Segment status is set to error + - Segments are disabled + - Error information is recorded + - Redis cache is still cleared + - Database session is properly closed + """ + # Arrange: Create test data + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + segments = self._create_test_segments(db_session_with_containers, document, dataset) + + # Set up Redis cache keys + segment_ids = [segment.id for segment in segments] + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + redis_client.set(indexing_cache_key, "processing", ex=300) + + # Mock the index processor to raise an exception + mock_external_service_dependencies["index_processor"].load.side_effect = Exception("Index processing failed") + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify error handling + for segment in segments: + db.session.refresh(segment) + assert segment.enabled is False + assert segment.status == "error" + assert segment.error is not None + assert "Index processing failed" in segment.error + assert segment.disabled_at is not None + + # Verify Redis cache keys were still cleared despite error + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 0 diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py new file mode 100644 index 0000000000..31e9b67421 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py @@ -0,0 +1,242 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from extensions.ext_database import db +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_account_deletion_task import send_account_deletion_verification_code, send_deletion_success_task + + +class TestMailAccountDeletionTask: + """Integration tests for mail account deletion tasks using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_account_deletion_task.mail") as mock_mail, + patch("tasks.mail_account_deletion_task.get_email_i18n_service") as mock_get_email_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email service + mock_email_service = MagicMock() + mock_get_email_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "get_email_service": mock_get_email_service, + "email_service": mock_email_service, + } + + def _create_test_account(self, db_session_with_containers): + """ + Helper method to create a test account for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + Account: Created account instance + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db.session.add(account) + db.session.commit() + + # Create tenant + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + current=True, + ) + db.session.add(join) + db.session.commit() + + return account + + def test_send_deletion_success_task_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful account deletion success email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls + - Template context is properly formatted + - Email type is correctly specified + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_language = "en-US" + + # Act: Execute the task + send_deletion_success_task(test_email, test_language) + + # Assert: Verify the expected outcomes + # Verify mail service was checked + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + + # Verify email service was retrieved + mock_external_service_dependencies["get_email_service"].assert_called_once() + + # Verify email was sent with correct parameters + mock_external_service_dependencies["email_service"].send_email.assert_called_once_with( + email_type=EmailType.ACCOUNT_DELETION_SUCCESS, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) + + def test_send_deletion_success_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion success email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Setup mail service to return not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + account = self._create_test_account(db_session_with_containers) + test_email = account.email + + # Act: Execute the task + send_deletion_success_task(test_email) + + # Assert: Verify no email service calls were made + mock_external_service_dependencies["get_email_service"].assert_not_called() + mock_external_service_dependencies["email_service"].send_email.assert_not_called() + + def test_send_deletion_success_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion success email when email service raises exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + - Error logging is recorded + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed") + account = self._create_test_account(db_session_with_containers) + test_email = account.email + + # Act: Execute the task (should not raise exception) + send_deletion_success_task(test_email) + + # Assert: Verify email service was called but exception was handled + mock_external_service_dependencies["email_service"].send_email.assert_called_once() + + def test_send_account_deletion_verification_code_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful account deletion verification code email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls + - Template context includes verification code + - Email type is correctly specified + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_account_deletion_verification_code(test_email, test_code, test_language) + + # Assert: Verify the expected outcomes + # Verify mail service was checked + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + + # Verify email service was retrieved + mock_external_service_dependencies["get_email_service"].assert_called_once() + + # Verify email was sent with correct parameters + mock_external_service_dependencies["email_service"].send_email.assert_called_once_with( + email_type=EmailType.ACCOUNT_DELETION_VERIFICATION, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_account_deletion_verification_code_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion verification code email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Setup mail service to return not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_code = "123456" + + # Act: Execute the task + send_account_deletion_verification_code(test_email, test_code) + + # Assert: Verify no email service calls were made + mock_external_service_dependencies["get_email_service"].assert_not_called() + mock_external_service_dependencies["email_service"].send_email.assert_not_called() + + def test_send_account_deletion_verification_code_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion verification code email when email service raises exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + - Error logging is recorded + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed") + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_code = "123456" + + # Act: Execute the task (should not raise exception) + send_account_deletion_verification_code(test_email, test_code) + + # Assert: Verify email service was called but exception was handled + mock_external_service_dependencies["email_service"].send_email.assert_called_once() diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py new file mode 100644 index 0000000000..1aed7dc7cc --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py @@ -0,0 +1,282 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_change_mail_task import send_change_mail_completed_notification_task, send_change_mail_task + + +class TestMailChangeMailTask: + """Integration tests for mail_change_mail_task using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_change_mail_task.mail") as mock_mail, + patch("tasks.mail_change_mail_task.get_email_i18n_service") as mock_get_email_i18n_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email i18n service + mock_email_service = MagicMock() + mock_get_email_i18n_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "email_i18n_service": mock_email_service, + "get_email_i18n_service": mock_get_email_i18n_service, + } + + def _create_test_account(self, db_session_with_containers): + """ + Helper method to create a test account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + Account: Created account instance + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db_session_with_containers.add(account) + db_session_with_containers.commit() + + # Create tenant + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + current=True, + ) + db_session_with_containers.add(join) + db_session_with_containers.commit() + + return account + + def test_send_change_mail_task_success_old_email_phase( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email task execution for old_email phase. 
+ + This test verifies: + - Proper mail service initialization check + - Correct email service method call with old_email phase + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "en-US" + test_email = account.email + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_task_success_new_email_phase( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email task execution for new_email phase. + + This test verifies: + - Proper mail service initialization check + - Correct email service method call with new_email phase + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "zh-Hans" + test_email = "new@example.com" + test_code = "789012" + test_phase = "new_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email task when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls when mail is not available + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify no email service calls + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_not_called() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_not_called() + + def test_send_change_mail_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email task when email service raises an exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_i18n_service"].send_change_email.side_effect = Exception( + "Email service failed" + ) + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task (should not raise exception) + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify email service was called despite exception + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_completed_notification_task_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email completed notification task execution. + + This test verifies: + - Proper mail service initialization check + - Correct email service method call with CHANGE_EMAIL_COMPLETED type + - Template context is properly constructed + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "en-US" + test_email = account.email + + # Act: Execute the task + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with( + email_type=EmailType.CHANGE_EMAIL_COMPLETED, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) + + def test_send_change_mail_completed_notification_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email completed notification task when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls when mail is not available + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + test_language = "en-US" + test_email = "test@example.com" + + # Act: Execute the task + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify no email service calls + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_not_called() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_not_called() + + def test_send_change_mail_completed_notification_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email completed notification task when email service raises an exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_i18n_service"].send_email.side_effect = Exception( + "Email service failed" + ) + test_language = "en-US" + test_email = "test@example.com" + + # Act: Execute the task (should not raise exception) + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify email service was called despite exception + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with( + email_type=EmailType.CHANGE_EMAIL_COMPLETED, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py new file mode 100644 index 0000000000..e6a804784a --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py @@ -0,0 +1,598 @@ +""" +TestContainers-based integration tests for send_email_code_login_mail_task. + +This module provides comprehensive integration tests for the email code login mail task +using TestContainers infrastructure. The tests ensure that the task properly sends +email verification codes for login with internationalization support and handles +various error scenarios in a real database environment. + +All tests use the testcontainers infrastructure to ensure proper database isolation +and realistic testing scenarios with actual PostgreSQL and Redis instances. +""" + +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_email_code_login import send_email_code_login_mail_task + + +class TestSendEmailCodeLoginMailTask: + """ + Comprehensive integration tests for send_email_code_login_mail_task using testcontainers. + + This test class covers all major functionality of the email code login mail task: + - Successful email sending with different languages + - Email service integration and template rendering + - Error handling for various failure scenarios + - Performance metrics and logging verification + - Edge cases and boundary conditions + + All tests use the testcontainers infrastructure to ensure proper database isolation + and realistic testing environment with actual database interactions. 
+ """ + + @pytest.fixture(autouse=True) + def cleanup_database(self, db_session_with_containers): + """Clean up database before each test to ensure isolation.""" + from extensions.ext_redis import redis_client + + # Clear all test data + db_session_with_containers.query(TenantAccountJoin).delete() + db_session_with_containers.query(Tenant).delete() + db_session_with_containers.query(Account).delete() + db_session_with_containers.commit() + + # Clear Redis cache + redis_client.flushdb() + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_email_code_login.mail") as mock_mail, + patch("tasks.mail_email_code_login.get_email_i18n_service") as mock_email_service, + ): + # Setup default mock returns + mock_mail.is_inited.return_value = True + + # Mock email service + mock_email_service_instance = MagicMock() + mock_email_service_instance.send_email.return_value = None + mock_email_service.return_value = mock_email_service_instance + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "email_service_instance": mock_email_service_instance, + } + + def _create_test_account(self, db_session_with_containers, fake=None): + """ + Helper method to create a test account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + fake: Faker instance for generating test data + + Returns: + Account: Created account instance + """ + if fake is None: + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + db_session_with_containers.add(account) + db_session_with_containers.commit() + + return account + + def _create_test_tenant_and_account(self, db_session_with_containers, fake=None): + """ + Helper method to create a test tenant and account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + fake: Faker instance for generating test data + + Returns: + tuple: (Account, Tenant) created instances + """ + if fake is None: + fake = Faker() + + # Create account using the existing helper method + account = self._create_test_account(db_session_with_containers, fake) + + # Create tenant + tenant = Tenant( + name=fake.company(), + plan="basic", + status="active", + ) + + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account relationship + tenant_account_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + ) + + db_session_with_containers.add(tenant_account_join) + db_session_with_containers.commit() + + return account, tenant + + def test_send_email_code_login_mail_task_success_english( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending in English. + + This test verifies that the task can successfully: + 1. Send email code login mail with English language + 2. Use proper email service integration + 3. Pass correct template context to email service + 4. Log performance metrics correctly + 5. 
Complete task execution without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_mail = mock_external_service_dependencies["mail"] + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was called with correct parameters + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_success_chinese( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending in Chinese. + + This test verifies that the task can successfully: + 1. Send email code login mail with Chinese language + 2. Handle different language codes properly + 3. Use correct template context for Chinese emails + 4. Complete task execution without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "789012" + test_language = "zh-Hans" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called with Chinese language + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_success_multiple_languages( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending with multiple languages. + + This test verifies that the task can successfully: + 1. Handle various language codes correctly + 2. Send emails with different language configurations + 3. Maintain proper template context for each language + 4. 
Complete multiple task executions without conflicts + """ + # Arrange: Setup test data + fake = Faker() + test_languages = ["en-US", "zh-Hans", "zh-CN", "ja-JP", "ko-KR"] + test_emails = [fake.email() for _ in test_languages] + test_codes = [fake.numerify("######") for _ in test_languages] + + # Act: Execute the task for each language + for i, language in enumerate(test_languages): + send_email_code_login_mail_task( + language=language, + to=test_emails[i], + code=test_codes[i], + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called for each language + assert mock_email_service_instance.send_email.call_count == len(test_languages) + + # Verify each call had correct parameters + for i, language in enumerate(test_languages): + call_args = mock_email_service_instance.send_email.call_args_list[i] + assert call_args[1]["email_type"] == EmailType.EMAIL_CODE_LOGIN + assert call_args[1]["language_code"] == language + assert call_args[1]["to"] == test_emails[i] + assert call_args[1]["template_context"]["code"] == test_codes[i] + + def test_send_email_code_login_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task when mail service is not initialized. + + This test verifies that the task can properly: + 1. Check mail service initialization status + 2. Return early when mail is not initialized + 3. Not attempt to send email when service is unavailable + 4. Handle gracefully without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Mock mail service as not initialized + mock_mail = mock_external_service_dependencies["mail"] + mock_mail.is_inited.return_value = False + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was not called + mock_email_service_instance.send_email.assert_not_called() + + def test_send_email_code_login_mail_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task when email service raises an exception. + + This test verifies that the task can properly: + 1. Handle email service exceptions gracefully + 2. Log appropriate error messages + 3. Continue execution without crashing + 4. 
Maintain proper error handling + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Mock email service to raise an exception + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.send_email.side_effect = Exception("Email service unavailable") + + # Act: Execute the task - it should handle the exception gracefully + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_mail = mock_external_service_dependencies["mail"] + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was called (and failed) + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_invalid_parameters( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with invalid parameters. + + This test verifies that the task can properly: + 1. Handle empty or None email addresses + 2. Process empty or None verification codes + 3. Handle invalid language codes + 4. Maintain proper error handling for invalid inputs + """ + # Arrange: Setup test data + fake = Faker() + test_language = "en-US" + + # Test cases for invalid parameters + invalid_test_cases = [ + {"email": "", "code": "123456", "description": "empty email"}, + {"email": None, "code": "123456", "description": "None email"}, + {"email": fake.email(), "code": "", "description": "empty code"}, + {"email": fake.email(), "code": None, "description": "None code"}, + {"email": "invalid-email", "code": "123456", "description": "invalid email format"}, + ] + + for test_case in invalid_test_cases: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + + # Act: Execute the task with invalid parameters + send_email_code_login_mail_task( + language=test_language, + to=test_case["email"], + code=test_case["code"], + ) + + # Assert: Verify that email service was still called + # The task should pass parameters to email service as-is + # and let the email service handle validation + mock_email_service_instance.send_email.assert_called_once() + + def test_send_email_code_login_mail_task_edge_cases( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with edge cases and boundary conditions. + + This test verifies that the task can properly: + 1. Handle very long email addresses + 2. Process very long verification codes + 3. Handle special characters in parameters + 4. 
Process extreme language codes + """ + # Arrange: Setup test data + fake = Faker() + test_language = "en-US" + + # Edge case test data + edge_cases = [ + { + "email": "a" * 100 + "@example.com", # Very long email + "code": "1" * 20, # Very long code + "description": "very long email and code", + }, + { + "email": "test+tag@example.com", # Email with special characters + "code": "123-456", # Code with special characters + "description": "special characters", + }, + { + "email": "test@sub.domain.example.com", # Complex domain + "code": "000000", # All zeros + "description": "complex domain and all zeros code", + }, + { + "email": "test@example.co.uk", # International domain + "code": "999999", # All nines + "description": "international domain and all nines code", + }, + ] + + for test_case in edge_cases: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + + # Act: Execute the task with edge case data + send_email_code_login_mail_task( + language=test_language, + to=test_case["email"], + code=test_case["code"], + ) + + # Assert: Verify that email service was called with edge case data + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_case["email"], + template_context={ + "to": test_case["email"], + "code": test_case["code"], + }, + ) + + def test_send_email_code_login_mail_task_database_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with database integration. + + This test verifies that the task can properly: + 1. Work with real database connections + 2. Handle database session management + 3. Maintain proper database state + 4. Complete without database-related errors + """ + # Arrange: Setup test data with database + fake = Faker() + account, tenant = self._create_test_tenant_and_account(db_session_with_containers, fake) + + test_email = account.email + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called with database account email + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + # Verify database state is maintained + db_session_with_containers.refresh(account) + assert account.email == test_email + assert account.status == "active" + + def test_send_email_code_login_mail_task_redis_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with Redis integration. + + This test verifies that the task can properly: + 1. Work with Redis cache connections + 2. Handle Redis operations without errors + 3. Maintain proper cache state + 4. 
Complete without Redis-related errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Setup Redis cache data + from extensions.ext_redis import redis_client + + cache_key = f"email_code_login_test_{test_email}" + redis_client.set(cache_key, "test_value", ex=300) + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called + mock_email_service_instance.send_email.assert_called_once() + + # Verify Redis cache is still accessible + assert redis_client.exists(cache_key) == 1 + assert redis_client.get(cache_key) == b"test_value" + + # Clean up Redis cache + redis_client.delete(cache_key) + + def test_send_email_code_login_mail_task_error_handling_comprehensive( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test comprehensive error handling for email code login mail task. + + This test verifies that the task can properly: + 1. Handle various types of exceptions + 2. Log appropriate error messages + 3. Continue execution despite errors + 4. Maintain proper error reporting + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Test different exception types + exception_types = [ + ("ValueError", ValueError("Invalid email format")), + ("RuntimeError", RuntimeError("Service unavailable")), + ("ConnectionError", ConnectionError("Network error")), + ("TimeoutError", TimeoutError("Request timeout")), + ("Exception", Exception("Generic error")), + ] + + for error_name, exception in exception_types: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + mock_email_service_instance.send_email.side_effect = exception + + # Mock logging to capture error messages + with patch("tasks.mail_email_code_login.logger") as mock_logger: + # Act: Execute the task - it should handle the exception gracefully + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify error handling + # Verify email service was called (and failed) + mock_email_service_instance.send_email.assert_called_once() + + # Verify error was logged + error_calls = [ + call + for call in mock_logger.exception.call_args_list + if f"Send email code login mail to {test_email} failed" in str(call) + ] + # Check if any exception call was made (the exact message format may vary) + assert mock_logger.exception.call_count >= 1, f"Error should be logged for {error_name}" + + # Reset side effect for next iteration + mock_email_service_instance.send_email.side_effect = None diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py new file mode 100644 index 0000000000..d67794654f --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py @@ -0,0 +1,261 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from tasks.mail_inner_task import send_inner_email_task + + +class TestMailInnerTask: + """Integration tests for send_inner_email_task using testcontainers.""" + + 
@pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_inner_task.mail") as mock_mail, + patch("tasks.mail_inner_task.get_email_i18n_service") as mock_get_email_i18n_service, + patch("tasks.mail_inner_task._render_template_with_strategy") as mock_render_template, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email i18n service + mock_email_service = MagicMock() + mock_get_email_i18n_service.return_value = mock_email_service + + # Setup mock template rendering + mock_render_template.return_value = "Test email content" + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "render_template": mock_render_template, + } + + def _create_test_email_data(self, fake: Faker) -> dict: + """ + Helper method to create test email data for testing. + + Args: + fake: Faker instance for generating test data + + Returns: + dict: Test email data including recipients, subject, body, and substitutions + """ + return { + "to": [fake.email() for _ in range(3)], + "subject": fake.sentence(nb_words=4), + "body": "Hello {{name}}, this is a test email from {{company}}.", + "substitutions": { + "name": fake.name(), + "company": fake.company(), + "date": fake.date(), + }, + } + + def test_send_inner_email_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful email sending with valid data. + + This test verifies: + - Proper email service initialization check + - Template rendering with substitutions + - Email service integration + - Multiple recipient handling + """ + # Arrange: Create test data + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + # Verify mail service was checked for initialization + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + + # Verify template rendering was called with correct parameters + mock_external_service_dependencies["render_template"].assert_called_once_with( + email_data["body"], email_data["substitutions"] + ) + + # Verify email service was called once with the full recipient list + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_single_recipient(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending with single recipient. 
+ + This test verifies: + - Single recipient handling + - Template rendering + - Email service integration + """ + # Arrange: Create test data with single recipient + fake = Faker() + email_data = { + "to": [fake.email()], + "subject": fake.sentence(nb_words=3), + "body": "Welcome {{user_name}}!", + "substitutions": { + "user_name": fake.name(), + }, + } + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_empty_substitutions(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending with empty substitutions. + + This test verifies: + - Template rendering with empty substitutions + - Email service integration + - Handling of minimal template context + """ + # Arrange: Create test data with empty substitutions + fake = Faker() + email_data = { + "to": [fake.email()], + "subject": fake.sentence(nb_words=3), + "body": "This is a simple email without variables.", + "substitutions": {}, + } + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["render_template"].assert_called_once_with(email_data["body"], {}) + + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No template rendering occurs + - No email service calls + - No exceptions raised + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["render_template"].assert_not_called() + mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called() + + def test_send_inner_email_template_rendering_error( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending when template rendering fails. 
+ + This test verifies: + - Exception handling during template rendering + - No email service calls when template fails + """ + # Arrange: Setup template rendering to raise an exception + mock_external_service_dependencies["render_template"].side_effect = Exception("Template rendering failed") + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify template rendering was attempted + mock_external_service_dependencies["render_template"].assert_called_once() + + # Verify no email service calls due to exception + mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called() + + def test_send_inner_email_service_error(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending when email service fails. + + This test verifies: + - Exception handling during email sending + - Graceful error handling + """ + # Arrange: Setup email service to raise an exception + mock_external_service_dependencies["email_service"].send_raw_email.side_effect = Exception( + "Email service failed" + ) + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify template rendering occurred + mock_external_service_dependencies["render_template"].assert_called_once() + + # Verify email service was called (and failed) + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py new file mode 100644 index 0000000000..c083861004 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py @@ -0,0 +1,544 @@ +""" +Integration tests for mail_invite_member_task using testcontainers. + +This module provides integration tests for the invite member email task +using TestContainers infrastructure. The tests ensure that the task properly sends +invitation emails with internationalization support, handles error scenarios, +and integrates correctly with the database and Redis for token management. + +All tests use the testcontainers infrastructure to ensure proper database isolation +and realistic testing scenarios with actual PostgreSQL and Redis instances. +""" + +import json +import uuid +from datetime import UTC, datetime +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from extensions.ext_redis import redis_client +from libs.email_i18n import EmailType +from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_invite_member_task import send_invite_member_mail_task + + +class TestMailInviteMemberTask: + """ + Integration tests for send_invite_member_mail_task using testcontainers. 
+ + This test class covers the core functionality of the invite member email task: + - Email sending with proper internationalization + - Template context generation and URL construction + - Error handling for failure scenarios + - Integration with Redis for token validation + - Mail service initialization checks + - Real database integration with actual invitation flow + + All tests use the testcontainers infrastructure to ensure proper database isolation + and realistic testing environment with actual database and Redis interactions. + """ + + @pytest.fixture(autouse=True) + def cleanup_database(self, db_session_with_containers): + """Clean up database before each test to ensure isolation.""" + # Clear all test data + db_session_with_containers.query(TenantAccountJoin).delete() + db_session_with_containers.query(Tenant).delete() + db_session_with_containers.query(Account).delete() + db_session_with_containers.commit() + + # Clear Redis cache + redis_client.flushdb() + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_invite_member_task.mail") as mock_mail, + patch("tasks.mail_invite_member_task.get_email_i18n_service") as mock_email_service, + patch("tasks.mail_invite_member_task.dify_config") as mock_config, + ): + # Setup mail service mock + mock_mail.is_inited.return_value = True + + # Setup email service mock + mock_email_service_instance = MagicMock() + mock_email_service_instance.send_email.return_value = None + mock_email_service.return_value = mock_email_service_instance + + # Setup config mock + mock_config.CONSOLE_WEB_URL = "https://console.dify.ai" + + yield { + "mail": mock_mail, + "email_service": mock_email_service_instance, + "config": mock_config, + } + + def _create_test_account_and_tenant(self, db_session_with_containers): + """ + Helper method to create a test account and tenant for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + tuple: (Account, Tenant) created instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + password=fake.password(), + interface_language="en-US", + status=AccountStatus.ACTIVE, + ) + account.created_at = datetime.now(UTC) + account.updated_at = datetime.now(UTC) + db_session_with_containers.add(account) + db_session_with_containers.commit() + db_session_with_containers.refresh(account) + + # Create tenant + tenant = Tenant( + name=fake.company(), + ) + tenant.created_at = datetime.now(UTC) + tenant.updated_at = datetime.now(UTC) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + db_session_with_containers.refresh(tenant) + + # Create tenant member relationship + tenant_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + ) + tenant_join.created_at = datetime.now(UTC) + db_session_with_containers.add(tenant_join) + db_session_with_containers.commit() + + return account, tenant + + def _create_invitation_token(self, tenant, account): + """ + Helper method to create a valid invitation token in Redis. 
+ + Args: + tenant: Tenant instance + account: Account instance + + Returns: + str: Generated invitation token + """ + token = str(uuid.uuid4()) + invitation_data = { + "account_id": account.id, + "email": account.email, + "workspace_id": tenant.id, + } + cache_key = f"member_invite:token:{token}" + redis_client.setex(cache_key, 24 * 60 * 60, json.dumps(invitation_data)) # 24 hours + return token + + def _create_pending_account_for_invitation(self, db_session_with_containers, email, tenant): + """ + Helper method to create a pending account for invitation testing. + + Args: + db_session_with_containers: Database session + email: Email address for the account + tenant: Tenant instance + + Returns: + Account: Created pending account + """ + account = Account( + email=email, + name=email.split("@")[0], + password="", + interface_language="en-US", + status=AccountStatus.PENDING, + ) + + account.created_at = datetime.now(UTC) + account.updated_at = datetime.now(UTC) + db_session_with_containers.add(account) + db_session_with_containers.commit() + db_session_with_containers.refresh(account) + + # Create tenant member relationship + tenant_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.NORMAL, + ) + tenant_join.created_at = datetime.now(UTC) + db_session_with_containers.add(tenant_join) + db_session_with_containers.commit() + + return account + + def test_send_invite_member_mail_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful invitation email sending with all parameters. + + This test verifies: + - Email service is called with correct parameters + - Template context includes all required fields + - URL is constructed correctly with token + - Performance logging is recorded + - No exceptions are raised + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + invitee_email = "test@example.com" + language = "en-US" + token = self._create_invitation_token(tenant, inviter) + inviter_name = inviter.name + workspace_name = tenant.name + + # Act: Execute the task + send_invite_member_mail_task( + language=language, + to=invitee_email, + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify email service was called correctly + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_called_once() + + # Verify call arguments + call_args = mock_email_service.send_email.call_args + assert call_args[1]["email_type"] == EmailType.INVITE_MEMBER + assert call_args[1]["language_code"] == language + assert call_args[1]["to"] == invitee_email + + # Verify template context + template_context = call_args[1]["template_context"] + assert template_context["to"] == invitee_email + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + assert template_context["url"] == f"https://console.dify.ai/activate?token={token}" + + def test_send_invite_member_mail_different_languages( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test invitation email sending with different language codes. 
+ + This test verifies: + - Email service handles different language codes correctly + - Template context is passed correctly for each language + - No language-specific errors occur + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + test_languages = ["en-US", "zh-CN", "ja-JP", "fr-FR", "de-DE", "es-ES"] + + for language in test_languages: + # Act: Execute the task with different language + send_invite_member_mail_task( + language=language, + to="test@example.com", + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify language code was passed correctly + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + assert call_args[1]["language_code"] == language + + def test_send_invite_member_mail_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test behavior when mail service is not initialized. + + This test verifies: + - Task returns early when mail is not initialized + - Email service is not called + - No exceptions are raised + """ + # Arrange: Setup mail service as not initialized + mock_mail = mock_external_service_dependencies["mail"] + mock_mail.is_inited.return_value = False + + # Act: Execute the task + result = send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token="test-token", + inviter_name="Test User", + workspace_name="Test Workspace", + ) + + # Assert: Verify early return + assert result is None + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_not_called() + + def test_send_invite_member_mail_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test error handling when email service raises an exception. + + This test verifies: + - Exception is caught and logged + - Task completes without raising exception + - Error logging is performed + """ + # Arrange: Setup email service to raise exception + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.side_effect = Exception("Email service failed") + + # Act & Assert: Execute task and verify exception is handled + with patch("tasks.mail_invite_member_task.logger") as mock_logger: + send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token="test-token", + inviter_name="Test User", + workspace_name="Test Workspace", + ) + + # Verify error was logged + mock_logger.exception.assert_called_once() + error_call = mock_logger.exception.call_args[0][0] + assert "Send invite member mail to %s failed" in error_call + + def test_send_invite_member_mail_template_context_validation( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test template context contains all required fields for email rendering. 
+ + This test verifies: + - All required template context fields are present + - Field values match expected data + - URL construction is correct + - No missing or None values in context + """ + # Arrange: Create test data with specific values + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = "test-token-123" + invitee_email = "invitee@example.com" + inviter_name = "John Doe" + workspace_name = "Acme Corp" + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=invitee_email, + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify template context + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + template_context = call_args[1]["template_context"] + + # Verify all required fields are present + required_fields = ["to", "inviter_name", "workspace_name", "url"] + for field in required_fields: + assert field in template_context + assert template_context[field] is not None + assert template_context[field] != "" + + # Verify specific values + assert template_context["to"] == invitee_email + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + assert template_context["url"] == f"https://console.dify.ai/activate?token={token}" + + def test_send_invite_member_mail_integration_with_redis_token( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test integration with Redis token validation. + + This test verifies: + - Task works with real Redis token data + - Token validation can be performed after email sending + - Redis data integrity is maintained + """ + # Arrange: Create test data and store token in Redis + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + # Verify token exists in Redis before sending email + cache_key = f"member_invite:token:{token}" + assert redis_client.exists(cache_key) == 1 + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=inviter.email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify token still exists after email sending + assert redis_client.exists(cache_key) == 1 + + # Verify token data integrity + token_data = redis_client.get(cache_key) + assert token_data is not None + invitation_data = json.loads(token_data) + assert invitation_data["account_id"] == inviter.id + assert invitation_data["email"] == inviter.email + assert invitation_data["workspace_id"] == tenant.id + + def test_send_invite_member_mail_with_special_characters( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending with special characters in names and workspace names. 
+ + This test verifies: + - Special characters are handled correctly in template context + - Email service receives properly formatted data + - No encoding issues occur + """ + # Arrange: Create test data with special characters + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + special_cases = [ + ("John O'Connor", "Acme & Co."), + ("José María", "Café & Restaurant"), + ("李小明", "北京科技有限公司"), + ("François & Marie", "L'École Internationale"), + ("Александр", "ООО Технологии"), + ("محمد أحمد", "شركة التقنية المتقدمة"), + ] + + for inviter_name, workspace_name in special_cases: + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify special characters are preserved + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + template_context = call_args[1]["template_context"] + + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + + def test_send_invite_member_mail_real_database_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test real database integration with actual invitation flow. + + This test verifies: + - Task works with real database entities + - Account and tenant relationships are properly maintained + - Database state is consistent after email sending + - Real invitation data flow is tested + """ + # Arrange: Create real database entities + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + invitee_email = "newmember@example.com" + + # Create a pending account for invitation (simulating real invitation flow) + pending_account = self._create_pending_account_for_invitation(db_session_with_containers, invitee_email, tenant) + + # Create invitation token with real account data + token = self._create_invitation_token(tenant, pending_account) + + # Act: Execute the task with real data + send_invite_member_mail_task( + language="en-US", + to=invitee_email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify email service was called with real data + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_called_once() + + # Verify database state is maintained + db_session_with_containers.refresh(pending_account) + db_session_with_containers.refresh(tenant) + + assert pending_account.status == AccountStatus.PENDING + assert pending_account.email == invitee_email + assert tenant.name is not None + + # Verify tenant relationship exists + tenant_join = ( + db_session_with_containers.query(TenantAccountJoin) + .filter_by(tenant_id=tenant.id, account_id=pending_account.id) + .first() + ) + assert tenant_join is not None + assert tenant_join.role == TenantAccountRole.NORMAL + + def test_send_invite_member_mail_token_lifecycle_management( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test token lifecycle management and validation. 
+ + This test verifies: + - Token is properly stored in Redis with correct TTL + - Token data structure is correct + - Token can be retrieved and validated after email sending + - Token expiration is handled correctly + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=inviter.email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify token lifecycle + cache_key = f"member_invite:token:{token}" + + # Token should still exist + assert redis_client.exists(cache_key) == 1 + + # Token should have correct TTL (approximately 24 hours) + ttl = redis_client.ttl(cache_key) + assert 23 * 60 * 60 <= ttl <= 24 * 60 * 60 # Allow some tolerance + + # Token data should be valid + token_data = redis_client.get(cache_key) + assert token_data is not None + + invitation_data = json.loads(token_data) + assert invitation_data["account_id"] == inviter.id + assert invitation_data["email"] == inviter.email + assert invitation_data["workspace_id"] == tenant.id diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py new file mode 100644 index 0000000000..e128b06b11 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py @@ -0,0 +1,401 @@ +""" +TestContainers-based integration tests for mail_owner_transfer_task. + +This module provides comprehensive integration tests for the mail owner transfer tasks +using TestContainers to ensure real email service integration and proper functionality +testing with actual database and service dependencies. +""" + +import logging +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_owner_transfer_task import ( + send_new_owner_transfer_notify_email_task, + send_old_owner_transfer_notify_email_task, + send_owner_transfer_confirm_task, +) + +logger = logging.getLogger(__name__) + + +class TestMailOwnerTransferTask: + """Integration tests for mail owner transfer tasks using testcontainers.""" + + @pytest.fixture + def mock_mail_dependencies(self): + """Mock setup for mail service dependencies.""" + with ( + patch("tasks.mail_owner_transfer_task.mail") as mock_mail, + patch("tasks.mail_owner_transfer_task.get_email_i18n_service") as mock_get_email_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email service + mock_email_service = MagicMock() + mock_get_email_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "get_email_service": mock_get_email_service, + } + + def _create_test_account_and_tenant(self, db_session_with_containers): + """ + Helper method to create test account and tenant for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + tuple: (account, tenant) - Created account and tenant instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db_session_with_containers.add(account) + db_session_with_containers.commit() + + # Create tenant + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db_session_with_containers.add(join) + db_session_with_containers.commit() + + return account, tenant + + def test_send_owner_transfer_confirm_task_success(self, db_session_with_containers, mock_mail_dependencies): + """ + Test successful owner transfer confirmation email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context is properly constructed + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_code = "123456" + test_workspace = tenant.name + + # Act: Execute the task + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_CONFIRM + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["code"] == test_code + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + + def test_send_owner_transfer_confirm_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test owner transfer confirmation email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_workspace = "Test Workspace" + + # Act: Execute the task + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_owner_transfer_confirm_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in owner transfer confirmation email. 
+ + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_workspace = "Test Workspace" + + # Act & Assert: Verify no exception is raised + try: + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() + + def test_send_old_owner_transfer_notify_email_task_success( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test successful old owner transfer notification email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context includes new owner email + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_workspace = tenant.name + test_new_owner_email = "newowner@example.com" + + # Act: Execute the task + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_OLD_NOTIFY + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + assert call_args[1]["template_context"]["NewOwnerEmail"] == test_new_owner_email + + def test_send_old_owner_transfer_notify_email_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test old owner transfer notification email when mail service is not initialized. 
+ + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + test_new_owner_email = "newowner@example.com" + + # Act: Execute the task + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_old_owner_transfer_notify_email_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in old owner transfer notification email. + + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + test_new_owner_email = "newowner@example.com" + + # Act & Assert: Verify no exception is raised + try: + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() + + def test_send_new_owner_transfer_notify_email_task_success( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test successful new owner transfer notification email sending. 
+ + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context is properly constructed + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_workspace = tenant.name + + # Act: Execute the task + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_NEW_NOTIFY + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + + def test_send_new_owner_transfer_notify_email_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test new owner transfer notification email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + + # Act: Execute the task + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_new_owner_transfer_notify_email_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in new owner transfer notification email. 
+ + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + + # Act & Assert: Verify no exception is raised + try: + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index f4e3d97719..209b6bf59b 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -15,13 +15,13 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") - monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") + monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") # Custom value for testing monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") monkeypatch.setenv("DB_PORT", "5432") monkeypatch.setenv("DB_DATABASE", "dify") - monkeypatch.setenv("HTTP_REQUEST_MAX_READ_TIMEOUT", "600") + monkeypatch.setenv("HTTP_REQUEST_MAX_READ_TIMEOUT", "300") # Custom value for testing # load dotenv file with pydantic-settings config = DifyConfig() @@ -33,17 +33,38 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): assert config.EDITION == "SELF_HOSTED" assert config.API_COMPRESSION_ENABLED is False assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0 + assert config.TEMPLATE_TRANSFORM_MAX_LENGTH == 400_000 - # annotated field with default value - assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600 + # annotated field with custom configured value + assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 300 - # annotated field with configured value + # annotated field with custom configured value assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 30 # values from pyproject.toml assert Version(config.project.version) >= Version("1.0.0") +def test_http_timeout_defaults(monkeypatch: pytest.MonkeyPatch): + """Test that HTTP timeout defaults are correctly set""" + # clear system environment variables + os.environ.clear() + + # Set minimal required env vars + monkeypatch.setenv("DB_USERNAME", "postgres") + monkeypatch.setenv("DB_PASSWORD", "postgres") + monkeypatch.setenv("DB_HOST", "localhost") + monkeypatch.setenv("DB_PORT", "5432") + monkeypatch.setenv("DB_DATABASE", "dify") + + config = DifyConfig() + + # Verify default timeout values + assert config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT == 10 + assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600 + assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 600 + + # NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected. # This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`. 
def test_flask_configs(monkeypatch: pytest.MonkeyPatch): @@ -54,7 +75,6 @@ def test_flask_configs(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") - monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -104,7 +124,6 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") - monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") diff --git a/api/tests/unit_tests/controllers/console/app/test_description_validation.py b/api/tests/unit_tests/controllers/console/app/test_description_validation.py index 178267e560..dcc408a21c 100644 --- a/api/tests/unit_tests/controllers/console/app/test_description_validation.py +++ b/api/tests/unit_tests/controllers/console/app/test_description_validation.py @@ -1,174 +1,53 @@ import pytest -from controllers.console.app.app import _validate_description_length as app_validate -from controllers.console.datasets.datasets import _validate_description_length as dataset_validate -from controllers.service_api.dataset.dataset import _validate_description_length as service_dataset_validate +from libs.validators import validate_description_length class TestDescriptionValidationUnit: - """Unit tests for description validation functions in App and Dataset APIs""" + """Unit tests for the centralized description validation function.""" - def test_app_validate_description_length_valid(self): - """Test App validation function with valid descriptions""" + def test_validate_description_length_valid(self): + """Test validation function with valid descriptions.""" # Empty string should be valid - assert app_validate("") == "" + assert validate_description_length("") == "" # None should be valid - assert app_validate(None) is None + assert validate_description_length(None) is None # Short description should be valid short_desc = "Short description" - assert app_validate(short_desc) == short_desc + assert validate_description_length(short_desc) == short_desc # Exactly 400 characters should be valid exactly_400 = "x" * 400 - assert app_validate(exactly_400) == exactly_400 + assert validate_description_length(exactly_400) == exactly_400 # Just under limit should be valid just_under = "x" * 399 - assert app_validate(just_under) == just_under + assert validate_description_length(just_under) == just_under - def test_app_validate_description_length_invalid(self): - """Test App validation function with invalid descriptions""" + def test_validate_description_length_invalid(self): + """Test validation function with invalid descriptions.""" # 401 characters should fail just_over = "x" * 401 with pytest.raises(ValueError) as exc_info: - app_validate(just_over) + validate_description_length(just_over) assert "Description cannot exceed 400 characters." in str(exc_info.value) # 500 characters should fail way_over = "x" * 500 with pytest.raises(ValueError) as exc_info: - app_validate(way_over) + validate_description_length(way_over) assert "Description cannot exceed 400 characters." 
in str(exc_info.value) # 1000 characters should fail very_long = "x" * 1000 with pytest.raises(ValueError) as exc_info: - app_validate(very_long) + validate_description_length(very_long) assert "Description cannot exceed 400 characters." in str(exc_info.value) - def test_dataset_validate_description_length_valid(self): - """Test Dataset validation function with valid descriptions""" - # Empty string should be valid - assert dataset_validate("") == "" - - # Short description should be valid - short_desc = "Short description" - assert dataset_validate(short_desc) == short_desc - - # Exactly 400 characters should be valid - exactly_400 = "x" * 400 - assert dataset_validate(exactly_400) == exactly_400 - - # Just under limit should be valid - just_under = "x" * 399 - assert dataset_validate(just_under) == just_under - - def test_dataset_validate_description_length_invalid(self): - """Test Dataset validation function with invalid descriptions""" - # 401 characters should fail - just_over = "x" * 401 - with pytest.raises(ValueError) as exc_info: - dataset_validate(just_over) - assert "Description cannot exceed 400 characters." in str(exc_info.value) - - # 500 characters should fail - way_over = "x" * 500 - with pytest.raises(ValueError) as exc_info: - dataset_validate(way_over) - assert "Description cannot exceed 400 characters." in str(exc_info.value) - - def test_service_dataset_validate_description_length_valid(self): - """Test Service Dataset validation function with valid descriptions""" - # Empty string should be valid - assert service_dataset_validate("") == "" - - # None should be valid - assert service_dataset_validate(None) is None - - # Short description should be valid - short_desc = "Short description" - assert service_dataset_validate(short_desc) == short_desc - - # Exactly 400 characters should be valid - exactly_400 = "x" * 400 - assert service_dataset_validate(exactly_400) == exactly_400 - - # Just under limit should be valid - just_under = "x" * 399 - assert service_dataset_validate(just_under) == just_under - - def test_service_dataset_validate_description_length_invalid(self): - """Test Service Dataset validation function with invalid descriptions""" - # 401 characters should fail - just_over = "x" * 401 - with pytest.raises(ValueError) as exc_info: - service_dataset_validate(just_over) - assert "Description cannot exceed 400 characters." in str(exc_info.value) - - # 500 characters should fail - way_over = "x" * 500 - with pytest.raises(ValueError) as exc_info: - service_dataset_validate(way_over) - assert "Description cannot exceed 400 characters." 
in str(exc_info.value) - - def test_app_dataset_validation_consistency(self): - """Test that App and Dataset validation functions behave identically""" - test_cases = [ - "", # Empty string - "Short description", # Normal description - "x" * 100, # Medium description - "x" * 400, # Exactly at limit - ] - - # Test valid cases produce same results - for test_desc in test_cases: - assert app_validate(test_desc) == dataset_validate(test_desc) == service_dataset_validate(test_desc) - - # Test invalid cases produce same errors - invalid_cases = [ - "x" * 401, # Just over limit - "x" * 500, # Way over limit - "x" * 1000, # Very long - ] - - for invalid_desc in invalid_cases: - app_error = None - dataset_error = None - service_dataset_error = None - - # Capture App validation error - try: - app_validate(invalid_desc) - except ValueError as e: - app_error = str(e) - - # Capture Dataset validation error - try: - dataset_validate(invalid_desc) - except ValueError as e: - dataset_error = str(e) - - # Capture Service Dataset validation error - try: - service_dataset_validate(invalid_desc) - except ValueError as e: - service_dataset_error = str(e) - - # All should produce errors - assert app_error is not None, f"App validation should fail for {len(invalid_desc)} characters" - assert dataset_error is not None, f"Dataset validation should fail for {len(invalid_desc)} characters" - error_msg = f"Service Dataset validation should fail for {len(invalid_desc)} characters" - assert service_dataset_error is not None, error_msg - - # Errors should be identical - error_msg = f"Error messages should be identical for {len(invalid_desc)} characters" - assert app_error == dataset_error == service_dataset_error, error_msg - assert app_error == "Description cannot exceed 400 characters." - def test_boundary_values(self): - """Test boundary values around the 400 character limit""" + """Test boundary values around the 400 character limit.""" boundary_tests = [ (0, True), # Empty (1, True), # Minimum @@ -184,69 +63,45 @@ class TestDescriptionValidationUnit: if should_pass: # Should not raise exception - assert app_validate(test_desc) == test_desc - assert dataset_validate(test_desc) == test_desc - assert service_dataset_validate(test_desc) == test_desc + assert validate_description_length(test_desc) == test_desc else: # Should raise ValueError with pytest.raises(ValueError): - app_validate(test_desc) - with pytest.raises(ValueError): - dataset_validate(test_desc) - with pytest.raises(ValueError): - service_dataset_validate(test_desc) + validate_description_length(test_desc) def test_special_characters(self): """Test validation with special characters, Unicode, etc.""" # Unicode characters unicode_desc = "测试描述" * 100 # Chinese characters if len(unicode_desc) <= 400: - assert app_validate(unicode_desc) == unicode_desc - assert dataset_validate(unicode_desc) == unicode_desc - assert service_dataset_validate(unicode_desc) == unicode_desc + assert validate_description_length(unicode_desc) == unicode_desc # Special characters special_desc = "Special chars: !@#$%^&*()_+-=[]{}|;':\",./<>?" 
* 10 if len(special_desc) <= 400: - assert app_validate(special_desc) == special_desc - assert dataset_validate(special_desc) == special_desc - assert service_dataset_validate(special_desc) == special_desc + assert validate_description_length(special_desc) == special_desc # Mixed content mixed_desc = "Mixed content: 测试 123 !@# " * 15 if len(mixed_desc) <= 400: - assert app_validate(mixed_desc) == mixed_desc - assert dataset_validate(mixed_desc) == mixed_desc - assert service_dataset_validate(mixed_desc) == mixed_desc + assert validate_description_length(mixed_desc) == mixed_desc elif len(mixed_desc) > 400: with pytest.raises(ValueError): - app_validate(mixed_desc) - with pytest.raises(ValueError): - dataset_validate(mixed_desc) - with pytest.raises(ValueError): - service_dataset_validate(mixed_desc) + validate_description_length(mixed_desc) def test_whitespace_handling(self): - """Test validation with various whitespace scenarios""" + """Test validation with various whitespace scenarios.""" # Leading/trailing whitespace whitespace_desc = " Description with whitespace " if len(whitespace_desc) <= 400: - assert app_validate(whitespace_desc) == whitespace_desc - assert dataset_validate(whitespace_desc) == whitespace_desc - assert service_dataset_validate(whitespace_desc) == whitespace_desc + assert validate_description_length(whitespace_desc) == whitespace_desc # Newlines and tabs multiline_desc = "Line 1\nLine 2\tTabbed content" if len(multiline_desc) <= 400: - assert app_validate(multiline_desc) == multiline_desc - assert dataset_validate(multiline_desc) == multiline_desc - assert service_dataset_validate(multiline_desc) == multiline_desc + assert validate_description_length(multiline_desc) == multiline_desc # Only whitespace over limit only_spaces = " " * 401 with pytest.raises(ValueError): - app_validate(only_spaces) - with pytest.raises(ValueError): - dataset_validate(only_spaces) - with pytest.raises(ValueError): - service_dataset_validate(only_spaces) + validate_description_length(only_spaces) diff --git a/api/tests/unit_tests/controllers/console/auth/test_oauth.py b/api/tests/unit_tests/controllers/console/auth/test_oauth.py index 1a2e27e8fe..67f4b85413 100644 --- a/api/tests/unit_tests/controllers/console/auth/test_oauth.py +++ b/api/tests/unit_tests/controllers/console/auth/test_oauth.py @@ -143,7 +143,7 @@ class TestOAuthCallback: oauth_provider.get_user_info.return_value = OAuthUserInfo(id="123", name="Test User", email="test@example.com") account = MagicMock() - account.status = AccountStatus.ACTIVE.value + account.status = AccountStatus.ACTIVE token_pair = MagicMock() token_pair.access_token = "jwt_access_token" @@ -220,11 +220,11 @@ class TestOAuthCallback: @pytest.mark.parametrize( ("account_status", "expected_redirect"), [ - (AccountStatus.BANNED.value, "http://localhost:3000/signin?message=Account is banned."), + (AccountStatus.BANNED, "http://localhost:3000/signin?message=Account is banned."), # CLOSED status: Currently NOT handled, will proceed to login (security issue) # This documents actual behavior. 
See test_defensive_check_for_closed_account_status for details ( - AccountStatus.CLOSED.value, + AccountStatus.CLOSED, "http://localhost:3000?access_token=jwt_access_token&refresh_token=jwt_refresh_token", ), ], @@ -296,13 +296,13 @@ class TestOAuthCallback: mock_get_providers.return_value = {"github": oauth_setup["provider"]} mock_account = MagicMock() - mock_account.status = AccountStatus.PENDING.value + mock_account.status = AccountStatus.PENDING mock_generate_account.return_value = mock_account with app.test_request_context("/auth/oauth/github/callback?code=test_code"): resource.get("github") - assert mock_account.status == AccountStatus.ACTIVE.value + assert mock_account.status == AccountStatus.ACTIVE assert mock_account.initialized_at is not None mock_db.session.commit.assert_called_once() @@ -352,7 +352,7 @@ class TestOAuthCallback: # Create account with CLOSED status closed_account = MagicMock() - closed_account.status = AccountStatus.CLOSED.value + closed_account.status = AccountStatus.CLOSED closed_account.id = "123" closed_account.name = "Closed Account" mock_generate_account.return_value = closed_account diff --git a/api/tests/unit_tests/controllers/console/test_wraps.py b/api/tests/unit_tests/controllers/console/test_wraps.py index 9742368f04..6777077de8 100644 --- a/api/tests/unit_tests/controllers/console/test_wraps.py +++ b/api/tests/unit_tests/controllers/console/test_wraps.py @@ -60,7 +60,7 @@ class TestAccountInitialization: return "success" # Act - with patch("controllers.console.wraps.current_user", mock_user): + with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_user, "tenant123")): result = protected_view() # Assert @@ -77,7 +77,7 @@ class TestAccountInitialization: return "success" # Act & Assert - with patch("controllers.console.wraps.current_user", mock_user): + with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_user, "tenant123")): with pytest.raises(AccountNotInitializedError): protected_view() @@ -163,7 +163,9 @@ class TestBillingResourceLimits: return "member_added" # Act - with patch("controllers.console.wraps.current_user"): + with patch( + "controllers.console.wraps.current_account_with_tenant", return_value=(MockUser("test_user"), "tenant123") + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): result = add_member() @@ -185,7 +187,10 @@ class TestBillingResourceLimits: # Act & Assert with app.test_request_context(): - with patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): with pytest.raises(Exception) as exc_info: add_member() @@ -207,7 +212,10 @@ class TestBillingResourceLimits: # Test 1: Should reject when source is datasets with app.test_request_context("/?source=datasets"): - with patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): with pytest.raises(Exception) as exc_info: upload_document() @@ -215,7 +223,10 @@ class TestBillingResourceLimits: # Test 2: Should allow when source is not datasets with app.test_request_context("/?source=other"): - 
with patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): result = upload_document() assert result == "document_uploaded" @@ -239,7 +250,9 @@ class TestRateLimiting: return "knowledge_success" # Act - with patch("controllers.console.wraps.current_user"): + with patch( + "controllers.console.wraps.current_account_with_tenant", return_value=(MockUser("test_user"), "tenant123") + ): with patch( "controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit ): @@ -271,7 +284,10 @@ class TestRateLimiting: # Act & Assert with app.test_request_context(): - with patch("controllers.console.wraps.current_user", MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch( "controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit ): diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py b/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py new file mode 100644 index 0000000000..8ccd739e64 --- /dev/null +++ b/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py @@ -0,0 +1,733 @@ +import json +import unittest +from unittest.mock import MagicMock, patch + +import pytest + +from core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector import ( + AlibabaCloudMySQLVector, + AlibabaCloudMySQLVectorConfig, +) +from core.rag.models.document import Document + +try: + from mysql.connector import Error as MySQLError +except ImportError: + # Fallback for testing environments where mysql-connector-python might not be installed + class MySQLError(Exception): + def __init__(self, errno, msg): + self.errno = errno + self.msg = msg + super().__init__(msg) + + +class TestAlibabaCloudMySQLVector(unittest.TestCase): + def setUp(self): + self.config = AlibabaCloudMySQLVectorConfig( + host="localhost", + port=3306, + user="test_user", + password="test_password", + database="test_db", + max_connection=5, + charset="utf8mb4", + ) + self.collection_name = "test_collection" + + # Sample documents for testing + self.sample_documents = [ + Document( + page_content="This is a test document about AI.", + metadata={"doc_id": "doc1", "document_id": "dataset1", "source": "test"}, + ), + Document( + page_content="Another document about machine learning.", + metadata={"doc_id": "doc2", "document_id": "dataset1", "source": "test"}, + ), + ] + + # Sample embeddings + self.sample_embeddings = [[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_init(self, mock_pool_class): + """Test AlibabaCloudMySQLVector initialization.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor for vector support check + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [ + {"VERSION()": "8.0.36"}, # Version check + {"vector_support": 
True}, # Vector support check + ] + + alibabacloud_mysql_vector = AlibabaCloudMySQLVector(self.collection_name, self.config) + + assert alibabacloud_mysql_vector.collection_name == self.collection_name + assert alibabacloud_mysql_vector.table_name == self.collection_name.lower() + assert alibabacloud_mysql_vector.get_type() == "alibabacloud_mysql" + assert alibabacloud_mysql_vector.distance_function == "cosine" + assert alibabacloud_mysql_vector.pool is not None + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + @patch("core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.redis_client") + def test_create_collection(self, mock_redis, mock_pool_class): + """Test collection creation.""" + # Mock Redis operations + mock_redis.lock.return_value.__enter__ = MagicMock() + mock_redis.lock.return_value.__exit__ = MagicMock() + mock_redis.get.return_value = None + mock_redis.set.return_value = None + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [ + {"VERSION()": "8.0.36"}, # Version check + {"vector_support": True}, # Vector support check + ] + + alibabacloud_mysql_vector = AlibabaCloudMySQLVector(self.collection_name, self.config) + alibabacloud_mysql_vector._create_collection(768) + + # Verify SQL execution calls - should include table creation and index creation + assert mock_cursor.execute.called + assert mock_cursor.execute.call_count >= 3 # CREATE TABLE + 2 indexes + mock_redis.set.assert_called_once() + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_vector_support_check_success(self, mock_pool_class): + """Test successful vector support check.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + # Should not raise an exception + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + assert vector_store is not None + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_vector_support_check_failure(self, mock_pool_class): + """Test vector support check failure.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.35"}, {"vector_support": False}] + + with pytest.raises(ValueError) as context: + AlibabaCloudMySQLVector(self.collection_name, self.config) + + assert "RDS MySQL Vector functions are not available" in str(context.value) + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_vector_support_check_function_error(self, mock_pool_class): + """Test vector support check with function not 
found error.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.return_value = {"VERSION()": "8.0.36"} + mock_cursor.execute.side_effect = [None, MySQLError(errno=1305, msg="FUNCTION VEC_FromText does not exist")] + + with pytest.raises(ValueError) as context: + AlibabaCloudMySQLVector(self.collection_name, self.config) + + assert "RDS MySQL Vector functions are not available" in str(context.value) + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + @patch("core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.redis_client") + def test_create_documents(self, mock_redis, mock_pool_class): + """Test creating documents with embeddings.""" + # Setup mocks + self._setup_mocks(mock_redis, mock_pool_class) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + result = vector_store.create(self.sample_documents, self.sample_embeddings) + + assert len(result) == 2 + assert "doc1" in result + assert "doc2" in result + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_add_texts(self, mock_pool_class): + """Test adding texts to the vector store.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + result = vector_store.add_texts(self.sample_documents, self.sample_embeddings) + + assert len(result) == 2 + mock_cursor.executemany.assert_called_once() + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_text_exists(self, mock_pool_class): + """Test checking if text exists.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [ + {"VERSION()": "8.0.36"}, + {"vector_support": True}, + {"id": "doc1"}, # Text exists + ] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + exists = vector_store.text_exists("doc1") + + assert exists + # Check that the correct SQL was executed (last call after init) + execute_calls = mock_cursor.execute.call_args_list + last_call = execute_calls[-1] + assert "SELECT id FROM" in last_call[0][0] + assert last_call[0][1] == ("doc1",) + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_text_not_exists(self, mock_pool_class): + """Test checking if text does not exist.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + 
mock_cursor.fetchone.side_effect = [ + {"VERSION()": "8.0.36"}, + {"vector_support": True}, + None, # Text does not exist + ] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + exists = vector_store.text_exists("nonexistent") + + assert not exists + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_get_by_ids(self, mock_pool_class): + """Test getting documents by IDs.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [ + {"meta": json.dumps({"doc_id": "doc1", "source": "test"}), "text": "Test document 1"}, + {"meta": json.dumps({"doc_id": "doc2", "source": "test"}), "text": "Test document 2"}, + ] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.get_by_ids(["doc1", "doc2"]) + + assert len(docs) == 2 + assert docs[0].page_content == "Test document 1" + assert docs[1].page_content == "Test document 2" + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_get_by_ids_empty_list(self, mock_pool_class): + """Test getting documents with empty ID list.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.get_by_ids([]) + + assert len(docs) == 0 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_ids(self, mock_pool_class): + """Test deleting documents by IDs.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + vector_store.delete_by_ids(["doc1", "doc2"]) + + # Check that delete SQL was executed + execute_calls = mock_cursor.execute.call_args_list + delete_calls = [call for call in execute_calls if "DELETE" in str(call)] + assert len(delete_calls) == 1 + delete_call = delete_calls[0] + assert "DELETE FROM" in delete_call[0][0] + assert delete_call[0][1] == ["doc1", "doc2"] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_ids_empty_list(self, mock_pool_class): + """Test deleting with empty ID list.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + 
mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + vector_store.delete_by_ids([]) # Should not raise an exception + + # Verify no delete SQL was executed + execute_calls = mock_cursor.execute.call_args_list + delete_calls = [call for call in execute_calls if "DELETE" in str(call)] + assert len(delete_calls) == 0 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_ids_table_not_exists(self, mock_pool_class): + """Test deleting when table doesn't exist.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + # Simulate table doesn't exist error on delete + + def execute_side_effect(*args, **kwargs): + if "DELETE" in args[0]: + raise MySQLError(errno=1146, msg="Table doesn't exist") + + mock_cursor.execute.side_effect = execute_side_effect + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + # Should not raise an exception + vector_store.delete_by_ids(["doc1"]) + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_by_metadata_field(self, mock_pool_class): + """Test deleting documents by metadata field.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + vector_store.delete_by_metadata_field("document_id", "dataset1") + + # Check that the correct SQL was executed + execute_calls = mock_cursor.execute.call_args_list + delete_calls = [call for call in execute_calls if "DELETE" in str(call)] + assert len(delete_calls) == 1 + delete_call = delete_calls[0] + assert "JSON_UNQUOTE(JSON_EXTRACT(meta" in delete_call[0][0] + assert delete_call[0][1] == ("$.document_id", "dataset1") + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_cosine(self, mock_pool_class): + """Test vector search with cosine distance.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [{"meta": json.dumps({"doc_id": "doc1", "source": "test"}), "text": "Test document 1", "distance": 0.1}] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = vector_store.search_by_vector(query_vector, top_k=5) + + assert len(docs) == 1 + assert docs[0].page_content == "Test document 1" + assert abs(docs[0].metadata["score"] - 0.9) < 0.1 # 1 - 0.1 = 
0.9 + assert docs[0].metadata["distance"] == 0.1 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_euclidean(self, mock_pool_class): + """Test vector search with euclidean distance.""" + config = AlibabaCloudMySQLVectorConfig( + host="localhost", + port=3306, + user="test_user", + password="test_password", + database="test_db", + max_connection=5, + distance_function="euclidean", + ) + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [{"meta": json.dumps({"doc_id": "doc1", "source": "test"}), "text": "Test document 1", "distance": 2.0}] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = vector_store.search_by_vector(query_vector, top_k=5) + + assert len(docs) == 1 + assert abs(docs[0].metadata["score"] - 1.0 / 3.0) < 0.01 # 1/(1+2) = 1/3 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_with_filter(self, mock_pool_class): + """Test vector search with document ID filter.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter([]) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = vector_store.search_by_vector(query_vector, top_k=5, document_ids_filter=["dataset1"]) + + # Verify the SQL contains the WHERE clause for filtering + execute_calls = mock_cursor.execute.call_args_list + search_calls = [call for call in execute_calls if "VEC_DISTANCE" in str(call)] + assert len(search_calls) > 0 + search_call = search_calls[0] + assert "WHERE JSON_UNQUOTE" in search_call[0][0] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_with_score_threshold(self, mock_pool_class): + """Test vector search with score threshold.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [ + { + "meta": json.dumps({"doc_id": "doc1", "source": "test"}), + "text": "High similarity document", + "distance": 0.1, # High similarity (score = 0.9) + }, + { + "meta": json.dumps({"doc_id": "doc2", "source": "test"}), + "text": "Low similarity document", + "distance": 0.8, # Low similarity (score = 0.2) + }, + ] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + docs = 
vector_store.search_by_vector(query_vector, top_k=5, score_threshold=0.5) + + # Only the high similarity document should be returned + assert len(docs) == 1 + assert docs[0].page_content == "High similarity document" + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_vector_invalid_top_k(self, mock_pool_class): + """Test vector search with invalid top_k.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + query_vector = [0.1, 0.2, 0.3, 0.4] + + with pytest.raises(ValueError): + vector_store.search_by_vector(query_vector, top_k=0) + + with pytest.raises(ValueError): + vector_store.search_by_vector(query_vector, top_k="invalid") + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_full_text(self, mock_pool_class): + """Test full-text search.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter( + [ + { + "meta": {"doc_id": "doc1", "source": "test"}, + "text": "This document contains machine learning content", + "score": 1.5, + } + ] + ) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.search_by_full_text("machine learning", top_k=5) + + assert len(docs) == 1 + assert docs[0].page_content == "This document contains machine learning content" + assert docs[0].metadata["score"] == 1.5 + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_full_text_with_filter(self, mock_pool_class): + """Test full-text search with document ID filter.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + mock_cursor.__iter__ = lambda self: iter([]) + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + docs = vector_store.search_by_full_text("machine learning", top_k=5, document_ids_filter=["dataset1"]) + + # Verify the SQL contains the AND clause for filtering + execute_calls = mock_cursor.execute.call_args_list + search_calls = [call for call in execute_calls if "MATCH" in str(call)] + assert len(search_calls) > 0 + search_call = search_calls[0] + assert "AND JSON_UNQUOTE" in search_call[0][0] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_search_by_full_text_invalid_top_k(self, mock_pool_class): + """Test full-text search with invalid top_k.""" + # Mock the 
connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + + with pytest.raises(ValueError): + vector_store.search_by_full_text("test", top_k=0) + + with pytest.raises(ValueError): + vector_store.search_by_full_text("test", top_k="invalid") + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_delete_collection(self, mock_pool_class): + """Test deleting the entire collection.""" + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + vector_store = AlibabaCloudMySQLVector(self.collection_name, self.config) + vector_store.delete() + + # Check that DROP TABLE SQL was executed + execute_calls = mock_cursor.execute.call_args_list + drop_calls = [call for call in execute_calls if "DROP TABLE" in str(call)] + assert len(drop_calls) == 1 + drop_call = drop_calls[0] + assert f"DROP TABLE IF EXISTS {self.collection_name.lower()}" in drop_call[0][0] + + @patch( + "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" + ) + def test_unsupported_distance_function(self, mock_pool_class): + """Test that Pydantic validation rejects unsupported distance functions.""" + # Test that creating config with unsupported distance function raises ValidationError + with pytest.raises(ValueError) as context: + AlibabaCloudMySQLVectorConfig( + host="localhost", + port=3306, + user="test_user", + password="test_password", + database="test_db", + max_connection=5, + distance_function="manhattan", # Unsupported - not in Literal["cosine", "euclidean"] + ) + + # The error should be related to validation + assert "Input should be 'cosine' or 'euclidean'" in str(context.value) or "manhattan" in str(context.value) + + def _setup_mocks(self, mock_redis, mock_pool_class): + """Helper method to setup common mocks.""" + # Mock Redis operations + mock_redis.lock.return_value.__enter__ = MagicMock() + mock_redis.lock.return_value.__exit__ = MagicMock() + mock_redis.get.return_value = None + mock_redis.set.return_value = None + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.get_connection.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] + + +@pytest.mark.parametrize( + "invalid_config_override", + [ + {"host": ""}, # Test empty host + {"port": 0}, # Test invalid port + {"max_connection": 0}, # Test invalid max_connection + ], +) +def test_config_validation_parametrized(invalid_config_override): + """Test configuration validation for various invalid inputs using parametrize.""" + config = { + "host": "localhost", + "port": 3306, + "user": "test", + "password": "test", + "database": "test", + 
"max_connection": 5, + } + config.update(invalid_config_override) + + with pytest.raises(ValueError): + AlibabaCloudMySQLVectorConfig(**config) + + +if __name__ == "__main__": + unittest.main() diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py b/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py index 48cc8a7e1c..fb2ddfe162 100644 --- a/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py +++ b/api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py @@ -11,8 +11,8 @@ def test_default_value(): config = valid_config.copy() del config[key] with pytest.raises(ValidationError) as e: - MilvusConfig(**config) + MilvusConfig.model_validate(config) assert e.value.errors()[0]["msg"] == f"Value error, config MILVUS_{key.upper()} is required" - config = MilvusConfig(**valid_config) + config = MilvusConfig.model_validate(valid_config) assert config.database == "default" diff --git a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py index 6689e13b96..b4ee1b91b4 100644 --- a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py +++ b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py @@ -1,10 +1,12 @@ import os +from pytest_mock import MockerFixture + from core.rag.extractor.firecrawl.firecrawl_app import FirecrawlApp from tests.unit_tests.core.rag.extractor.test_notion_extractor import _mock_response -def test_firecrawl_web_extractor_crawl_mode(mocker): +def test_firecrawl_web_extractor_crawl_mode(mocker: MockerFixture): url = "https://firecrawl.dev" api_key = os.getenv("FIRECRAWL_API_KEY") or "fc-" base_url = "https://api.firecrawl.dev" @@ -18,7 +20,7 @@ def test_firecrawl_web_extractor_crawl_mode(mocker): mocked_firecrawl = { "id": "test", } - mocker.patch("requests.post", return_value=_mock_response(mocked_firecrawl)) + mocker.patch("httpx.post", return_value=_mock_response(mocked_firecrawl)) job_id = firecrawl_app.crawl_url(url, params) assert job_id is not None diff --git a/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py b/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py index eea584a2f8..58bec7d19e 100644 --- a/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py +++ b/api/tests/unit_tests/core/rag/extractor/test_notion_extractor.py @@ -1,5 +1,7 @@ from unittest import mock +from pytest_mock import MockerFixture + from core.rag.extractor import notion_extractor user_id = "user1" @@ -57,7 +59,7 @@ def _remove_multiple_new_lines(text): return text.strip() -def test_notion_page(mocker): +def test_notion_page(mocker: MockerFixture): texts = ["Head 1", "1.1", "paragraph 1", "1.1.1"] mocked_notion_page = { "object": "list", @@ -69,7 +71,7 @@ def test_notion_page(mocker): ], "next_cursor": None, } - mocker.patch("requests.request", return_value=_mock_response(mocked_notion_page)) + mocker.patch("httpx.request", return_value=_mock_response(mocked_notion_page)) page_docs = extractor._load_data_as_documents(page_id, "page") assert len(page_docs) == 1 @@ -77,14 +79,14 @@ def test_notion_page(mocker): assert content == "# Head 1\n## 1.1\nparagraph 1\n### 1.1.1" -def test_notion_database(mocker): +def test_notion_database(mocker: MockerFixture): page_title_list = ["page1", "page2", "page3"] mocked_notion_database = { "object": "list", "results": [_generate_page(i) for i in page_title_list], "next_cursor": None, } - mocker.patch("requests.post", 
return_value=_mock_response(mocked_notion_database)) + mocker.patch("httpx.post", return_value=_mock_response(mocked_notion_database)) database_docs = extractor._load_data_as_documents(database_id, "database") assert len(database_docs) == 1 content = _remove_multiple_new_lines(database_docs[0].page_content) diff --git a/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py b/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py index e7733b2317..e6d0371cd5 100644 --- a/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py +++ b/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py @@ -140,7 +140,7 @@ class TestCeleryWorkflowExecutionRepository: assert call_args["execution_data"] == sample_workflow_execution.model_dump() assert call_args["tenant_id"] == mock_account.current_tenant_id assert call_args["app_id"] == "test-app" - assert call_args["triggered_from"] == WorkflowRunTriggeredFrom.APP_RUN.value + assert call_args["triggered_from"] == WorkflowRunTriggeredFrom.APP_RUN assert call_args["creator_user_id"] == mock_account.id # Verify no task tracking occurs (no _pending_saves attribute) diff --git a/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py b/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py index 3abe20fca1..f6211f4cca 100644 --- a/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py +++ b/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py @@ -149,7 +149,7 @@ class TestCeleryWorkflowNodeExecutionRepository: assert call_args["execution_data"] == sample_workflow_node_execution.model_dump() assert call_args["tenant_id"] == mock_account.current_tenant_id assert call_args["app_id"] == "test-app" - assert call_args["triggered_from"] == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value + assert call_args["triggered_from"] == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN assert call_args["creator_user_id"] == mock_account.id # Verify execution is cached diff --git a/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py b/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py index 36f7d3ef55..485be90eae 100644 --- a/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py +++ b/api/tests/unit_tests/core/repositories/test_workflow_node_execution_truncation.py @@ -145,12 +145,12 @@ class TestSQLAlchemyWorkflowNodeExecutionRepositoryTruncation: db_model.index = 1 db_model.predecessor_node_id = None db_model.node_id = "node-id" - db_model.node_type = NodeType.LLM.value + db_model.node_type = NodeType.LLM db_model.title = "Test Node" db_model.inputs = json.dumps({"value": "inputs"}) db_model.process_data = json.dumps({"value": "process_data"}) db_model.outputs = json.dumps({"value": "outputs"}) - db_model.status = WorkflowNodeExecutionStatus.SUCCEEDED.value + db_model.status = WorkflowNodeExecutionStatus.SUCCEEDED db_model.error = None db_model.elapsed_time = 1.0 db_model.execution_metadata = "{}" diff --git a/api/tests/unit_tests/core/test_model_manager.py b/api/tests/unit_tests/core/test_model_manager.py index d98e9f6bad..5a7547e85c 100644 --- a/api/tests/unit_tests/core/test_model_manager.py +++ b/api/tests/unit_tests/core/test_model_manager.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch import pytest import redis +from 
pytest_mock import MockerFixture from core.entities.provider_entities import ModelLoadBalancingConfiguration from core.model_manager import LBModelManager @@ -39,7 +40,7 @@ def lb_model_manager(): return lb_model_manager -def test_lb_model_manager_fetch_next(mocker, lb_model_manager): +def test_lb_model_manager_fetch_next(mocker: MockerFixture, lb_model_manager: LBModelManager): # initialize redis client redis_client.initialize(redis.Redis()) diff --git a/api/tests/unit_tests/core/test_provider_configuration.py b/api/tests/unit_tests/core/test_provider_configuration.py index 75621ecb6a..9060cf7b6c 100644 --- a/api/tests/unit_tests/core/test_provider_configuration.py +++ b/api/tests/unit_tests/core/test_provider_configuration.py @@ -14,7 +14,13 @@ from core.entities.provider_entities import ( ) from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.entities.provider_entities import ConfigurateMethod, ProviderEntity +from core.model_runtime.entities.provider_entities import ( + ConfigurateMethod, + CredentialFormSchema, + FormOption, + FormType, + ProviderEntity, +) from models.provider import Provider, ProviderType @@ -306,3 +312,174 @@ class TestProviderConfiguration: # Assert assert credentials == {"openai_api_key": "test_key"} + + def test_extract_secret_variables_with_secret_input(self, provider_configuration): + """Test extracting secret variables from credential form schemas""" + # Arrange + credential_form_schemas = [ + CredentialFormSchema( + variable="api_key", + label=I18nObject(en_US="API Key", zh_Hans="API 密钥"), + type=FormType.SECRET_INPUT, + required=True, + ), + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="secret_token", + label=I18nObject(en_US="Secret Token", zh_Hans="密钥令牌"), + type=FormType.SECRET_INPUT, + required=False, + ), + ] + + # Act + secret_variables = provider_configuration.extract_secret_variables(credential_form_schemas) + + # Assert + assert len(secret_variables) == 2 + assert "api_key" in secret_variables + assert "secret_token" in secret_variables + assert "model_name" not in secret_variables + + def test_extract_secret_variables_no_secret_input(self, provider_configuration): + """Test extracting secret variables when no secret input fields exist""" + # Arrange + credential_form_schemas = [ + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=FormType.SELECT, + required=True, + options=[FormOption(label=I18nObject(en_US="0.1", zh_Hans="0.1"), value="0.1")], + ), + ] + + # Act + secret_variables = provider_configuration.extract_secret_variables(credential_form_schemas) + + # Assert + assert len(secret_variables) == 0 + + def test_extract_secret_variables_empty_list(self, provider_configuration): + """Test extracting secret variables from empty credential form schemas""" + # Arrange + credential_form_schemas = [] + + # Act + secret_variables = provider_configuration.extract_secret_variables(credential_form_schemas) + + # Assert + assert len(secret_variables) == 0 + + @patch("core.entities.provider_configuration.encrypter") + def test_obfuscated_credentials_with_secret_variables(self, mock_encrypter, 
provider_configuration): + """Test obfuscating credentials with secret variables""" + # Arrange + credentials = { + "api_key": "sk-1234567890abcdef", + "model_name": "gpt-4", + "secret_token": "secret_value_123", + "temperature": "0.7", + } + + credential_form_schemas = [ + CredentialFormSchema( + variable="api_key", + label=I18nObject(en_US="API Key", zh_Hans="API 密钥"), + type=FormType.SECRET_INPUT, + required=True, + ), + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="secret_token", + label=I18nObject(en_US="Secret Token", zh_Hans="密钥令牌"), + type=FormType.SECRET_INPUT, + required=False, + ), + CredentialFormSchema( + variable="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=FormType.TEXT_INPUT, + required=True, + ), + ] + + mock_encrypter.obfuscated_token.side_effect = lambda x: f"***{x[-4:]}" + + # Act + obfuscated = provider_configuration.obfuscated_credentials(credentials, credential_form_schemas) + + # Assert + assert obfuscated["api_key"] == "***cdef" + assert obfuscated["model_name"] == "gpt-4" # Not obfuscated + assert obfuscated["secret_token"] == "***_123" + assert obfuscated["temperature"] == "0.7" # Not obfuscated + + # Verify encrypter was called for secret fields only + assert mock_encrypter.obfuscated_token.call_count == 2 + mock_encrypter.obfuscated_token.assert_any_call("sk-1234567890abcdef") + mock_encrypter.obfuscated_token.assert_any_call("secret_value_123") + + def test_obfuscated_credentials_no_secret_variables(self, provider_configuration): + """Test obfuscating credentials when no secret variables exist""" + # Arrange + credentials = { + "model_name": "gpt-4", + "temperature": "0.7", + "max_tokens": "1000", + } + + credential_form_schemas = [ + CredentialFormSchema( + variable="model_name", + label=I18nObject(en_US="Model Name", zh_Hans="模型名称"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=FormType.TEXT_INPUT, + required=True, + ), + CredentialFormSchema( + variable="max_tokens", + label=I18nObject(en_US="Max Tokens", zh_Hans="最大令牌数"), + type=FormType.TEXT_INPUT, + required=True, + ), + ] + + # Act + obfuscated = provider_configuration.obfuscated_credentials(credentials, credential_form_schemas) + + # Assert + assert obfuscated == credentials # No changes expected + + def test_obfuscated_credentials_empty_credentials(self, provider_configuration): + """Test obfuscating empty credentials""" + # Arrange + credentials = {} + credential_form_schemas = [] + + # Act + obfuscated = provider_configuration.obfuscated_credentials(credentials, credential_form_schemas) + + # Assert + assert obfuscated == {} diff --git a/api/tests/unit_tests/core/test_provider_manager.py b/api/tests/unit_tests/core/test_provider_manager.py index 2dab394029..0c3887beab 100644 --- a/api/tests/unit_tests/core/test_provider_manager.py +++ b/api/tests/unit_tests/core/test_provider_manager.py @@ -1,4 +1,5 @@ import pytest +from pytest_mock import MockerFixture from core.entities.provider_entities import ModelSettings from core.model_runtime.entities.model_entities import ModelType @@ -7,19 +8,25 @@ from models.provider import LoadBalancingModelConfig, ProviderModelSetting @pytest.fixture -def mock_provider_entity(mocker): +def mock_provider_entity(mocker: MockerFixture): mock_entity = mocker.Mock() mock_entity.provider 
= "openai" mock_entity.configurate_methods = ["predefined-model"] mock_entity.supported_model_types = [ModelType.LLM] - mock_entity.model_credential_schema = mocker.Mock() - mock_entity.model_credential_schema.credential_form_schemas = [] + # Use PropertyMock to ensure credential_form_schemas is iterable + provider_credential_schema = mocker.Mock() + type(provider_credential_schema).credential_form_schemas = mocker.PropertyMock(return_value=[]) + mock_entity.provider_credential_schema = provider_credential_schema + + model_credential_schema = mocker.Mock() + type(model_credential_schema).credential_form_schemas = mocker.PropertyMock(return_value=[]) + mock_entity.model_credential_schema = model_credential_schema return mock_entity -def test__to_model_settings(mocker, mock_provider_entity): +def test__to_model_settings(mocker: MockerFixture, mock_provider_entity): # Mocking the inputs provider_model_settings = [ ProviderModelSetting( @@ -79,7 +86,7 @@ def test__to_model_settings(mocker, mock_provider_entity): assert result[0].load_balancing_configs[1].name == "first" -def test__to_model_settings_only_one_lb(mocker, mock_provider_entity): +def test__to_model_settings_only_one_lb(mocker: MockerFixture, mock_provider_entity): # Mocking the inputs provider_model_settings = [ ProviderModelSetting( @@ -127,7 +134,7 @@ def test__to_model_settings_only_one_lb(mocker, mock_provider_entity): assert len(result[0].load_balancing_configs) == 0 -def test__to_model_settings_lb_disabled(mocker, mock_provider_entity): +def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_entity): # Mocking the inputs provider_model_settings = [ ProviderModelSetting( diff --git a/api/tests/unit_tests/core/tools/test_tool_entities.py b/api/tests/unit_tests/core/tools/test_tool_entities.py new file mode 100644 index 0000000000..a5b7e8a9a3 --- /dev/null +++ b/api/tests/unit_tests/core/tools/test_tool_entities.py @@ -0,0 +1,29 @@ +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage + + +def _make_identity() -> ToolIdentity: + return ToolIdentity( + author="author", + name="tool", + label=I18nObject(en_US="Label"), + provider="builtin", + ) + + +def test_log_message_metadata_none_defaults_to_empty_dict(): + log_message = ToolInvokeMessage.LogMessage( + id="log-1", + label="Log entry", + status=ToolInvokeMessage.LogMessage.LogStatus.START, + data={}, + metadata=None, + ) + + assert log_message.metadata == {} + + +def test_tool_entity_output_schema_none_defaults_to_empty_dict(): + entity = ToolEntity(identity=_make_identity(), output_schema=None) + + assert entity.output_schema == {} diff --git a/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py new file mode 100644 index 0000000000..68fe82d05e --- /dev/null +++ b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py @@ -0,0 +1,113 @@ +from core.variables.segments import ( + BooleanSegment, + IntegerSegment, + NoneSegment, + StringSegment, +) +from core.workflow.entities.variable_pool import VariablePool + + +class TestVariablePoolGetAndNestedAttribute: + # + # _get_nested_attribute tests + # + def test__get_nested_attribute_existing_key(self): + pool = VariablePool.empty() + obj = {"a": 123} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert segment.value == 123 + + def test__get_nested_attribute_missing_key(self): + pool = 
VariablePool.empty() + obj = {"a": 123} + segment = pool._get_nested_attribute(obj, "b") + assert segment is None + + def test__get_nested_attribute_non_dict(self): + pool = VariablePool.empty() + obj = ["not", "a", "dict"] + segment = pool._get_nested_attribute(obj, "a") + assert segment is None + + def test__get_nested_attribute_with_none_value(self): + pool = VariablePool.empty() + obj = {"a": None} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert isinstance(segment, NoneSegment) + + def test__get_nested_attribute_with_empty_string(self): + pool = VariablePool.empty() + obj = {"a": ""} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert isinstance(segment, StringSegment) + assert segment.value == "" + + # + # get tests + # + def test_get_simple_variable(self): + pool = VariablePool.empty() + pool.add(("node1", "var1"), "value1") + segment = pool.get(("node1", "var1")) + assert segment is not None + assert segment.value == "value1" + + def test_get_missing_variable(self): + pool = VariablePool.empty() + result = pool.get(("node1", "unknown")) + assert result is None + + def test_get_with_too_short_selector(self): + pool = VariablePool.empty() + result = pool.get(("only_node",)) + assert result is None + + def test_get_nested_object_attribute(self): + pool = VariablePool.empty() + obj_value = {"inner": "hello"} + pool.add(("node1", "obj"), obj_value) + + # simulate selector with nested attr + segment = pool.get(("node1", "obj", "inner")) + assert segment is not None + assert segment.value == "hello" + + def test_get_nested_object_missing_attribute(self): + pool = VariablePool.empty() + obj_value = {"inner": "hello"} + pool.add(("node1", "obj"), obj_value) + + result = pool.get(("node1", "obj", "not_exist")) + assert result is None + + def test_get_nested_object_attribute_with_falsy_values(self): + pool = VariablePool.empty() + obj_value = { + "inner_none": None, + "inner_empty": "", + "inner_zero": 0, + "inner_false": False, + } + pool.add(("node1", "obj"), obj_value) + + segment_none = pool.get(("node1", "obj", "inner_none")) + assert segment_none is not None + assert isinstance(segment_none, NoneSegment) + + segment_empty = pool.get(("node1", "obj", "inner_empty")) + assert segment_empty is not None + assert isinstance(segment_empty, StringSegment) + assert segment_empty.value == "" + + segment_zero = pool.get(("node1", "obj", "inner_zero")) + assert segment_zero is not None + assert isinstance(segment_zero, IntegerSegment) + assert segment_zero.value == 0 + + segment_false = pool.get(("node1", "obj", "inner_false")) + assert segment_false is not None + assert isinstance(segment_false, BooleanSegment) + assert segment_false.value is False diff --git a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py index 2c08fff27b..8677325d4e 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py @@ -35,11 +35,15 @@ class TestRedisChannel: """Test sending a command to Redis.""" mock_redis = MagicMock() mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + context = MagicMock() + context.__enter__.return_value = mock_pipe + 
context.__exit__.return_value = None + mock_redis.pipeline.return_value = context channel = RedisChannel(mock_redis, "test:key", 3600) + pending_key = "test:key:pending" + # Create a test command command = GraphEngineCommand(command_type=CommandType.ABORT) @@ -55,6 +59,7 @@ class TestRedisChannel: # Verify expire was set mock_pipe.expire.assert_called_once_with("test:key", 3600) + mock_pipe.set.assert_called_once_with(pending_key, "1", ex=3600) # Verify execute was called mock_pipe.execute.assert_called_once() @@ -62,33 +67,48 @@ class TestRedisChannel: def test_fetch_commands_empty(self): """Test fetching commands when Redis list is empty.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context] - # Simulate empty list - mock_pipe.execute.return_value = [[], 1] # Empty list, delete successful + # No pending marker + pending_pipe.execute.return_value = [None, 0] + mock_redis.llen.return_value = 0 channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() assert commands == [] - mock_pipe.lrange.assert_called_once_with("test:key", 0, -1) - mock_pipe.delete.assert_called_once_with("test:key") + mock_redis.pipeline.assert_called_once() + fetch_pipe.lrange.assert_not_called() + fetch_pipe.delete.assert_not_called() def test_fetch_commands_with_abort_command(self): """Test fetching abort commands from Redis.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Create abort command data abort_command = AbortCommand() command_json = json.dumps(abort_command.model_dump()) # Simulate Redis returning one command - mock_pipe.execute.return_value = [[command_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -100,9 +120,15 @@ class TestRedisChannel: def test_fetch_commands_multiple(self): """Test fetching multiple commands from Redis.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + 
mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Create multiple commands command1 = GraphEngineCommand(command_type=CommandType.ABORT) @@ -112,7 +138,8 @@ class TestRedisChannel: command2_json = json.dumps(command2.model_dump()) # Simulate Redis returning multiple commands - mock_pipe.execute.return_value = [[command1_json.encode(), command2_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command1_json.encode(), command2_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -124,9 +151,15 @@ class TestRedisChannel: def test_fetch_commands_skips_invalid_json(self): """Test that invalid JSON commands are skipped.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mix valid and invalid JSON valid_command = AbortCommand() @@ -134,7 +167,8 @@ class TestRedisChannel: invalid_json = b"invalid json {" # Simulate Redis returning mixed valid/invalid commands - mock_pipe.execute.return_value = [[invalid_json, valid_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[invalid_json, valid_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -147,7 +181,7 @@ class TestRedisChannel: """Test deserializing an abort command.""" channel = RedisChannel(MagicMock(), "test:key") - abort_data = {"command_type": CommandType.ABORT.value} + abort_data = {"command_type": CommandType.ABORT} command = channel._deserialize_command(abort_data) assert isinstance(command, AbortCommand) @@ -158,7 +192,7 @@ class TestRedisChannel: channel = RedisChannel(MagicMock(), "test:key") # For now, only ABORT is supported, but test generic handling - generic_data = {"command_type": CommandType.ABORT.value} + generic_data = {"command_type": CommandType.ABORT} command = channel._deserialize_command(generic_data) assert command is not None @@ -187,13 +221,20 @@ class TestRedisChannel: def test_atomic_fetch_and_clear(self): """Test that fetch_commands atomically fetches and clears the list.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] command = AbortCommand() command_json = json.dumps(command.model_dump()) - mock_pipe.execute.return_value = [[command_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") @@ 
-202,7 +243,29 @@ class TestRedisChannel: assert len(commands) == 1 # Verify both lrange and delete were called in the pipeline - assert mock_pipe.lrange.call_count == 1 - assert mock_pipe.delete.call_count == 1 - mock_pipe.lrange.assert_called_with("test:key", 0, -1) - mock_pipe.delete.assert_called_with("test:key") + assert fetch_pipe.lrange.call_count == 1 + assert fetch_pipe.delete.call_count == 1 + fetch_pipe.lrange.assert_called_with("test:key", 0, -1) + fetch_pipe.delete.assert_called_with("test:key") + + def test_fetch_commands_without_pending_marker_returns_empty(self): + """Ensure we avoid unnecessary list reads when pending flag is missing.""" + mock_redis = MagicMock() + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] + + # Pending flag absent + pending_pipe.execute.return_value = [None, 0] + channel = RedisChannel(mock_redis, "test:key") + commands = channel.fetch_commands() + + assert commands == [] + mock_redis.llen.assert_not_called() + assert mock_redis.pipeline.call_count == 1 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_answer_order_workflow.py b/api/tests/unit_tests/core/workflow/graph_engine/test_answer_order_workflow.py new file mode 100644 index 0000000000..6569439b56 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_answer_order_workflow.py @@ -0,0 +1,28 @@ +from .test_mock_config import MockConfigBuilder +from .test_table_runner import TableTestRunner, WorkflowTestCase + +LLM_NODE_ID = "1759052580454" + + +def test_answer_nodes_emit_in_order() -> None: + mock_config = ( + MockConfigBuilder() + .with_llm_response("unused default") + .with_node_output(LLM_NODE_ID, {"text": "mocked llm text"}) + .build() + ) + + expected_answer = "--- answer 1 ---\n\nfoo\n--- answer 2 ---\n\nmocked llm text\n" + + case = WorkflowTestCase( + fixture_path="test-answer-order", + query="", + expected_outputs={"answer": expected_answer}, + use_auto_mock=True, + mock_config=mock_config, + ) + + runner = TableTestRunner() + result = runner.run_test_case(case) + + assert result.success, result.error diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py b/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py new file mode 100644 index 0000000000..830fc0884d --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py @@ -0,0 +1,104 @@ +"""Tests for dispatcher command checking behavior.""" + +from __future__ import annotations + +import queue +from datetime import datetime + +from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus +from core.workflow.graph_engine.event_management.event_manager import EventManager +from core.workflow.graph_engine.orchestration.dispatcher import Dispatcher +from core.workflow.graph_events import NodeRunStartedEvent, NodeRunSucceededEvent +from core.workflow.node_events import NodeRunResult + + +class _StubExecutionCoordinator: + """Stub execution coordinator that tracks command checks.""" + + def __init__(self) -> None: + self.command_checks = 0 + self.scaling_checks = 0 + self._execution_complete = False + self.mark_complete_called = False + self.failed = False + + def check_commands(self) -> None: + 
self.command_checks += 1 + + def check_scaling(self) -> None: + self.scaling_checks += 1 + + def is_execution_complete(self) -> bool: + return self._execution_complete + + def mark_complete(self) -> None: + self.mark_complete_called = True + + def mark_failed(self, error: Exception) -> None: # pragma: no cover - defensive, not triggered in tests + self.failed = True + + def set_execution_complete(self) -> None: + self._execution_complete = True + + +class _StubEventHandler: + """Minimal event handler that marks execution complete after handling an event.""" + + def __init__(self, coordinator: _StubExecutionCoordinator) -> None: + self._coordinator = coordinator + self.events = [] + + def dispatch(self, event) -> None: + self.events.append(event) + self._coordinator.set_execution_complete() + + +def _run_dispatcher_for_event(event) -> int: + """Run the dispatcher loop for a single event and return command check count.""" + event_queue: queue.Queue = queue.Queue() + event_queue.put(event) + + coordinator = _StubExecutionCoordinator() + event_handler = _StubEventHandler(coordinator) + event_manager = EventManager() + + dispatcher = Dispatcher( + event_queue=event_queue, + event_handler=event_handler, + event_collector=event_manager, + execution_coordinator=coordinator, + ) + + dispatcher._dispatcher_loop() + + return coordinator.command_checks + + +def _make_started_event() -> NodeRunStartedEvent: + return NodeRunStartedEvent( + id="start-event", + node_id="node-1", + node_type=NodeType.CODE, + node_title="Test Node", + start_at=datetime.utcnow(), + ) + + +def _make_succeeded_event() -> NodeRunSucceededEvent: + return NodeRunSucceededEvent( + id="success-event", + node_id="node-1", + node_type=NodeType.CODE, + node_title="Test Node", + start_at=datetime.utcnow(), + node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED), + ) + + +def test_dispatcher_checks_commands_after_node_completion() -> None: + """Dispatcher should only check commands after node completion events.""" + started_checks = _run_dispatcher_for_event(_make_started_event()) + succeeded_checks = _run_dispatcher_for_event(_make_succeeded_event()) + + assert started_checks == 0 + assert succeeded_checks == 1 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py index 6a9bfbdcc3..c39c12925f 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py @@ -56,8 +56,8 @@ def test_mock_iteration_node_preserves_config(): workflow_id="test", graph_config={"nodes": [], "edges": []}, user_id="test", - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) @@ -117,8 +117,8 @@ def test_mock_loop_node_preserves_config(): workflow_id="test", graph_config={"nodes": [], "edges": []}, user_id="test", - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, call_depth=0, ) diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py b/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py index b286d99f70..e191246bed 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py +++ 
b/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py @@ -49,7 +49,7 @@ class TestRedisStopIntegration: # Verify the command data command_json = calls[0][0][1] command_data = json.loads(command_json) - assert command_data["command_type"] == CommandType.ABORT.value + assert command_data["command_type"] == CommandType.ABORT assert command_data["reason"] == "Test stop" def test_graph_engine_manager_handles_redis_failure_gracefully(self): @@ -122,7 +122,7 @@ class TestRedisStopIntegration: # Verify serialized command command_json = calls[0][0][1] command_data = json.loads(command_json) - assert command_data["command_type"] == CommandType.ABORT.value + assert command_data["command_type"] == CommandType.ABORT assert command_data["reason"] == "User requested stop" # Check expire was set @@ -132,17 +132,22 @@ class TestRedisStopIntegration: """Test RedisChannel correctly fetches and deserializes commands.""" # Setup mock_redis = MagicMock() - mock_pipeline = MagicMock() - mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) - mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mock command data - abort_command_json = json.dumps( - {"command_type": CommandType.ABORT.value, "reason": "Test abort", "payload": None} - ) + abort_command_json = json.dumps({"command_type": CommandType.ABORT, "reason": "Test abort", "payload": None}) # Mock pipeline execute to return commands - mock_pipeline.execute.return_value = [ + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [ [abort_command_json.encode()], # lrange result True, # delete result ] @@ -160,19 +165,29 @@ class TestRedisStopIntegration: assert commands[0].reason == "Test abort" # Verify Redis operations - mock_pipeline.lrange.assert_called_once_with(channel_key, 0, -1) - mock_pipeline.delete.assert_called_once_with(channel_key) + pending_pipe.get.assert_called_once_with(f"{channel_key}:pending") + pending_pipe.delete.assert_called_once_with(f"{channel_key}:pending") + fetch_pipe.lrange.assert_called_once_with(channel_key, 0, -1) + fetch_pipe.delete.assert_called_once_with(channel_key) + assert mock_redis.pipeline.call_count == 2 def test_redis_channel_fetch_commands_handles_invalid_json(self): """Test RedisChannel gracefully handles invalid JSON in commands.""" # Setup mock_redis = MagicMock() - mock_pipeline = MagicMock() - mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) - mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mock invalid command data - mock_pipeline.execute.return_value = [ + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [ [b"invalid json", b'{"command_type": "invalid_type"}'], # lrange 
result True, # delete result ] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py b/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py new file mode 100644 index 0000000000..a7309f64de --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py @@ -0,0 +1,41 @@ +"""Validate conversation variable updates inside an iteration workflow. + +This test uses the ``update-conversation-variable-in-iteration`` fixture, which +routes ``sys.query`` into the conversation variable ``answer`` from within an +iteration container. The workflow should surface that updated conversation +variable in the final answer output. + +Code nodes in the fixture are mocked because their concrete outputs are not +relevant to verifying variable propagation semantics. +""" + +from .test_mock_config import MockConfigBuilder +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def test_update_conversation_variable_in_iteration(): + fixture_name = "update-conversation-variable-in-iteration" + user_query = "ensure conversation variable syncs" + + mock_config = ( + MockConfigBuilder() + .with_node_output("1759032363865", {"result": [1]}) + .with_node_output("1759032476318", {"result": ""}) + .build() + ) + + case = WorkflowTestCase( + fixture_path=fixture_name, + use_auto_mock=True, + mock_config=mock_config, + query=user_query, + expected_outputs={"answer": user_query}, + description="Conversation variable updated within iteration should flow to answer output.", + ) + + runner = TableTestRunner() + result = runner.run_test_case(case) + + assert result.success, f"Workflow execution failed: {result.error}" + assert result.actual_outputs is not None + assert result.actual_outputs.get("answer") == user_query diff --git a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py index b942614232..55fe62ca43 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py @@ -35,7 +35,7 @@ def list_operator_node(): "extract_by": ExtractConfig(enabled=False, serial="1"), "title": "Test Title", } - node_data = ListOperatorNodeData(**config) + node_data = ListOperatorNodeData.model_validate(config) node_config = { "id": "test_node_id", "data": node_data.model_dump(), diff --git a/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py b/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py index f990280c5f..47ef289ef3 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_question_classifier_node.py @@ -17,7 +17,7 @@ def test_init_question_classifier_node_data(): "vision": {"enabled": True, "configs": {"variable_selector": ["image"], "detail": "low"}}, } - node_data = QuestionClassifierNodeData(**data) + node_data = QuestionClassifierNodeData.model_validate(data) assert node_data.query_variable_selector == ["id", "name"] assert node_data.model.provider == "openai" @@ -49,7 +49,7 @@ def test_init_question_classifier_node_data_without_vision_config(): }, } - node_data = QuestionClassifierNodeData(**data) + node_data = QuestionClassifierNodeData.model_validate(data) assert node_data.query_variable_selector == ["id", "name"] assert node_data.model.provider == "openai" diff --git 
a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py index 3e50d5522a..6189febdf5 100644 --- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py @@ -87,7 +87,7 @@ def test_overwrite_string_variable(): "data": { "title": "test", "assigned_variable_selector": ["conversation", conversation_variable.name], - "write_mode": WriteMode.OVER_WRITE.value, + "write_mode": WriteMode.OVER_WRITE, "input_variable_selector": [DEFAULT_NODE_ID, input_variable.name], }, } @@ -189,7 +189,7 @@ def test_append_variable_to_array(): "data": { "title": "test", "assigned_variable_selector": ["conversation", conversation_variable.name], - "write_mode": WriteMode.APPEND.value, + "write_mode": WriteMode.APPEND, "input_variable_selector": [DEFAULT_NODE_ID, input_variable.name], }, } @@ -282,7 +282,7 @@ def test_clear_array(): "data": { "title": "test", "assigned_variable_selector": ["conversation", conversation_variable.name], - "write_mode": WriteMode.CLEAR.value, + "write_mode": WriteMode.CLEAR, "input_variable_selector": [], }, } diff --git a/api/tests/unit_tests/core/workflow/test_system_variable.py b/api/tests/unit_tests/core/workflow/test_system_variable.py index 11d788ed79..3ae5edb383 100644 --- a/api/tests/unit_tests/core/workflow/test_system_variable.py +++ b/api/tests/unit_tests/core/workflow/test_system_variable.py @@ -46,7 +46,7 @@ class TestSystemVariableSerialization: def test_basic_deserialization(self): """Test successful deserialization from JSON structure with all fields correctly mapped.""" # Test with complete data - system_var = SystemVariable(**COMPLETE_VALID_DATA) + system_var = SystemVariable.model_validate(COMPLETE_VALID_DATA) # Verify all fields are correctly mapped assert system_var.user_id == COMPLETE_VALID_DATA["user_id"] @@ -59,7 +59,7 @@ class TestSystemVariableSerialization: assert system_var.files == [] # Test with minimal data (only required fields) - minimal_var = SystemVariable(**VALID_BASE_DATA) + minimal_var = SystemVariable.model_validate(VALID_BASE_DATA) assert minimal_var.user_id == VALID_BASE_DATA["user_id"] assert minimal_var.app_id == VALID_BASE_DATA["app_id"] assert minimal_var.workflow_id == VALID_BASE_DATA["workflow_id"] @@ -75,12 +75,12 @@ class TestSystemVariableSerialization: # Test workflow_run_id only (preferred alias) data_run_id = {**VALID_BASE_DATA, "workflow_run_id": workflow_id} - system_var1 = SystemVariable(**data_run_id) + system_var1 = SystemVariable.model_validate(data_run_id) assert system_var1.workflow_execution_id == workflow_id # Test workflow_execution_id only (direct field name) data_execution_id = {**VALID_BASE_DATA, "workflow_execution_id": workflow_id} - system_var2 = SystemVariable(**data_execution_id) + system_var2 = SystemVariable.model_validate(data_execution_id) assert system_var2.workflow_execution_id == workflow_id # Test both present - workflow_run_id should take precedence @@ -89,17 +89,17 @@ class TestSystemVariableSerialization: "workflow_execution_id": "should-be-ignored", "workflow_run_id": workflow_id, } - system_var3 = SystemVariable(**data_both) + system_var3 = SystemVariable.model_validate(data_both) assert system_var3.workflow_execution_id == workflow_id # Test neither present - should be None - system_var4 = SystemVariable(**VALID_BASE_DATA) + system_var4 = 
SystemVariable.model_validate(VALID_BASE_DATA) assert system_var4.workflow_execution_id is None def test_serialization_round_trip(self): """Test that serialize → deserialize produces the same result with alias handling.""" # Create original SystemVariable - original = SystemVariable(**COMPLETE_VALID_DATA) + original = SystemVariable.model_validate(COMPLETE_VALID_DATA) # Serialize to dict serialized = original.model_dump(mode="json") @@ -110,7 +110,7 @@ class TestSystemVariableSerialization: assert serialized["workflow_run_id"] == COMPLETE_VALID_DATA["workflow_run_id"] # Deserialize back - deserialized = SystemVariable(**serialized) + deserialized = SystemVariable.model_validate(serialized) # Verify all fields match after round-trip assert deserialized.user_id == original.user_id @@ -125,7 +125,7 @@ class TestSystemVariableSerialization: def test_json_round_trip(self): """Test JSON serialization/deserialization consistency with proper structure.""" # Create original SystemVariable - original = SystemVariable(**COMPLETE_VALID_DATA) + original = SystemVariable.model_validate(COMPLETE_VALID_DATA) # Serialize to JSON string json_str = original.model_dump_json() @@ -137,7 +137,7 @@ class TestSystemVariableSerialization: assert json_data["workflow_run_id"] == COMPLETE_VALID_DATA["workflow_run_id"] # Deserialize from JSON data - deserialized = SystemVariable(**json_data) + deserialized = SystemVariable.model_validate(json_data) # Verify key fields match after JSON round-trip assert deserialized.workflow_execution_id == original.workflow_execution_id @@ -149,13 +149,13 @@ class TestSystemVariableSerialization: """Test deserialization with File objects in the files field - SystemVariable specific logic.""" # Test with empty files list data_empty = {**VALID_BASE_DATA, "files": []} - system_var_empty = SystemVariable(**data_empty) + system_var_empty = SystemVariable.model_validate(data_empty) assert system_var_empty.files == [] # Test with single File object test_file = create_test_file() data_single = {**VALID_BASE_DATA, "files": [test_file]} - system_var_single = SystemVariable(**data_single) + system_var_single = SystemVariable.model_validate(data_single) assert len(system_var_single.files) == 1 assert system_var_single.files[0].filename == "test.txt" assert system_var_single.files[0].tenant_id == "test-tenant-id" @@ -179,14 +179,14 @@ class TestSystemVariableSerialization: ) data_multiple = {**VALID_BASE_DATA, "files": [file1, file2]} - system_var_multiple = SystemVariable(**data_multiple) + system_var_multiple = SystemVariable.model_validate(data_multiple) assert len(system_var_multiple.files) == 2 assert system_var_multiple.files[0].filename == "doc1.txt" assert system_var_multiple.files[1].filename == "image.jpg" # Verify files field serialization/deserialization serialized = system_var_multiple.model_dump(mode="json") - deserialized = SystemVariable(**serialized) + deserialized = SystemVariable.model_validate(serialized) assert len(deserialized.files) == 2 assert deserialized.files[0].filename == "doc1.txt" assert deserialized.files[1].filename == "image.jpg" @@ -197,7 +197,7 @@ class TestSystemVariableSerialization: # Create with workflow_run_id (alias) data_with_alias = {**VALID_BASE_DATA, "workflow_run_id": workflow_id} - system_var = SystemVariable(**data_with_alias) + system_var = SystemVariable.model_validate(data_with_alias) # Serialize and verify alias is used serialized = system_var.model_dump() @@ -205,7 +205,7 @@ class TestSystemVariableSerialization: assert "workflow_execution_id" 
not in serialized # Deserialize and verify field mapping - deserialized = SystemVariable(**serialized) + deserialized = SystemVariable.model_validate(serialized) assert deserialized.workflow_execution_id == workflow_id # Test JSON serialization path @@ -213,7 +213,7 @@ class TestSystemVariableSerialization: assert json_serialized["workflow_run_id"] == workflow_id assert "workflow_execution_id" not in json_serialized - json_deserialized = SystemVariable(**json_serialized) + json_deserialized = SystemVariable.model_validate(json_serialized) assert json_deserialized.workflow_execution_id == workflow_id def test_model_validator_serialization_logic(self): @@ -222,7 +222,7 @@ class TestSystemVariableSerialization: # Test direct instantiation with workflow_execution_id (should work) data1 = {**VALID_BASE_DATA, "workflow_execution_id": workflow_id} - system_var1 = SystemVariable(**data1) + system_var1 = SystemVariable.model_validate(data1) assert system_var1.workflow_execution_id == workflow_id # Test serialization of the above (should use alias) @@ -236,7 +236,7 @@ class TestSystemVariableSerialization: "workflow_execution_id": "should-be-removed", "workflow_run_id": workflow_id, } - system_var2 = SystemVariable(**data2) + system_var2 = SystemVariable.model_validate(data2) assert system_var2.workflow_execution_id == workflow_id # Verify serialization consistency diff --git a/api/tests/unit_tests/extensions/storage/test_supabase_storage.py b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py index 958072223e..476f87269c 100644 --- a/api/tests/unit_tests/extensions/storage/test_supabase_storage.py +++ b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py @@ -172,73 +172,31 @@ class TestSupabaseStorage: assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] mock_client.storage.from_().download.assert_called_with("test.txt") - def test_exists_with_list_containing_items(self, storage_with_mock_client): - """Test exists returns True when list() returns items (using len() > 0).""" + def test_exists_returns_true_when_file_found(self, storage_with_mock_client): + """Test exists returns True when list() returns items.""" storage, mock_client = storage_with_mock_client - # Mock list return with special object that has count() method - mock_list_result = Mock() - mock_list_result.count.return_value = 1 - mock_client.storage.from_().list.return_value = mock_list_result + mock_client.storage.from_().list.return_value = [{"name": "test.txt"}] result = storage.exists("test.txt") assert result is True - # from_ gets called during init too, so just check it was called with the right bucket assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") + mock_client.storage.from_().list.assert_called_with(path="test.txt") - def test_exists_with_count_method_greater_than_zero(self, storage_with_mock_client): - """Test exists returns True when list result has count() > 0.""" + def test_exists_returns_false_when_file_not_found(self, storage_with_mock_client): + """Test exists returns False when list() returns an empty list.""" storage, mock_client = storage_with_mock_client - # Mock list return with count() method - mock_list_result = Mock() - mock_list_result.count.return_value = 1 - mock_client.storage.from_().list.return_value = mock_list_result - - result = storage.exists("test.txt") - - assert result is True - # Verify the correct calls 
were made - assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - mock_list_result.count.assert_called() - - def test_exists_with_count_method_zero(self, storage_with_mock_client): - """Test exists returns False when list result has count() == 0.""" - storage, mock_client = storage_with_mock_client - - # Mock list return with count() method returning 0 - mock_list_result = Mock() - mock_list_result.count.return_value = 0 - mock_client.storage.from_().list.return_value = mock_list_result + mock_client.storage.from_().list.return_value = [] result = storage.exists("test.txt") assert result is False - # Verify the correct calls were made assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - mock_list_result.count.assert_called() + mock_client.storage.from_().list.assert_called_with(path="test.txt") - def test_exists_with_empty_list(self, storage_with_mock_client): - """Test exists returns False when list() returns empty list.""" - storage, mock_client = storage_with_mock_client - - # Mock list return with special object that has count() method returning 0 - mock_list_result = Mock() - mock_list_result.count.return_value = 0 - mock_client.storage.from_().list.return_value = mock_list_result - - result = storage.exists("test.txt") - - assert result is False - # Verify the correct calls were made - assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - - def test_delete_calls_remove_with_filename(self, storage_with_mock_client): + def test_delete_calls_remove_with_filename_in_list(self, storage_with_mock_client): """Test delete calls remove([...]) (some client versions require a list).""" storage, mock_client = storage_with_mock_client @@ -247,7 +205,7 @@ class TestSupabaseStorage: storage.delete(filename) mock_client.storage.from_.assert_called_once_with("test-bucket") - mock_client.storage.from_().remove.assert_called_once_with(filename) + mock_client.storage.from_().remove.assert_called_once_with([filename]) def test_bucket_exists_returns_true_when_bucket_found(self): """Test bucket_exists returns True when bucket is found in list.""" diff --git a/api/tests/unit_tests/libs/test_helper.py b/api/tests/unit_tests/libs/test_helper.py index b7701055f5..85789bfa7e 100644 --- a/api/tests/unit_tests/libs/test_helper.py +++ b/api/tests/unit_tests/libs/test_helper.py @@ -11,7 +11,7 @@ class TestExtractTenantId: def test_extract_tenant_id_from_account_with_tenant(self): """Test extracting tenant_id from Account with current_tenant_id.""" # Create a mock Account object - account = Account() + account = Account(name="test", email="test@example.com") # Mock the current_tenant_id property account._current_tenant = type("MockTenant", (), {"id": "account-tenant-123"})() @@ -21,7 +21,7 @@ class TestExtractTenantId: def test_extract_tenant_id_from_account_without_tenant(self): """Test extracting tenant_id from Account without current_tenant_id.""" # Create a mock Account object - account = Account() + account = Account(name="test", email="test@example.com") account._current_tenant = None tenant_id = extract_tenant_id(account) diff --git a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py index 
04988e85d8..1659205ec0 100644 --- a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py +++ b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py @@ -1,3 +1,5 @@ +from unittest.mock import patch + import pytest from tos import TosClientV2 # type: ignore @@ -13,7 +15,13 @@ class TestVolcengineTos(BaseStorageTest): @pytest.fixture(autouse=True) def setup_method(self, setup_volcengine_tos_mock): """Executed before each test method.""" - self.storage = VolcengineTosStorage() + with patch("extensions.storage.volcengine_tos_storage.dify_config") as mock_config: + mock_config.VOLCENGINE_TOS_ACCESS_KEY = "test_access_key" + mock_config.VOLCENGINE_TOS_SECRET_KEY = "test_secret_key" + mock_config.VOLCENGINE_TOS_ENDPOINT = "test_endpoint" + mock_config.VOLCENGINE_TOS_REGION = "test_region" + self.storage = VolcengineTosStorage() + self.storage.bucket_name = get_example_bucket() self.storage.client = TosClientV2( ak="dify", diff --git a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py index fadd1ee88f..5cba43714a 100644 --- a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py +++ b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py @@ -59,12 +59,11 @@ def session(): @pytest.fixture def mock_user(): """Create a user instance for testing.""" - user = Account() + user = Account(name="test", email="test@example.com") user.id = "test-user-id" - tenant = Tenant() + tenant = Tenant(name="Test Workspace") tenant.id = "test-tenant" - tenant.name = "Test Workspace" user._current_tenant = MagicMock() user._current_tenant.id = "test-tenant" @@ -299,7 +298,7 @@ def test_to_domain_model(repository): db_model.predecessor_node_id = "test-predecessor-id" db_model.node_execution_id = "test-node-execution-id" db_model.node_id = "test-node-id" - db_model.node_type = NodeType.START.value + db_model.node_type = NodeType.START db_model.title = "Test Node" db_model.inputs = json.dumps(inputs_dict) db_model.process_data = json.dumps(process_data_dict) diff --git a/api/tests/unit_tests/services/test_metadata_bug_complete.py b/api/tests/unit_tests/services/test_metadata_bug_complete.py index 0ff1edc950..31fe9b2868 100644 --- a/api/tests/unit_tests/services/test_metadata_bug_complete.py +++ b/api/tests/unit_tests/services/test_metadata_bug_complete.py @@ -118,7 +118,7 @@ class TestMetadataBugCompleteValidation: # But would crash when trying to create MetadataArgs with pytest.raises((ValueError, TypeError)): - MetadataArgs(**args) + MetadataArgs.model_validate(args) def test_7_end_to_end_validation_layers(self): """Test all validation layers work together correctly.""" @@ -131,7 +131,7 @@ class TestMetadataBugCompleteValidation: valid_data = {"type": "string", "name": "test_metadata"} # Should create valid Pydantic object - metadata_args = MetadataArgs(**valid_data) + metadata_args = MetadataArgs.model_validate(valid_data) assert metadata_args.type == "string" assert metadata_args.name == "test_metadata" diff --git a/api/tests/unit_tests/services/test_metadata_nullable_bug.py b/api/tests/unit_tests/services/test_metadata_nullable_bug.py index d151100cf3..c8cd7025c2 100644 --- a/api/tests/unit_tests/services/test_metadata_nullable_bug.py +++ b/api/tests/unit_tests/services/test_metadata_nullable_bug.py @@ -76,7 +76,7 @@ class TestMetadataNullableBug: # Step 2: Try to create MetadataArgs with None values # This 
should fail at Pydantic validation level with pytest.raises((ValueError, TypeError)): - metadata_args = MetadataArgs(**args) + metadata_args = MetadataArgs.model_validate(args) # Step 3: If we bypass Pydantic (simulating the bug scenario) # Move this outside the request context to avoid Flask-Login issues diff --git a/api/tests/unit_tests/services/workflow/test_workflow_converter.py b/api/tests/unit_tests/services/workflow/test_workflow_converter.py index 2ca781bae5..63ce4c0c3c 100644 --- a/api/tests/unit_tests/services/workflow/test_workflow_converter.py +++ b/api/tests/unit_tests/services/workflow/test_workflow_converter.py @@ -107,7 +107,7 @@ def test__convert_to_http_request_node_for_chatbot(default_variables): assert body_data body_data_json = json.loads(body_data) - assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY.value + assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY body_params = body_data_json["params"] assert body_params["app_id"] == app_model.id @@ -168,7 +168,7 @@ def test__convert_to_http_request_node_for_workflow_app(default_variables): assert body_data body_data_json = json.loads(body_data) - assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY.value + assert body_data_json["point"] == APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY body_params = body_data_json["params"] assert body_params["app_id"] == app_model.id diff --git a/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py index 7e324ca4db..66361f26e0 100644 --- a/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py +++ b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py @@ -47,7 +47,8 @@ class TestDraftVariableSaver: def test__should_variable_be_visible(self): mock_session = MagicMock(spec=Session) - mock_user = Account(id=str(uuid.uuid4())) + mock_user = Account(name="test", email="test@example.com") + mock_user.id = str(uuid.uuid4()) test_app_id = self._get_test_app_id() saver = DraftVariableSaver( session=mock_session, diff --git a/api/uv.lock b/api/uv.lock index 7ce71cd215..96aee8a97b 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1,13 +1,19 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.11, <3.13" resolution-markers = [ - "python_full_version >= '3.12.4' and sys_platform == 'linux'", - "python_full_version >= '3.12.4' and sys_platform != 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and sys_platform != 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform != 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12.4' and 
platform_python_implementation == 'PyPy' and sys_platform != 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", + "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'", + "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", ] [[package]] @@ -42,7 +48,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.12.15" +version = "3.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -53,42 +59,42 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" } +sdist = { url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hash = "sha256:378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67", size = 7832348, upload-time = "2025-10-06T19:58:48.089Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, - { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, - { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, - { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, - { url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, - { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, - { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, - { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, - { url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, - { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, - { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, - { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, - { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, - { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, - { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, - { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, - { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = 
"sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, - { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, - { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, - { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, - { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, - { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, - { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, - { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, - { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, - { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, - { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, - { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, - { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, - { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, - { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20", size = 741374, upload-time = "2025-10-06T19:55:13.095Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca", size = 494956, upload-time = "2025-10-06T19:55:14.687Z" }, + { url = "https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1", size = 491154, upload-time = "2025-10-06T19:55:16.661Z" }, + { url = "https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409", size = 1745707, upload-time = "2025-10-06T19:55:18.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1", size = 1702404, upload-time = "2025-10-06T19:55:20.098Z" }, + { url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390", size = 1805519, upload-time = "2025-10-06T19:55:21.811Z" }, + { url = "https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504", size = 1893904, upload-time = "2025-10-06T19:55:23.462Z" }, + { url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3", size = 1745043, upload-time = "2025-10-06T19:55:25.208Z" }, + { url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8", size = 1604765, upload-time = "2025-10-06T19:55:27.157Z" }, + { url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad", size = 1721737, upload-time = "2025-10-06T19:55:28.854Z" }, + { url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe", size = 1716052, upload-time = "2025-10-06T19:55:30.563Z" }, + { url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114", size = 1783532, upload-time = "2025-10-06T19:55:32.798Z" }, + { url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a", size = 1593072, upload-time = "2025-10-06T19:55:34.686Z" }, + { url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80", size = 1798613, upload-time = "2025-10-06T19:55:36.393Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6", size = 1737480, upload-time = "2025-10-06T19:55:38.043Z" }, + { url = "https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl", hash = "sha256:2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820", size = 429824, upload-time = "2025-10-06T19:55:39.595Z" }, + { url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469", size = 454137, upload-time = "2025-10-06T19:55:41.617Z" }, + { url = "https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a", size = 735585, upload-time = "2025-10-06T19:55:43.401Z" }, + { url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985", size = 490613, upload-time = "2025-10-06T19:55:45.237Z" }, + { url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae", size = 489750, upload-time = "2025-10-06T19:55:46.937Z" }, + { url = "https://files.pythonhosted.org/packages/33/59/752640c2b86ca987fe5703a01733b00d375e6cd2392bc7574489934e64e5/aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c32ada0abb4bc94c30be2b681c42f058ab104d048da6f0148280a51ce98add8c", size = 1736812, upload-time = "2025-10-06T19:55:48.917Z" }, + { url = "https://files.pythonhosted.org/packages/3d/c6/dd6b86ddb852a7fdbcdc7a45b6bdc80178aef713c08279afcaee7a5a9f07/aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4af1f8877ca46ecdd0bc0d4a6b66d4b2bddc84a79e2e8366bc0d5308e76bceb8", size = 1698535, upload-time = "2025-10-06T19:55:50.75Z" }, + { url = "https://files.pythonhosted.org/packages/33/e2/27c92d205b9e8cee7661670e8e9f187931b71e26d42796b153d2a0ba6949/aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e04ab827ec4f775817736b20cdc8350f40327f9b598dec4e18c9ffdcbea88a93", size = 1766573, upload-time = "2025-10-06T19:55:53.106Z" }, + { url = "https://files.pythonhosted.org/packages/df/6a/1fc1ad71d130a30f7a207d8d958a41224c29b834463b5185efb2dbff6ad4/aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a6d9487b9471ec36b0faedf52228cd732e89be0a2bbd649af890b5e2ce422353", size = 1865229, upload-time = "2025-10-06T19:55:55.01Z" }, + { url = "https://files.pythonhosted.org/packages/14/51/d0c1701a79fcb0109cff5304da16226581569b89a282d8e7f1549a7e3ec0/aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e66c57416352f36bf98f6641ddadd47c93740a22af7150d3e9a1ef6e983f9a8", size = 1750379, upload-time = "2025-10-06T19:55:57.219Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3d/2ec4b934f85856de1c0c18e90adc8902adadbfac2b3c0b831bfeb7214fc8/aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:469167d5372f5bb3aedff4fc53035d593884fff2617a75317740e885acd48b04", size = 1560798, upload-time = "2025-10-06T19:55:58.888Z" }, + { url = "https://files.pythonhosted.org/packages/38/56/e23d9c3e13006e599fdce3851517c70279e177871e3e567d22cf3baf5d6c/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a9f3546b503975a69b547c9fd1582cad10ede1ce6f3e313a2f547c73a3d7814f", size = 1697552, upload-time = "2025-10-06T19:56:01.172Z" }, + { url = "https://files.pythonhosted.org/packages/56/cb/caa32c2ccaeca0a3dc39129079fd2ad02f9406c3a5f7924340435b87d4cd/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6b4174fcec98601f0cfdf308ee29a6ae53c55f14359e848dab4e94009112ee7d", size = 1718609, upload-time = "2025-10-06T19:56:03.102Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c0/5911856fef9e40fd1ccbb8c54a90116875d5753a92c1cac66ce2059b390d/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a533873a7a4ec2270fb362ee5a0d3b98752e4e1dc9042b257cd54545a96bd8ed", size = 1735887, upload-time = "2025-10-06T19:56:04.841Z" }, + { url = "https://files.pythonhosted.org/packages/0e/48/8d6f4757a24c02f0a454c043556593a00645d10583859f7156db44d8b7d3/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ce887c5e54411d607ee0959cac15bb31d506d86a9bcaddf0b7e9d63325a7a802", size = 1553079, upload-time = "2025-10-06T19:56:07.197Z" }, + { url = "https://files.pythonhosted.org/packages/39/fa/e82c9445e40b50e46770702b5b6ca2f767966d53e1a5eef03583ceac6df6/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d871f6a30d43e32fc9252dc7b9febe1a042b3ff3908aa83868d7cf7c9579a59b", size = 1762750, upload-time = "2025-10-06T19:56:09.376Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e6/9d30554e7f1e700bfeae4ab6b153d5dc7441606a9ec5e929288fa93a1477/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:222c828243b4789d79a706a876910f656fad4381661691220ba57b2ab4547865", size = 1717461, upload-time = "2025-10-06T19:56:11.551Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e5/29cca547990a59ea54f0674fc01de98519fc628cfceeab6175711750eca7/aiohttp-3.13.0-cp312-cp312-win32.whl", hash = "sha256:682d2e434ff2f1108314ff7f056ce44e457f12dbed0249b24e106e385cf154b9", size = 424633, upload-time = "2025-10-06T19:56:13.316Z" }, + { url = "https://files.pythonhosted.org/packages/8b/68/46dd042d7bc62eab30bafdb8569f55ef125c3a88bb174270324224f8df56/aiohttp-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a2be20eb23888df130214b91c262a90e2de1553d6fb7de9e9010cec994c0ff2", size = 451401, upload-time = "2025-10-06T19:56:15.188Z" }, ] [[package]] @@ -118,16 +124,16 @@ wheels = [ [[package]] name = "alembic" -version = "1.16.5" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/ca/4dc52902cf3491892d464f5265a81e9dff094692c8a049a3ed6a05fe7ee8/alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e", size = 1969868, upload-time = "2025-08-27T18:02:05.668Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/45/6f4555f2039f364c3ce31399529dcf48dd60726ff3715ad67f547d87dfd2/alembic-1.17.0.tar.gz", hash = 
"sha256:4652a0b3e19616b57d652b82bfa5e38bf5dbea0813eed971612671cb9e90c0fe", size = 1975526, upload-time = "2025-10-11T18:40:13.585Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/4a/4c61d4c84cfd9befb6fa08a702535b27b21fff08c946bc2f6139decbf7f7/alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3", size = 247355, upload-time = "2025-08-27T18:02:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/44/1f/38e29b06bfed7818ebba1f84904afdc8153ef7b6c7e0d8f3bc6643f5989c/alembic-1.17.0-py3-none-any.whl", hash = "sha256:80523bc437d41b35c5db7e525ad9d908f79de65c27d6a5a5eab6df348a352d99", size = 247449, upload-time = "2025-10-11T18:40:16.288Z" }, ] [[package]] @@ -333,16 +339,16 @@ wheels = [ [[package]] name = "anyio" -version = "4.10.0" +version = "4.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] @@ -377,11 +383,11 @@ wheels = [ [[package]] name = "asgiref" -version = "3.9.1" +version = "3.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870, upload-time = "2025-07-08T09:07:43.344Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/08/4dfec9b90758a59acc6be32ac82e98d1fbfc321cb5cfa410436dbacf821c/asgiref-3.10.0.tar.gz", hash = "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e", size = 37483, upload-time = "2025-10-05T09:15:06.557Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" }, + { url = "https://files.pythonhosted.org/packages/17/9c/fc2331f538fbf7eedba64b2052e99ccf9ba9d6888e2f41441ee28847004b/asgiref-3.10.0-py3-none-any.whl", hash = "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", size = 24050, upload-time = "2025-10-05T09:15:05.11Z" }, ] [[package]] @@ -395,23 +401,23 @@ wheels = [ [[package]] name = 
"attrs" -version = "25.3.0" +version = "25.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] [[package]] name = "authlib" -version = "1.6.4" +version = "1.6.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/bb/73a1f1c64ee527877f64122422dafe5b87a846ccf4ac933fe21bcbb8fee8/authlib-1.6.4.tar.gz", hash = "sha256:104b0442a43061dc8bc23b133d1d06a2b0a9c2e3e33f34c4338929e816287649", size = 164046, upload-time = "2025-09-17T09:59:23.897Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/3f/1d3bbd0bf23bdd99276d4def22f29c27a914067b4cf66f753ff9b8bbd0f3/authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b", size = 164553, upload-time = "2025-10-02T13:36:09.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/aa/91355b5f539caf1b94f0e66ff1e4ee39373b757fce08204981f7829ede51/authlib-1.6.4-py2.py3-none-any.whl", hash = "sha256:39313d2a2caac3ecf6d8f95fbebdfd30ae6ea6ae6a6db794d976405fdd9aa796", size = 243076, upload-time = "2025-09-17T09:59:22.259Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/5082412d1ee302e9e7d80b6949bc4d2a8fa1149aaab610c5fc24709605d6/authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a", size = 243608, upload-time = "2025-10-02T13:36:07.637Z" }, ] [[package]] @@ -445,16 +451,17 @@ wheels = [ [[package]] name = "azure-storage-blob" -version = "12.13.0" +version = "12.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core" }, { name = "cryptography" }, - { name = "msrest" }, + { name = "isodate" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/93/b13bf390e940a79a399981f75ac8d2e05a70112a95ebb7b41e9b752d2921/azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884", size = 684838, upload-time = "2022-07-07T22:35:44.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332, upload-time = "2025-07-16T21:34:07.644Z" } wheels = [ - { url 
= "https://files.pythonhosted.org/packages/0e/2a/b8246df35af68d64fb7292c93dbbde63cd25036f2f669a9d9ae59e518c76/azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", size = 377309, upload-time = "2022-07-07T22:35:41.905Z" }, + { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907, upload-time = "2025-07-16T21:34:09.367Z" }, ] [[package]] @@ -468,68 +475,70 @@ wheels = [ [[package]] name = "basedpyright" -version = "1.31.4" +version = "1.31.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodejs-wheel-binaries" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/53/570b03ec0445a9b2cc69788482c1d12902a9b88a9b159e449c4c537c4e3a/basedpyright-1.31.4.tar.gz", hash = "sha256:2450deb16530f7c88c1a7da04530a079f9b0b18ae1c71cb6f812825b3b82d0b1", size = 22494467, upload-time = "2025-09-03T13:05:55.817Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/ba/ed69e8df732a09c8ca469f592c8e08707fe29149735b834c276d94d4a3da/basedpyright-1.31.7.tar.gz", hash = "sha256:394f334c742a19bcc5905b2455c9f5858182866b7679a6f057a70b44b049bceb", size = 22710948, upload-time = "2025-10-11T05:12:48.3Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/40/d1047a5addcade9291685d06ef42a63c1347517018bafd82747af9da0294/basedpyright-1.31.4-py3-none-any.whl", hash = "sha256:055e4a38024bd653be12d6216c1cfdbee49a1096d342b4d5f5b4560f7714b6fc", size = 11731440, upload-time = "2025-09-03T13:05:52.308Z" }, + { url = "https://files.pythonhosted.org/packages/f8/90/ce01ad2d0afdc1b82b8b5aaba27e60d2e138e39d887e71c35c55d8f1bfcd/basedpyright-1.31.7-py3-none-any.whl", hash = "sha256:7c54beb7828c9ed0028630aaa6904f395c27e5a9f5a313aa9e91fc1d11170831", size = 11817571, upload-time = "2025-10-11T05:12:45.432Z" }, ] [[package]] name = "bce-python-sdk" -version = "0.9.45" +version = "0.9.46" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "future" }, { name = "pycryptodome" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/19/0f23aedecb980288e663ba9ce81fa1545d6331d62bd75262fca49678052d/bce_python_sdk-0.9.45.tar.gz", hash = "sha256:ba60d66e80fcd012a6362bf011fee18bca616b0005814d261aba3aa202f7025f", size = 252769, upload-time = "2025-08-28T10:24:54.303Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/57/f98bc15c12cc022ef195f689ee57ed61d8a8677bda3089c4d58fb1872d45/bce_python_sdk-0.9.46.tar.gz", hash = "sha256:4bf01b22e6d172ccd94aa201f8bc6f2a98d0da4784160e77cfacfcc71c2686be", size = 253806, upload-time = "2025-09-15T06:51:52.753Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/1f/d3fd91808a1f4881b4072424390d38e85707edd75ed5d9cea2a0299a7a7a/bce_python_sdk-0.9.45-py3-none-any.whl", hash = "sha256:cce3ca7ad4de8be2cc0722c1d6a7db7be6f2833f8d9ca7f892c572e6ff78a959", size = 352012, upload-time = "2025-08-28T10:24:52.387Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f5/20e9ab324b22a77970c57bc8267e586e85e2aa1277d80f2c58ca8a39a13e/bce_python_sdk-0.9.46-py3-none-any.whl", hash = "sha256:655074da6592ce8b036f605d9a272bfdcd1f515eb2f8e3f0333bb7cc62f700cb", size = 352622, upload-time = "2025-09-15T06:51:50.811Z" }, ] [[package]] name = "bcrypt" -version = "4.3.0" +version = "5.0.0" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697, upload-time = "2025-02-28T01:24:09.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019, upload-time = "2025-02-28T01:23:05.838Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174, upload-time = "2025-02-28T01:23:07.274Z" }, - { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870, upload-time = "2025-02-28T01:23:09.151Z" }, - { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601, upload-time = "2025-02-28T01:23:11.461Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660, upload-time = "2025-02-28T01:23:12.989Z" }, - { url = "https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083, upload-time = "2025-02-28T01:23:14.5Z" }, - { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237, upload-time = "2025-02-28T01:23:16.686Z" }, - { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737, upload-time = "2025-02-28T01:23:18.897Z" }, - { url = "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741, upload-time = "2025-02-28T01:23:21.041Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472, upload-time = "2025-02-28T01:23:23.183Z" }, - { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606, upload-time = "2025-02-28T01:23:25.361Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867, upload-time = "2025-02-28T01:23:26.875Z" }, - { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589, upload-time = "2025-02-28T01:23:28.381Z" }, - { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794, upload-time = "2025-02-28T01:23:30.187Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969, upload-time = "2025-02-28T01:23:31.945Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158, upload-time = "2025-02-28T01:23:34.161Z" }, - { url = "https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285, upload-time = "2025-02-28T01:23:35.765Z" }, - { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583, upload-time = "2025-02-28T01:23:38.021Z" }, - { url = "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896, upload-time = "2025-02-28T01:23:39.575Z" }, - { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492, upload-time = "2025-02-28T01:23:40.901Z" }, - { url = 
"https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213, upload-time = "2025-02-28T01:23:42.653Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162, upload-time = "2025-02-28T01:23:43.964Z" }, - { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856, upload-time = "2025-02-28T01:23:46.011Z" }, - { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726, upload-time = "2025-02-28T01:23:47.575Z" }, - { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664, upload-time = "2025-02-28T01:23:49.059Z" }, - { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" }, - { url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, - { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103, upload-time = "2025-02-28T01:24:00.764Z" }, - { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513, upload-time = "2025-02-28T01:24:02.243Z" }, - { url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685, upload-time = "2025-02-28T01:24:04.512Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110, upload-time = "2025-02-28T01:24:05.896Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" }, + { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", 
hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" }, ] [[package]] @@ -546,11 +555,11 @@ wheels = [ [[package]] name = "billiard" -version = "4.2.1" +version = "4.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/6a/1405343016bce8354b29d90aad6b0bf6485b5e60404516e4b9a3a9646cf0/billiard-4.2.2.tar.gz", hash = "sha256:e815017a062b714958463e07ba15981d802dc53d41c5b69d28c5a7c238f8ecf3", size = 155592, upload-time = "2025-09-20T14:44:40.456Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/a6/80/ef8dff49aae0e4430f81842f7403e14e0ca59db7bbaf7af41245b67c6b25/billiard-4.2.2-py3-none-any.whl", hash = "sha256:4bc05dcf0d1cc6addef470723aac2a6232f3c7ed7475b0b580473a9145829457", size = 86896, upload-time = "2025-09-20T14:44:39.157Z" }, ] [[package]] @@ -578,16 +587,16 @@ wheels = [ [[package]] name = "boto3-stubs" -version = "1.40.35" +version = "1.40.51" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/24/18/6a64ff9603845d635f6167b6d9a3f9a6e658d8a28eef36f8423eb5a99ae1/boto3_stubs-1.40.35.tar.gz", hash = "sha256:2d6f2dbe6e9b42deb7b8fbeed051461e7906903f26e99634d00be45cc40db41a", size = 100819, upload-time = "2025-09-19T19:42:36.372Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/4d/b07f9ee0fe432fa8ec6dc368ee7a0409e2b6d9df2c5a2a88265c9b6fd878/boto3_stubs-1.40.51.tar.gz", hash = "sha256:0281e820813a310954e15fb7c1d470c24c34c1cccc7b1ddad977fa293a1080a9", size = 100890, upload-time = "2025-10-13T19:25:36.126Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/d4/d744260908ad55903baefa086a3c9cabc50bfafd63c3f2d0e05688378013/boto3_stubs-1.40.35-py3-none-any.whl", hash = "sha256:2bb44e6c17831650a28e3e00bf5be0a6ba771fce08724ba978ffcd06a7bca7e3", size = 69689, upload-time = "2025-09-19T19:42:30.08Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2e/4476431f11fc3bf7a7e0f4f5c275f17607aa127da7c0d8685a4dc6bf6291/boto3_stubs-1.40.51-py3-none-any.whl", hash = "sha256:896d0ffaa298ce1749eea1a54946320a0f4e07c6912f8e1f8c0744a708ee25a4", size = 69709, upload-time = "2025-10-13T19:25:23.116Z" }, ] [package.optional-dependencies] @@ -611,14 +620,14 @@ wheels = [ [[package]] name = "botocore-stubs" -version = "1.40.29" +version = "1.40.51" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] 
-sdist = { url = "https://files.pythonhosted.org/packages/32/5c/49b2860e2a26b7383d5915374e61d962a3853e3fd569e4370444f0b902c0/botocore_stubs-1.40.29.tar.gz", hash = "sha256:324669d5ed7b5f7271bf3c3ea7208191b1d183f17d7e73398f11fef4a31fdf6b", size = 42742, upload-time = "2025-09-11T20:22:35.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/ca/429fadb6e037cb7b300d508a0b24b59a71961db12539e21749cbec7e7422/botocore_stubs-1.40.51.tar.gz", hash = "sha256:8ddbeb1f68e39382533bb53f3b968d29e640406016af00ad8bbd6e1a2bd59536", size = 42249, upload-time = "2025-10-13T20:26:57.777Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/3c/f901ca6c4d66e0bebbfc56e614fc214416db72c613f768ee2fc84ffdbff4/botocore_stubs-1.40.29-py3-none-any.whl", hash = "sha256:84cbcc6328dddaa1f825830f7dec8fa0dcd3bac8002211322e8529cbfb5eaddd", size = 66843, upload-time = "2025-09-11T20:22:32.576Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/5f1296bc46f293f284a1a6259f3c1f21f4161088dc6f70428698841b56a7/botocore_stubs-1.40.51-py3-none-any.whl", hash = "sha256:9a028104979205c9be0b68bb59ba679e4fe452e017eec3d40f6c2b41c590a73c", size = 66541, upload-time = "2025-10-13T20:26:55.559Z" }, ] [[package]] @@ -693,7 +702,7 @@ name = "brotlicffi" version = "1.1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi" }, + { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192, upload-time = "2023-09-14T14:22:40.707Z" } wheels = [ @@ -773,11 +782,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.8.3" +version = "2025.10.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] [[package]] @@ -926,14 +935,14 @@ wheels = [ [[package]] name = "click" -version = "8.2.1" +version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", 
size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] @@ -1076,7 +1085,7 @@ wheels = [ [[package]] name = "cos-python-sdk-v5" -version = "1.9.30" +version = "1.9.38" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "crcmod" }, @@ -1085,7 +1094,10 @@ dependencies = [ { name = "six" }, { name = "xmltodict" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/f2/be99b41433b33a76896680920fca621f191875ca410a66778015e47a501b/cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81", size = 108384, upload-time = "2024-06-14T08:02:37.063Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/3c/d208266fec7cc3221b449e236b87c3fc1999d5ac4379d4578480321cfecc/cos_python_sdk_v5-1.9.38.tar.gz", hash = "sha256:491a8689ae2f1a6f04dacba66a877b2c8d361456f9cfd788ed42170a1cbf7a9f", size = 98092, upload-time = "2025-07-22T07:56:20.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/c8/c9c156aa3bc7caba9b4f8a2b6abec3da6263215988f3fec0ea843f137a10/cos_python_sdk_v5-1.9.38-py3-none-any.whl", hash = "sha256:1d3dd3be2bd992b2e9c2dcd018e2596aa38eab022dbc86b4a5d14c8fc88370e6", size = 92601, upload-time = "2025-08-17T05:12:30.867Z" }, +] [[package]] name = "couchbase" @@ -1141,32 +1153,33 @@ toml = [ [[package]] name = "crc32c" -version = "2.7.1" +version = "2.7.1.post0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/4c/4e40cc26347ac8254d3f25b9f94710b8e8df24ee4dddc1ba41907a88a94d/crc32c-2.7.1.tar.gz", hash = "sha256:f91b144a21eef834d64178e01982bb9179c354b3e9e5f4c803b0e5096384968c", size = 45712, upload-time = "2024-09-24T06:20:17.553Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/02/5e49cc17a5f6f8cb78b55dd57d50b36416e69051c29bba1eab3e86a01927/crc32c-2.7.1.post0.tar.gz", hash = "sha256:dcaa776413af5790cc55561469cd76306e97b325fe4aa195db535fb3f328e709", size = 46574, upload-time = "2025-10-13T02:06:16.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/8e/2f37f46368bbfd50edfc11b96f0aa135699034b1b020966c70ebaff3463b/crc32c-2.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:19e03a50545a3ef400bd41667d5525f71030488629c57d819e2dd45064f16192", size = 49672, upload-time = "2024-09-24T06:18:18.032Z" }, - { url = "https://files.pythonhosted.org/packages/ed/b8/e52f7c4b045b871c2984d70f37c31d4861b533a8082912dfd107a96cf7c1/crc32c-2.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c03286b1e5ce9bed7090084f206aacd87c5146b4b10de56fe9e86cbbbf851cf", size = 37155, upload-time = "2024-09-24T06:18:19.373Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/ee/0cfa82a68736697f3c7e435ba658c2ef8c997f42b89f6ab4545efe1b2649/crc32c-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ebbf144a1a56a532b353e81fa0f3edca4f4baa1bf92b1dde2c663a32bb6a15", size = 35372, upload-time = "2024-09-24T06:18:20.983Z" }, - { url = "https://files.pythonhosted.org/packages/aa/92/c878aaba81c431fcd93a059e9f6c90db397c585742793f0bf6e0c531cc67/crc32c-2.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96b794fd11945298fdd5eb1290a812efb497c14bc42592c5c992ca077458eeba", size = 54879, upload-time = "2024-09-24T06:18:23.085Z" }, - { url = "https://files.pythonhosted.org/packages/5b/f5/ab828ab3907095e06b18918408748950a9f726ee2b37be1b0839fb925ee1/crc32c-2.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df7194dd3c0efb5a21f5d70595b7a8b4fd9921fbbd597d6d8e7a11eca3e2d27", size = 52588, upload-time = "2024-09-24T06:18:24.463Z" }, - { url = "https://files.pythonhosted.org/packages/6a/2b/9e29e9ac4c4213d60491db09487125db358cd9263490fbadbd55e48fbe03/crc32c-2.7.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d698eec444b18e296a104d0b9bb6c596c38bdcb79d24eba49604636e9d747305", size = 53674, upload-time = "2024-09-24T06:18:25.624Z" }, - { url = "https://files.pythonhosted.org/packages/79/ed/df3c4c14bf1b29f5c9b52d51fb6793e39efcffd80b2941d994e8f7f5f688/crc32c-2.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e07cf10ef852d219d179333fd706d1c415626f1f05e60bd75acf0143a4d8b225", size = 54691, upload-time = "2024-09-24T06:18:26.578Z" }, - { url = "https://files.pythonhosted.org/packages/0c/47/4917af3c9c1df2fff28bbfa6492673c9adeae5599dcc207bbe209847489c/crc32c-2.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d2a051f296e6e92e13efee3b41db388931cdb4a2800656cd1ed1d9fe4f13a086", size = 52896, upload-time = "2024-09-24T06:18:28.174Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6f/26fc3dda5835cda8f6cd9d856afe62bdeae428de4c34fea200b0888e8835/crc32c-2.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1738259802978cdf428f74156175da6a5fdfb7256f647fdc0c9de1bc6cd7173", size = 53554, upload-time = "2024-09-24T06:18:29.104Z" }, - { url = "https://files.pythonhosted.org/packages/56/3e/6f39127f7027c75d130c0ba348d86a6150dff23761fbc6a5f71659f4521e/crc32c-2.7.1-cp311-cp311-win32.whl", hash = "sha256:f7786d219a1a1bf27d0aa1869821d11a6f8e90415cfffc1e37791690d4a848a1", size = 38370, upload-time = "2024-09-24T06:18:30.013Z" }, - { url = "https://files.pythonhosted.org/packages/c9/fb/1587c2705a3a47a3d0067eecf9a6fec510761c96dec45c7b038fb5c8ff46/crc32c-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:887f6844bb3ad35f0778cd10793ad217f7123a5422e40041231b8c4c7329649d", size = 39795, upload-time = "2024-09-24T06:18:31.324Z" }, - { url = "https://files.pythonhosted.org/packages/1d/02/998dc21333413ce63fe4c1ca70eafe61ca26afc7eb353f20cecdb77d614e/crc32c-2.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f7d1c4e761fe42bf856130daf8b2658df33fe0ced3c43dadafdfeaa42b57b950", size = 49568, upload-time = "2024-09-24T06:18:32.425Z" }, - { url = "https://files.pythonhosted.org/packages/9c/3e/e3656bfa76e50ef87b7136fef2dbf3c46e225629432fc9184fdd7fd187ff/crc32c-2.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:73361c79a6e4605204457f19fda18b042a94508a52e53d10a4239da5fb0f6a34", size = 37019, upload-time = "2024-09-24T06:18:34.097Z" }, - { url = 
"https://files.pythonhosted.org/packages/0b/7d/5ff9904046ad15a08772515db19df43107bf5e3901a89c36a577b5f40ba0/crc32c-2.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd778fc8ac0ed2ffbfb122a9aa6a0e409a8019b894a1799cda12c01534493e0", size = 35373, upload-time = "2024-09-24T06:18:35.02Z" }, - { url = "https://files.pythonhosted.org/packages/4d/41/4aedc961893f26858ab89fc772d0eaba91f9870f19eaa933999dcacb94ec/crc32c-2.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ef661b34e9f25991fface7f9ad85e81bbc1b3fe3b916fd58c893eabe2fa0b8", size = 54675, upload-time = "2024-09-24T06:18:35.954Z" }, - { url = "https://files.pythonhosted.org/packages/d6/63/8cabf09b7e39b9fec8f7010646c8b33057fc8d67e6093b3cc15563d23533/crc32c-2.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571aa4429444b5d7f588e4377663592145d2d25eb1635abb530f1281794fc7c9", size = 52386, upload-time = "2024-09-24T06:18:36.896Z" }, - { url = "https://files.pythonhosted.org/packages/79/13/13576941bf7cf95026abae43d8427c812c0054408212bf8ed490eda846b0/crc32c-2.7.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c02a3bd67dea95cdb25844aaf44ca2e1b0c1fd70b287ad08c874a95ef4bb38db", size = 53495, upload-time = "2024-09-24T06:18:38.099Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b6/55ffb26d0517d2d6c6f430ce2ad36ae7647c995c5bfd7abce7f32bb2bad1/crc32c-2.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d17637c4867672cb8adeea007294e3c3df9d43964369516cfe2c1f47ce500a", size = 54456, upload-time = "2024-09-24T06:18:39.051Z" }, - { url = "https://files.pythonhosted.org/packages/c2/1a/5562e54cb629ecc5543d3604dba86ddfc7c7b7bf31d64005b38a00d31d31/crc32c-2.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4a400ac3c69a32e180d8753fd7ec7bccb80ade7ab0812855dce8a208e72495f", size = 52647, upload-time = "2024-09-24T06:18:40.021Z" }, - { url = "https://files.pythonhosted.org/packages/48/ec/ce4138eaf356cd9aae60bbe931755e5e0151b3eca5f491fce6c01b97fd59/crc32c-2.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:588587772e55624dd9c7a906ec9e8773ae0b6ac5e270fc0bc84ee2758eba90d5", size = 53332, upload-time = "2024-09-24T06:18:40.925Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b5/144b42cd838a901175a916078781cb2c3c9f977151c9ba085aebd6d15b22/crc32c-2.7.1-cp312-cp312-win32.whl", hash = "sha256:9f14b60e5a14206e8173dd617fa0c4df35e098a305594082f930dae5488da428", size = 38371, upload-time = "2024-09-24T06:18:42.711Z" }, - { url = "https://files.pythonhosted.org/packages/ae/c4/7929dcd5d9b57db0cce4fe6f6c191049380fc6d8c9b9f5581967f4ec018e/crc32c-2.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:7c810a246660a24dc818047dc5f89c7ce7b2814e1e08a8e99993f4103f7219e8", size = 39805, upload-time = "2024-09-24T06:18:43.6Z" }, + { url = "https://files.pythonhosted.org/packages/77/e6/5723311e6320b35d7755fef8ab10c5d4b55173ce11888e8ddaf62b63091f/crc32c-2.7.1.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65124edce1903484b19109da542d8671a814bdd2cc4006847701449a1f137869", size = 64759, upload-time = "2025-10-13T02:05:01.212Z" }, + { url = "https://files.pythonhosted.org/packages/94/ac/f9550d21a4434b5dad9124ccd6b7cee97ce66bc0cb91a605bf01d9c2475d/crc32c-2.7.1.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a177ee47782f9b83002b08c4d4ba57a6e31dcd96be89d1c6b71f599d9c06bba6", size = 61419, upload-time = "2025-10-13T02:05:02.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/39/82874155870dc42c917ff842073c2714955c3c3d0629579a7ca3db1bbcb1/crc32c-2.7.1.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65ce2c3f1938b1310c1d592ac97f5e32b2cb67b67ae54ec89e710605f01e91cb", size = 59951, upload-time = "2025-10-13T02:05:03.268Z" }, + { url = "https://files.pythonhosted.org/packages/b9/24/42aa97aac254adeafaa44297654a520db1922dcab4a07bbb965b41d52b66/crc32c-2.7.1.post0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c57ac2129a4adc56b8898c524a33525f008a346edc5df2b1ab7b7bfc4e80bbe", size = 78633, upload-time = "2025-10-13T02:05:04.429Z" }, + { url = "https://files.pythonhosted.org/packages/ac/76/a0c8683b9c38e260d23eb8d419d3ca39277e5e640521e9cc7ca407633ee4/crc32c-2.7.1.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3dcdcc73ea9f5e5e32cf1d0868315c62274f8f8fb2a1356e6bf2e958fc7f5bc9", size = 80187, upload-time = "2025-10-13T02:05:05.254Z" }, + { url = "https://files.pythonhosted.org/packages/16/05/978a558f580294e521dc432656e8d1b955ddc73f22870d7e767ff9c1a8d4/crc32c-2.7.1.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7cc745faf51a57c056021c005766cd8bacd818213ef424064118747c99a16d70", size = 79263, upload-time = "2025-10-13T02:05:06.121Z" }, + { url = "https://files.pythonhosted.org/packages/cd/48/9efdc54175f56b5aea24fbd9076759e052ca828713590a6bf60f822d40f7/crc32c-2.7.1.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1220cf7e97f453e07f998574e663e822e5602ed591b9a2fd436bf65dcae26168", size = 78527, upload-time = "2025-10-13T02:05:06.978Z" }, + { url = "https://files.pythonhosted.org/packages/0f/88/5accf5fb60559df04d0863496a86b912af37e6b7d160d458da25e473a881/crc32c-2.7.1.post0-cp311-cp311-win32.whl", hash = "sha256:d5087f2bc6954b38dc1ceac9b2ea9c9c4956b4e8ce82d965f4c474aefac2d061", size = 63277, upload-time = "2025-10-13T02:05:07.852Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b2/c037161956d00324198a94962788b5e6a6e76b892d96205b15a37bea0c81/crc32c-2.7.1.post0-cp311-cp311-win_amd64.whl", hash = "sha256:feda0b536b1310b0535085835564918df6ba415e0b230734e1386deb7c614c02", size = 65029, upload-time = "2025-10-13T02:05:08.658Z" }, + { url = "https://files.pythonhosted.org/packages/7d/00/243cc1b15bcadf72bd71cf9a33d425715726b95b5f37a85b306d495362f4/crc32c-2.7.1.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4eda225a4c49901b9baf1af2aec19dd614c527bac81e02c52d1b9f1d6f6d244c", size = 64820, upload-time = "2025-10-13T02:05:09.476Z" }, + { url = "https://files.pythonhosted.org/packages/6e/76/e63deacf3e5dcd38764a1a617fd25749ea83fe20ff42a7912a855a975a0f/crc32c-2.7.1.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e304b07182b915fa9ab5340b51a6845d45331974d73b80a1710405ec8f0b4d44", size = 61474, upload-time = "2025-10-13T02:05:10.44Z" }, + { url = "https://files.pythonhosted.org/packages/c2/96/a341802b0a84fc00f9eca4e7dfdc0f41a69fc226b62ea1661468d4812800/crc32c-2.7.1.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1bbd4d2272aa7bdc5527fc3130caf31819e5efad19b6abd7158859f1cc808923", size = 59963, upload-time = "2025-10-13T02:05:11.271Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8a/5e1f6789239935a95a6fb579e5f20dc4032265c5de215cec841d369ad188/crc32c-2.7.1.post0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:eea5fe4f477249f19201b2c1ac9f0df70987593b0dd0e0d15521480500d18455", size = 78461, upload-time = 
"2025-10-13T02:05:12.077Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7a/bf07239d7f55cf94ad6979de1f97105cdcfa1b73cf8818a5461f37043962/crc32c-2.7.1.post0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc97ce3c913eded8f4d19d5da7492ebb7ab7de1eb05749c8e5c48f4999e263e0", size = 79963, upload-time = "2025-10-13T02:05:13.343Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/09a11007d66767a1d339424560386c99323e904e5e7f0e75ff4a13156d3c/crc32c-2.7.1.post0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c13bdb21cc11fc2e9b7387fe726e65993f79407b3e4b8c107ee345e9c6cfe399", size = 79040, upload-time = "2025-10-13T02:05:14.216Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ca/4f8d8832524a70f39a20302e171782368fd66474e792b2aaf6bc9bb1ba9d/crc32c-2.7.1.post0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f9edc07f0617c212d700e31fc6437811b3036f84931e9b837a14169dd0e8d65", size = 78319, upload-time = "2025-10-13T02:05:15.303Z" }, + { url = "https://files.pythonhosted.org/packages/6d/41/63331e510e31928ae5af30fa3d40bca86b8b7c38164b5b59a57cdb8b5a2e/crc32c-2.7.1.post0-cp312-cp312-win32.whl", hash = "sha256:6d205730d184b5ba9a37ee855883b536e40dbf13817d15e4bab4997149c59d82", size = 63286, upload-time = "2025-10-13T02:05:16.181Z" }, + { url = "https://files.pythonhosted.org/packages/ed/3f/05cb1cd66b98f7165b8d181a164ef2c16b7ef0019a191e6ff8defa4df327/crc32c-2.7.1.post0-cp312-cp312-win_amd64.whl", hash = "sha256:f8c1584fe841883300cd3cb0e8341da5a4c954fc2dcf9e0eb15d3b697d90930e", size = 65034, upload-time = "2025-10-13T02:05:17.078Z" }, + { url = "https://files.pythonhosted.org/packages/54/7f/18a4262600e9f772d2b2d10adff4b002d64e5eaa6f0da5e6ded16086e8ad/crc32c-2.7.1.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:700d0637f620be903b596fd145d25664c0e821b9b24d462eaa3beeacb906478f", size = 60777, upload-time = "2025-10-13T02:06:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/1b/de/d9a6fdee4b1058b1922b1395814e010e85cb2c1a6ddb1388cbf7523a9f8f/crc32c-2.7.1.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:213aa16faf30c267579f9b76cfc572162fccd537095a5533e329318c2e5da589", size = 59663, upload-time = "2025-10-13T02:06:11.844Z" }, + { url = "https://files.pythonhosted.org/packages/4e/86/9e71dd8847ee075504a7ab69a101ab7dff7fd46cc22dbbef242ceeb187bf/crc32c-2.7.1.post0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e1b2b232edd75f3281ab059d2811e4ac674931a1889e0070a2fc73d93c0f204", size = 62539, upload-time = "2025-10-13T02:06:13.075Z" }, + { url = "https://files.pythonhosted.org/packages/67/c0/905905212c0aec771d81df4d88f87008dadeecd6ad628d1e17f9a5acd7dd/crc32c-2.7.1.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2e76c1e536f2408c5c5ce796e1a89ef252a438aa011c1f31048aa0783b75626", size = 63248, upload-time = "2025-10-13T02:06:14.764Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/6bdc8b946c6db999a0318e620a0f50e2099d9cba3d9c9de05932d12795a5/crc32c-2.7.1.post0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a1ea03ed177cb022d859ce86bac6044d5cd68dcf7e22f022e288a96f2bd6fa2f", size = 65049, upload-time = "2025-10-13T02:06:15.672Z" }, ] [[package]] @@ -1177,43 +1190,49 @@ sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c [[package]] name = "cryptography" -version = "45.0.7" +version = "46.0.2" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/35/c495bffc2056f2dadb32434f1feedd79abde2a7f8363e1974afa9c33c7e2/cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971", size = 744980, upload-time = "2025-09-01T11:15:03.146Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293, upload-time = "2025-10-01T00:29:11.856Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/91/925c0ac74362172ae4516000fe877912e33b5983df735ff290c653de4913/cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee", size = 7041105, upload-time = "2025-09-01T11:13:59.684Z" }, - { url = "https://files.pythonhosted.org/packages/fc/63/43641c5acce3a6105cf8bd5baeceeb1846bb63067d26dae3e5db59f1513a/cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6", size = 4205799, upload-time = "2025-09-01T11:14:02.517Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/c238dd9107f10bfde09a4d1c52fd38828b1aa353ced11f358b5dd2507d24/cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339", size = 4430504, upload-time = "2025-09-01T11:14:04.522Z" }, - { url = "https://files.pythonhosted.org/packages/62/62/24203e7cbcc9bd7c94739428cd30680b18ae6b18377ae66075c8e4771b1b/cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8", size = 4209542, upload-time = "2025-09-01T11:14:06.309Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e3/e7de4771a08620eef2389b86cd87a2c50326827dea5528feb70595439ce4/cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf", size = 3889244, upload-time = "2025-09-01T11:14:08.152Z" }, - { url = "https://files.pythonhosted.org/packages/96/b8/bca71059e79a0bb2f8e4ec61d9c205fbe97876318566cde3b5092529faa9/cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513", size = 4461975, upload-time = "2025-09-01T11:14:09.755Z" }, - { url = "https://files.pythonhosted.org/packages/58/67/3f5b26937fe1218c40e95ef4ff8d23c8dc05aa950d54200cc7ea5fb58d28/cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3", size = 4209082, upload-time = "2025-09-01T11:14:11.229Z" }, - { url = "https://files.pythonhosted.org/packages/0e/e4/b3e68a4ac363406a56cf7b741eeb80d05284d8c60ee1a55cdc7587e2a553/cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3", size = 4460397, upload-time = "2025-09-01T11:14:12.924Z" }, - { url = "https://files.pythonhosted.org/packages/22/49/2c93f3cd4e3efc8cb22b02678c1fad691cff9dd71bb889e030d100acbfe0/cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6", size = 4337244, upload-time = "2025-09-01T11:14:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/04/19/030f400de0bccccc09aa262706d90f2ec23d56bc4eb4f4e8268d0ddf3fb8/cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd", size = 4568862, upload-time = "2025-09-01T11:14:16.185Z" }, - { url = "https://files.pythonhosted.org/packages/29/56/3034a3a353efa65116fa20eb3c990a8c9f0d3db4085429040a7eef9ada5f/cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8", size = 2936578, upload-time = "2025-09-01T11:14:17.638Z" }, - { url = "https://files.pythonhosted.org/packages/b3/61/0ab90f421c6194705a99d0fa9f6ee2045d916e4455fdbb095a9c2c9a520f/cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443", size = 3405400, upload-time = "2025-09-01T11:14:18.958Z" }, - { url = "https://files.pythonhosted.org/packages/63/e8/c436233ddf19c5f15b25ace33979a9dd2e7aa1a59209a0ee8554179f1cc0/cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2", size = 7021824, upload-time = "2025-09-01T11:14:20.954Z" }, - { url = "https://files.pythonhosted.org/packages/bc/4c/8f57f2500d0ccd2675c5d0cc462095adf3faa8c52294ba085c036befb901/cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691", size = 4202233, upload-time = "2025-09-01T11:14:22.454Z" }, - { url = "https://files.pythonhosted.org/packages/eb/ac/59b7790b4ccaed739fc44775ce4645c9b8ce54cbec53edf16c74fd80cb2b/cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59", size = 4423075, upload-time = "2025-09-01T11:14:24.287Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/d4f07ea21434bf891faa088a6ac15d6d98093a66e75e30ad08e88aa2b9ba/cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4", size = 4204517, upload-time = "2025-09-01T11:14:25.679Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ac/924a723299848b4c741c1059752c7cfe09473b6fd77d2920398fc26bfb53/cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3", size = 3882893, upload-time = "2025-09-01T11:14:27.1Z" }, - { url = "https://files.pythonhosted.org/packages/83/dc/4dab2ff0a871cc2d81d3ae6d780991c0192b259c35e4d83fe1de18b20c70/cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1", size = 4450132, upload-time = "2025-09-01T11:14:28.58Z" }, - { url = "https://files.pythonhosted.org/packages/12/dd/b2882b65db8fc944585d7fb00d67cf84a9cef4e77d9ba8f69082e911d0de/cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27", size = 4204086, upload-time = "2025-09-01T11:14:30.572Z" }, - { url = "https://files.pythonhosted.org/packages/5d/fa/1d5745d878048699b8eb87c984d4ccc5da4f5008dfd3ad7a94040caca23a/cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17", size = 4449383, upload-time = "2025-09-01T11:14:32.046Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/fc61f87931bc030598e1876c45b936867bb72777eac693e905ab89832670/cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b", size = 4332186, upload-time = "2025-09-01T11:14:33.95Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/09700ddad7443ccb11d674efdbe9a832b4455dc1f16566d9bd3834922ce5/cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c", size = 4561639, upload-time = "2025-09-01T11:14:35.343Z" }, - { url = "https://files.pythonhosted.org/packages/71/ed/8f4c1337e9d3b94d8e50ae0b08ad0304a5709d483bfcadfcc77a23dbcb52/cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5", size = 2926552, upload-time = "2025-09-01T11:14:36.929Z" }, - { url = "https://files.pythonhosted.org/packages/bc/ff/026513ecad58dacd45d1d24ebe52b852165a26e287177de1d545325c0c25/cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90", size = 3392742, upload-time = "2025-09-01T11:14:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/99/4e/49199a4c82946938a3e05d2e8ad9482484ba48bbc1e809e3d506c686d051/cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde", size = 3584634, upload-time = "2025-09-01T11:14:50.593Z" }, - { url = "https://files.pythonhosted.org/packages/16/ce/5f6ff59ea9c7779dba51b84871c19962529bdcc12e1a6ea172664916c550/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34", size = 4149533, upload-time = "2025-09-01T11:14:52.091Z" }, - { url = "https://files.pythonhosted.org/packages/ce/13/b3cfbd257ac96da4b88b46372e662009b7a16833bfc5da33bb97dd5631ae/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9", size = 4385557, upload-time = "2025-09-01T11:14:53.551Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c5/8c59d6b7c7b439ba4fc8d0cab868027fd095f215031bc123c3a070962912/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae", size = 4149023, upload-time = "2025-09-01T11:14:55.022Z" }, - { url = "https://files.pythonhosted.org/packages/55/32/05385c86d6ca9ab0b4d5bb442d2e3d85e727939a11f3e163fc776ce5eb40/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b", size = 4385722, upload-time = "2025-09-01T11:14:57.319Z" }, - { url = "https://files.pythonhosted.org/packages/23/87/7ce86f3fa14bc11a5a48c30d8103c26e09b6465f8d8e9d74cf7a0714f043/cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63", size = 3332908, upload-time = "2025-09-01T11:14:58.78Z" }, + { url = "https://files.pythonhosted.org/packages/e0/98/7a8df8c19a335c8028414738490fc3955c0cecbfdd37fcc1b9c3d04bd561/cryptography-46.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:f3e32ab7dd1b1ef67b9232c4cf5e2ee4cd517d4316ea910acaaa9c5712a1c663", size = 7261255, upload-time = "2025-10-01T00:27:22.947Z" }, + { url = "https://files.pythonhosted.org/packages/c6/38/b2adb2aa1baa6706adc3eb746691edd6f90a656a9a65c3509e274d15a2b8/cryptography-46.0.2-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fd1a69086926b623ef8126b4c33d5399ce9e2f3fac07c9c734c2a4ec38b6d02", size = 4297596, upload-time = "2025-10-01T00:27:25.258Z" }, + { url = "https://files.pythonhosted.org/packages/e4/27/0f190ada240003119488ae66c897b5e97149292988f556aef4a6a2a57595/cryptography-46.0.2-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb7fb9cd44c2582aa5990cf61a4183e6f54eea3172e54963787ba47287edd135", size = 4450899, upload-time = "2025-10-01T00:27:27.458Z" }, + { url = "https://files.pythonhosted.org/packages/85/d5/e4744105ab02fdf6bb58ba9a816e23b7a633255987310b4187d6745533db/cryptography-46.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9066cfd7f146f291869a9898b01df1c9b0e314bfa182cef432043f13fc462c92", size = 4300382, upload-time = "2025-10-01T00:27:29.091Z" }, + { url = "https://files.pythonhosted.org/packages/33/fb/bf9571065c18c04818cb07de90c43fc042c7977c68e5de6876049559c72f/cryptography-46.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:97e83bf4f2f2c084d8dd792d13841d0a9b241643151686010866bbd076b19659", size = 4017347, upload-time = "2025-10-01T00:27:30.767Z" }, + { url = "https://files.pythonhosted.org/packages/35/72/fc51856b9b16155ca071080e1a3ad0c3a8e86616daf7eb018d9565b99baa/cryptography-46.0.2-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:4a766d2a5d8127364fd936572c6e6757682fc5dfcbdba1632d4554943199f2fa", size = 4983500, upload-time = "2025-10-01T00:27:32.741Z" }, + { url = "https://files.pythonhosted.org/packages/c1/53/0f51e926799025e31746d454ab2e36f8c3f0d41592bc65cb9840368d3275/cryptography-46.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fab8f805e9675e61ed8538f192aad70500fa6afb33a8803932999b1049363a08", size = 4482591, upload-time = "2025-10-01T00:27:34.869Z" }, + { url = "https://files.pythonhosted.org/packages/86/96/4302af40b23ab8aa360862251fb8fc450b2a06ff24bc5e261c2007f27014/cryptography-46.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1e3b6428a3d56043bff0bb85b41c535734204e599c1c0977e1d0f261b02f3ad5", size = 4300019, upload-time = "2025-10-01T00:27:37.029Z" }, + { url = "https://files.pythonhosted.org/packages/9b/59/0be12c7fcc4c5e34fe2b665a75bc20958473047a30d095a7657c218fa9e8/cryptography-46.0.2-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:1a88634851d9b8de8bb53726f4300ab191d3b2f42595e2581a54b26aba71b7cc", size = 4950006, upload-time = "2025-10-01T00:27:40.272Z" }, + { url = "https://files.pythonhosted.org/packages/55/1d/42fda47b0111834b49e31590ae14fd020594d5e4dadd639bce89ad790fba/cryptography-46.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:be939b99d4e091eec9a2bcf41aaf8f351f312cd19ff74b5c83480f08a8a43e0b", size = 4482088, upload-time = "2025-10-01T00:27:42.668Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/60f583f69aa1602c2bdc7022dae86a0d2b837276182f8c1ec825feb9b874/cryptography-46.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f13b040649bc18e7eb37936009b24fd31ca095a5c647be8bb6aaf1761142bd1", size = 4425599, upload-time = "2025-10-01T00:27:44.616Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/57/d8d4134cd27e6e94cf44adb3f3489f935bde85f3a5508e1b5b43095b917d/cryptography-46.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bdc25e4e01b261a8fda4e98618f1c9515febcecebc9566ddf4a70c63967043b", size = 4697458, upload-time = "2025-10-01T00:27:46.209Z" }, + { url = "https://files.pythonhosted.org/packages/d1/2b/531e37408573e1da33adfb4c58875013ee8ac7d548d1548967d94a0ae5c4/cryptography-46.0.2-cp311-abi3-win32.whl", hash = "sha256:8b9bf67b11ef9e28f4d78ff88b04ed0929fcd0e4f70bb0f704cfc32a5c6311ee", size = 3056077, upload-time = "2025-10-01T00:27:48.424Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cd/2f83cafd47ed2dc5a3a9c783ff5d764e9e70d3a160e0df9a9dcd639414ce/cryptography-46.0.2-cp311-abi3-win_amd64.whl", hash = "sha256:758cfc7f4c38c5c5274b55a57ef1910107436f4ae842478c4989abbd24bd5acb", size = 3512585, upload-time = "2025-10-01T00:27:50.521Z" }, + { url = "https://files.pythonhosted.org/packages/00/36/676f94e10bfaa5c5b86c469ff46d3e0663c5dc89542f7afbadac241a3ee4/cryptography-46.0.2-cp311-abi3-win_arm64.whl", hash = "sha256:218abd64a2e72f8472c2102febb596793347a3e65fafbb4ad50519969da44470", size = 2927474, upload-time = "2025-10-01T00:27:52.91Z" }, + { url = "https://files.pythonhosted.org/packages/d5/bb/fa95abcf147a1b0bb94d95f53fbb09da77b24c776c5d87d36f3d94521d2c/cryptography-46.0.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a08e7401a94c002e79dc3bc5231b6558cd4b2280ee525c4673f650a37e2c7685", size = 7248090, upload-time = "2025-10-01T00:28:22.846Z" }, + { url = "https://files.pythonhosted.org/packages/b7/66/f42071ce0e3ffbfa80a88feadb209c779fda92a23fbc1e14f74ebf72ef6b/cryptography-46.0.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d30bc11d35743bf4ddf76674a0a369ec8a21f87aaa09b0661b04c5f6c46e8d7b", size = 4293123, upload-time = "2025-10-01T00:28:25.072Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/1fdbd2e5c1ba822828d250e5a966622ef00185e476d1cd2726b6dd135e53/cryptography-46.0.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bca3f0ce67e5a2a2cf524e86f44697c4323a86e0fd7ba857de1c30d52c11ede1", size = 4439524, upload-time = "2025-10-01T00:28:26.808Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c1/5e4989a7d102d4306053770d60f978c7b6b1ea2ff8c06e0265e305b23516/cryptography-46.0.2-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff798ad7a957a5021dcbab78dfff681f0cf15744d0e6af62bd6746984d9c9e9c", size = 4297264, upload-time = "2025-10-01T00:28:29.327Z" }, + { url = "https://files.pythonhosted.org/packages/28/78/b56f847d220cb1d6d6aef5a390e116ad603ce13a0945a3386a33abc80385/cryptography-46.0.2-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cb5e8daac840e8879407acbe689a174f5ebaf344a062f8918e526824eb5d97af", size = 4011872, upload-time = "2025-10-01T00:28:31.479Z" }, + { url = "https://files.pythonhosted.org/packages/e1/80/2971f214b066b888944f7b57761bf709ee3f2cf805619a18b18cab9b263c/cryptography-46.0.2-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:3f37aa12b2d91e157827d90ce78f6180f0c02319468a0aea86ab5a9566da644b", size = 4978458, upload-time = "2025-10-01T00:28:33.267Z" }, + { url = "https://files.pythonhosted.org/packages/a5/84/0cb0a2beaa4f1cbe63ebec4e97cd7e0e9f835d0ba5ee143ed2523a1e0016/cryptography-46.0.2-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e38f203160a48b93010b07493c15f2babb4e0f2319bbd001885adb3f3696d21", size = 4472195, upload-time = "2025-10-01T00:28:36.039Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/8b/2b542ddbf78835c7cd67b6fa79e95560023481213a060b92352a61a10efe/cryptography-46.0.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d19f5f48883752b5ab34cff9e2f7e4a7f216296f33714e77d1beb03d108632b6", size = 4296791, upload-time = "2025-10-01T00:28:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/78/12/9065b40201b4f4876e93b9b94d91feb18de9150d60bd842a16a21565007f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:04911b149eae142ccd8c9a68892a70c21613864afb47aba92d8c7ed9cc001023", size = 4939629, upload-time = "2025-10-01T00:28:39.654Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9e/6507dc048c1b1530d372c483dfd34e7709fc542765015425f0442b08547f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8b16c1ede6a937c291d41176934268e4ccac2c6521c69d3f5961c5a1e11e039e", size = 4471988, upload-time = "2025-10-01T00:28:41.822Z" }, + { url = "https://files.pythonhosted.org/packages/b1/86/d025584a5f7d5c5ec8d3633dbcdce83a0cd579f1141ceada7817a4c26934/cryptography-46.0.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:747b6f4a4a23d5a215aadd1d0b12233b4119c4313df83ab4137631d43672cc90", size = 4422989, upload-time = "2025-10-01T00:28:43.608Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/536370418b38a15a61bbe413006b79dfc3d2b4b0eafceb5581983f973c15/cryptography-46.0.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6b275e398ab3a7905e168c036aad54b5969d63d3d9099a0a66cc147a3cc983be", size = 4685578, upload-time = "2025-10-01T00:28:45.361Z" }, + { url = "https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711, upload-time = "2025-10-01T00:28:47.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007, upload-time = "2025-10-01T00:28:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/1aabe338149a7d0f52c3e30f2880b20027ca2a485316756ed6f000462db3/cryptography-46.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1d3b3edd145953832e09607986f2bd86f85d1dc9c48ced41808b18009d9f30e5", size = 3714495, upload-time = "2025-10-01T00:28:57.222Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0a/0d10eb970fe3e57da9e9ddcfd9464c76f42baf7b3d0db4a782d6746f788f/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fe245cf4a73c20592f0f48da39748b3513db114465be78f0a36da847221bd1b4", size = 4243379, upload-time = "2025-10-01T00:28:58.989Z" }, + { url = "https://files.pythonhosted.org/packages/7d/60/e274b4d41a9eb82538b39950a74ef06e9e4d723cb998044635d9deb1b435/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2b9cad9cf71d0c45566624ff76654e9bae5f8a25970c250a26ccfc73f8553e2d", size = 4409533, upload-time = "2025-10-01T00:29:00.785Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/9a/fb8548f762b4749aebd13b57b8f865de80258083fe814957f9b0619cfc56/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9bd26f2f75a925fdf5e0a446c0de2714f17819bf560b44b7480e4dd632ad6c46", size = 4243120, upload-time = "2025-10-01T00:29:02.515Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/883f24147fd4a0c5cab74ac7e36a1ff3094a54ba5c3a6253d2ff4b19255b/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7282d8f092b5be7172d6472f29b0631f39f18512a3642aefe52c3c0e0ccfad5a", size = 4408940, upload-time = "2025-10-01T00:29:04.42Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b5/c5e179772ec38adb1c072b3aa13937d2860509ba32b2462bf1dda153833b/cryptography-46.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c4b93af7920cdf80f71650769464ccf1fb49a4b56ae0024173c24c48eb6b1612", size = 3438518, upload-time = "2025-10-01T00:29:06.139Z" }, ] [[package]] @@ -1273,11 +1292,10 @@ wheels = [ [[package]] name = "dify-api" -version = "1.9.0" +version = "1.9.1" source = { virtual = "." } dependencies = [ { name = "arize-phoenix-otel" }, - { name = "authlib" }, { name = "azure-identity" }, { name = "beautifulsoup4" }, { name = "boto3" }, @@ -1308,10 +1326,8 @@ dependencies = [ { name = "json-repair" }, { name = "langfuse" }, { name = "langsmith" }, - { name = "mailchimp-transactional" }, { name = "markdown" }, { name = "numpy" }, - { name = "openai" }, { name = "openpyxl" }, { name = "opentelemetry-api" }, { name = "opentelemetry-distro" }, @@ -1322,8 +1338,8 @@ dependencies = [ { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-celery" }, { name = "opentelemetry-instrumentation-flask" }, + { name = "opentelemetry-instrumentation-httpx" }, { name = "opentelemetry-instrumentation-redis" }, - { name = "opentelemetry-instrumentation-requests" }, { name = "opentelemetry-instrumentation-sqlalchemy" }, { name = "opentelemetry-propagator-b3" }, { name = "opentelemetry-proto" }, @@ -1333,7 +1349,6 @@ dependencies = [ { name = "opik" }, { name = "packaging" }, { name = "pandas", extra = ["excel", "output-formatting", "performance"] }, - { name = "pandoc" }, { name = "psycogreen" }, { name = "psycopg2-binary" }, { name = "pycryptodome" }, @@ -1417,8 +1432,6 @@ dev = [ { name = "types-pyyaml" }, { name = "types-redis" }, { name = "types-regex" }, - { name = "types-requests" }, - { name = "types-requests-oauthlib" }, { name = "types-setuptools" }, { name = "types-shapely" }, { name = "types-simplejson" }, @@ -1451,6 +1464,7 @@ vdb = [ { name = "couchbase" }, { name = "elasticsearch" }, { name = "mo-vector" }, + { name = "mysql-connector-python" }, { name = "opensearch-py" }, { name = "oracledb" }, { name = "pgvecto-rs", extra = ["sqlalchemy"] }, @@ -1471,7 +1485,6 @@ vdb = [ [package.metadata] requires-dist = [ { name = "arize-phoenix-otel", specifier = "~=0.9.2" }, - { name = "authlib", specifier = "==1.6.4" }, { name = "azure-identity", specifier = "==1.16.1" }, { name = "beautifulsoup4", specifier = "==4.12.2" }, { name = "boto3", specifier = "==1.35.99" }, @@ -1480,7 +1493,7 @@ requires-dist = [ { name = "celery", specifier = "~=5.5.2" }, { name = "chardet", specifier = "~=5.1.0" }, { name = "flask", specifier = "~=3.1.2" }, - { name = "flask-compress", specifier = "~=1.17" }, + { name = "flask-compress", specifier = ">=1.17,<1.18" }, { name = "flask-cors", specifier = "~=6.0.0" }, { name = "flask-login", specifier = "~=0.6.3" }, { name = "flask-migrate", specifier = 
"~=4.0.7" }, @@ -1502,10 +1515,8 @@ requires-dist = [ { name = "json-repair", specifier = ">=0.41.1" }, { name = "langfuse", specifier = "~=2.51.3" }, { name = "langsmith", specifier = "~=0.1.77" }, - { name = "mailchimp-transactional", specifier = "~=1.0.50" }, { name = "markdown", specifier = "~=3.5.1" }, { name = "numpy", specifier = "~=1.26.4" }, - { name = "openai", specifier = "~=1.61.0" }, { name = "openpyxl", specifier = "~=3.1.5" }, { name = "opentelemetry-api", specifier = "==1.27.0" }, { name = "opentelemetry-distro", specifier = "==0.48b0" }, @@ -1516,18 +1527,17 @@ requires-dist = [ { name = "opentelemetry-instrumentation", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-celery", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-flask", specifier = "==0.48b0" }, + { name = "opentelemetry-instrumentation-httpx", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-redis", specifier = "==0.48b0" }, - { name = "opentelemetry-instrumentation-requests", specifier = "==0.48b0" }, { name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.48b0" }, { name = "opentelemetry-propagator-b3", specifier = "==1.27.0" }, { name = "opentelemetry-proto", specifier = "==1.27.0" }, { name = "opentelemetry-sdk", specifier = "==1.27.0" }, { name = "opentelemetry-semantic-conventions", specifier = "==0.48b0" }, { name = "opentelemetry-util-http", specifier = "==0.48b0" }, - { name = "opik", specifier = "~=1.7.25" }, + { name = "opik", specifier = "~=1.8.72" }, { name = "packaging", specifier = "~=23.2" }, { name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" }, - { name = "pandoc", specifier = "~=2.4" }, { name = "psycogreen", specifier = "~=1.0.2" }, { name = "psycopg2-binary", specifier = "~=2.9.6" }, { name = "pycryptodome", specifier = "==3.19.1" }, @@ -1573,7 +1583,7 @@ dev = [ { name = "pytest-cov", specifier = "~=4.1.0" }, { name = "pytest-env", specifier = "~=1.1.3" }, { name = "pytest-mock", specifier = "~=3.14.0" }, - { name = "ruff", specifier = "~=0.12.3" }, + { name = "ruff", specifier = "~=0.14.0" }, { name = "scipy-stubs", specifier = ">=1.15.3.0" }, { name = "sseclient-py", specifier = ">=1.8.0" }, { name = "testcontainers", specifier = "~=4.10.0" }, @@ -1611,8 +1621,6 @@ dev = [ { name = "types-pyyaml", specifier = "~=6.0.12" }, { name = "types-redis", specifier = ">=4.6.0.20241004" }, { name = "types-regex", specifier = "~=2024.11.6" }, - { name = "types-requests", specifier = "~=2.32.0" }, - { name = "types-requests-oauthlib", specifier = "~=2.0.0" }, { name = "types-setuptools", specifier = ">=80.9.0" }, { name = "types-shapely", specifier = "~=2.0.0" }, { name = "types-simplejson", specifier = ">=3.20.0" }, @@ -1622,10 +1630,10 @@ dev = [ { name = "types-ujson", specifier = ">=5.10.0" }, ] storage = [ - { name = "azure-storage-blob", specifier = "==12.13.0" }, + { name = "azure-storage-blob", specifier = "==12.26.0" }, { name = "bce-python-sdk", specifier = "~=0.9.23" }, - { name = "cos-python-sdk-v5", specifier = "==1.9.30" }, - { name = "esdk-obs-python", specifier = "==3.24.6.1" }, + { name = "cos-python-sdk-v5", specifier = "==1.9.38" }, + { name = "esdk-obs-python", specifier = "==3.25.8" }, { name = "google-cloud-storage", specifier = "==2.16.0" }, { name = "opendal", specifier = "~=0.46.0" }, { name = "oss2", specifier = "==2.18.5" }, @@ -1645,8 +1653,9 @@ vdb = [ { name = "couchbase", specifier = "~=4.3.0" }, { name = "elasticsearch", specifier = "==8.14.0" }, { name = 
"mo-vector", specifier = "~=0.1.13" }, + { name = "mysql-connector-python", specifier = ">=9.3.0" }, { name = "opensearch-py", specifier = "==2.4.0" }, - { name = "oracledb", specifier = "==3.0.0" }, + { name = "oracledb", specifier = "==3.3.0" }, { name = "pgvecto-rs", extras = ["sqlalchemy"], specifier = "~=0.2.1" }, { name = "pgvector", specifier = "==0.2.5" }, { name = "pymilvus", specifier = "~=2.5.0" }, @@ -1658,7 +1667,7 @@ vdb = [ { name = "tidb-vector", specifier = "==0.0.9" }, { name = "upstash-vector", specifier = "==0.6.0" }, { name = "volcengine-compat", specifier = "~=1.0.0" }, - { name = "weaviate-client", specifier = "~=3.24.0" }, + { name = "weaviate-client", specifier = ">=4.0.0,<5.0.0" }, { name = "xinference-client", specifier = "~=1.2.2" }, ] @@ -1728,18 +1737,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] -[[package]] -name = "ecdsa" -version = "0.19.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, -] - [[package]] name = "elastic-transport" version = "8.17.1" @@ -1767,21 +1764,23 @@ wheels = [ [[package]] name = "emoji" -version = "2.14.1" +version = "2.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/7d/01cddcbb6f5cc0ba72e00ddf9b1fa206c802d557fd0a20b18e130edf1336/emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b", size = 597182, upload-time = "2025-01-16T06:31:24.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/78/0d2db9382c92a163d7095fc08efff7800880f830a152cfced40161e7638d/emoji-2.15.0.tar.gz", hash = "sha256:eae4ab7d86456a70a00a985125a03263a5eac54cd55e51d7e184b1ed3b6757e4", size = 615483, upload-time = "2025-09-21T12:13:02.755Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/db/a0335710caaa6d0aebdaa65ad4df789c15d89b7babd9a30277838a7d9aac/emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b", size = 590617, upload-time = "2025-01-16T06:31:23.526Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433, upload-time = "2025-09-21T12:13:01.197Z" }, ] [[package]] name = "esdk-obs-python" -version = "3.24.6.1" +version = "3.25.8" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "crcmod" }, { name = "pycryptodome" }, + { name = "requests" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/f7/af/d83276f9e288bd6a62f44d67ae1eafd401028ba1b2b643ae4014b51da5bd/esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0", size = 85798, upload-time = "2024-07-26T13:13:22.467Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/99/52362d6e081a642d6de78f6ab53baa5e3f82f2386c48954e18ee7b4ab22b/esdk-obs-python-3.25.8.tar.gz", hash = "sha256:aeded00b27ecd5a25ffaec38a2cc9416b51923d48db96c663f1a735f859b5273", size = 96302, upload-time = "2025-09-01T11:35:20.432Z" } [[package]] name = "et-xmlfile" @@ -1816,16 +1815,46 @@ wheels = [ [[package]] name = "fastapi" -version = "0.116.1" +version = "0.119.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/f9/5c5bcce82a7997cc0eb8c47b7800f862f6b56adc40486ed246e5010d443b/fastapi-0.119.0.tar.gz", hash = "sha256:451082403a2c1f0b99c6bd57c09110ed5463856804c8078d38e5a1f1035dbbb7", size = 336756, upload-time = "2025-10-11T17:13:40.53Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, + { url = "https://files.pythonhosted.org/packages/ce/70/584c4d7cad80f5e833715c0a29962d7c93b4d18eed522a02981a6d1b6ee5/fastapi-0.119.0-py3-none-any.whl", hash = "sha256:90a2e49ed19515320abb864df570dd766be0662c5d577688f1600170f7f73cf2", size = 107095, upload-time = "2025-10-11T17:13:39.048Z" }, +] + +[[package]] +name = "fastuuid" +version = "0.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/80/3c16a1edad2e6cd82fbd15ac998cc1b881f478bf1f80ca717d941c441874/fastuuid-0.13.5.tar.gz", hash = "sha256:d4976821ab424d41542e1ea39bc828a9d454c3f8a04067c06fca123c5b95a1a1", size = 18255, upload-time = "2025-09-26T09:05:38.281Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/ab/9351bfc04ff2144115758233130b5469993d3d379323903a4634cb9c78c1/fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c122558ca4b5487e2bd0863467e4ccfe636afd1274803741487d48f2e32ea0e1", size = 493910, upload-time = "2025-09-26T09:12:36.995Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ab/84fac529cc12a03d49595e70ac459380f7cb12c70f0fe401781b276f9e94/fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d7abd42a03a17a681abddd19aa4d44ca2747138cf8a48373b395cf1341a10de2", size = 252621, upload-time = "2025-09-26T09:12:22.222Z" }, + { url = "https://files.pythonhosted.org/packages/7f/9d/f4c734d7b74a04ca695781c58a1376f07b206fe2849e58e7778d476a0e94/fastuuid-0.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2705cf7c2d6f7c03053404b75a4c44f872a73f6f9d5ea34f1dc6bba400c4a97c", size = 244269, upload-time = "2025-09-26T09:08:31.921Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/da/b42b7eb84523d69cfe9dac82950e105061c8d59f4d4d2cc3e170dbd20937/fastuuid-0.13.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d220a056fcbad25932c1f25304261198612f271f4d150b2a84e81adb877daf7", size = 271528, upload-time = "2025-09-26T09:12:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/45/6eee36929119e9544b0906fd6591e685d682e4b51cfad4c25d96ccf04009/fastuuid-0.13.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f29f93b5a0c5f5579f97f77d5319e9bfefd61d8678ec59d850201544faf33bf", size = 272168, upload-time = "2025-09-26T09:07:04.238Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ac/75b70f13515e12194a25b0459dd8a8a33de4ab0a92142f0776d21e41ca84/fastuuid-0.13.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:399d86623fb806151b1feb9fdd818ebfc1d50387199a35f7264f98dfc1540af5", size = 290948, upload-time = "2025-09-26T09:07:53.433Z" }, + { url = "https://files.pythonhosted.org/packages/76/30/1801326a5b433aafc04eae906e6b005e8a3d1120fd996409fe88124edb06/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:689e8795a1edd573b2c9a455024e4edf605a9690339bba29709857f7180894ea", size = 452932, upload-time = "2025-09-26T09:09:28.017Z" }, + { url = "https://files.pythonhosted.org/packages/61/2a/080b6b2ac4ef2ead54a7463ae4162d66a52867bbd4447ad5354427b82ae2/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:25e82c4a1734da168b36f7308e397afbe9c9b353799a9c69563a605f11dd4641", size = 468384, upload-time = "2025-09-26T09:08:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d3/4a3ffcaf8d874f7f208dad7e98ded7c5359b6599073960e3aa0530ca6139/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f62299e3cca69aad6a6fb37e26e45055587954d498ad98903fea24382377ea0e", size = 444815, upload-time = "2025-09-26T09:06:38.691Z" }, + { url = "https://files.pythonhosted.org/packages/9d/a0/08dd8663f7bff3e9c0b2416708b01d1fb65f52bcd4bce18760f77c4735fd/fastuuid-0.13.5-cp311-cp311-win32.whl", hash = "sha256:68227f2230381b89fb1ad362ca6e433de85c6c11c36312b41757cad47b8a8e32", size = 144897, upload-time = "2025-09-26T09:14:53.695Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e2/2c2a37dcc56e2323c6214c38c8faac22f9d03d98c481f8a40843e0b9526a/fastuuid-0.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:4a32306982bd031cb20d5d1a726b7b958a55babebd2300ce6c8e352d3496e931", size = 150523, upload-time = "2025-09-26T09:12:24.031Z" }, + { url = "https://files.pythonhosted.org/packages/21/36/434f137c5970cac19e57834e1f7680e85301619d49891618c00666700c61/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35fe8045e866bc6846f8de6fa05acb1de0c32478048484a995e96d31e21dff2a", size = 494638, upload-time = "2025-09-26T09:14:58.695Z" }, + { url = "https://files.pythonhosted.org/packages/ca/3c/083de2ac007b2b305523b9c006dba5051e5afd87a626ef1a39f76e2c6b82/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:02a460333f52d731a006d18a52ef6fcb2d295a1f5b1a5938d30744191b2f77b7", size = 253138, upload-time = "2025-09-26T09:13:33.283Z" }, + { url = "https://files.pythonhosted.org/packages/73/5e/630cffa1c8775db526e39e9e4c5c7db0c27be0786bb21ba82c912ae19f63/fastuuid-0.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:74b0e4f8c307b9f477a5d7284db4431ce53a3c1e3f4173db7a97db18564a6202", size = 244521, upload-time = "2025-09-26T09:14:40.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/51/55d78705f4fbdadf88fb40f382f508d6c7a4941ceddd7825fafebb4cc778/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6955a99ef455c2986f3851f4e0ccc35dec56ac1a7720f2b92e88a75d6684512e", size = 271557, upload-time = "2025-09-26T09:15:09.75Z" }, + { url = "https://files.pythonhosted.org/packages/6a/2b/1b89e90a8635e5587ccdbbeb169c590672ce7637880f2c047482a0359950/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10c77b826738c1a27dcdaa92ea4dc1ec9d869748a99e1fde54f1379553d4854", size = 272334, upload-time = "2025-09-26T09:07:48.865Z" }, + { url = "https://files.pythonhosted.org/packages/0c/06/4c8207894eeb30414999e5c3f66ac039bc4003437eb4060d8a1bceb4cc6f/fastuuid-0.13.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb25dccbeb249d16d5e664f65f17ebec05136821d5ef462c4110e3f76b86fb86", size = 290594, upload-time = "2025-09-26T09:12:54.124Z" }, + { url = "https://files.pythonhosted.org/packages/50/69/96d221931a31d77a47cc2487bdfacfb3091edfc2e7a04b1795df1aec05df/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5becc646a3eeafb76ce0a6783ba190cd182e3790a8b2c78ca9db2b5e87af952", size = 452835, upload-time = "2025-09-26T09:14:00.994Z" }, + { url = "https://files.pythonhosted.org/packages/25/ef/bf045f0a47dcec96247497ef3f7a31d86ebc074330e2dccc34b8dbc0468a/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:69b34363752d06e9bb0dbdf02ae391ec56ac948c6f2eb00be90dad68e80774b9", size = 468225, upload-time = "2025-09-26T09:13:38.585Z" }, + { url = "https://files.pythonhosted.org/packages/30/46/4817ab5a3778927155a4bde92540d4c4fa996161ec8b8e080c8928b0984e/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57d0768afcad0eab8770c9b8cf904716bd3c547e8b9a4e755ee8a673b060a3a3", size = 444907, upload-time = "2025-09-26T09:14:30.163Z" }, + { url = "https://files.pythonhosted.org/packages/80/27/ab284117ce4dc9b356a7196bdbf220510285f201d27f1f078592cdc8187b/fastuuid-0.13.5-cp312-cp312-win32.whl", hash = "sha256:8ac6c6f5129d52eaa6ef9ea4b6e2f7c69468a053f3ab8e439661186b9c06bb85", size = 145415, upload-time = "2025-09-26T09:08:59.494Z" }, + { url = "https://files.pythonhosted.org/packages/f4/0c/f970a4222773b248931819f8940800b760283216ca3dda173ed027e94bdd/fastuuid-0.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:ad630e97715beefef07ec37c9c162336e500400774e2c1cbe1a0df6f80d15b9a", size = 150840, upload-time = "2025-09-26T09:13:46.115Z" }, ] [[package]] @@ -1842,11 +1871,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.19.1" +version = "3.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = 
"2025-08-14T16:56:01.633Z" }, + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, ] [[package]] @@ -1877,17 +1906,18 @@ wheels = [ [[package]] name = "flask-compress" -version = "1.18" +version = "1.17" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "brotli", marker = "platform_python_implementation != 'PyPy'" }, { name = "brotlicffi", marker = "platform_python_implementation == 'PyPy'" }, { name = "flask" }, - { name = "pyzstd" }, + { name = "zstandard" }, + { name = "zstandard", marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/77/7d3c1b071e29c09bd796a84f95442f3c75f24a1f2a9f2c86c857579ab4ec/flask_compress-1.18.tar.gz", hash = "sha256:fdbae1bd8e334dfdc8b19549829163987c796fafea7fa1c63f9a4add23c8413a", size = 16571, upload-time = "2025-07-11T14:08:13.496Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/1f/260db5a4517d59bfde7b4a0d71052df68fb84983bda9231100e3b80f5989/flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8", size = 15733, upload-time = "2024-10-14T08:13:33.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/d8/953232867e42b5b91899e9c6c4a2b89218a5fbbdbbb4493f48729770de81/flask_compress-1.18-py3-none-any.whl", hash = "sha256:9c3b7defbd0f29a06e51617b910eab07bd4db314507e4edc4c6b02a2e139fda9", size = 9340, upload-time = "2025-07-11T14:08:12.275Z" }, + { url = "https://files.pythonhosted.org/packages/f7/54/ff08f947d07c0a8a5d8f1c8e57b142c97748ca912b259db6467ab35983cd/Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20", size = 8723, upload-time = "2024-10-14T08:13:31.726Z" }, ] [[package]] @@ -1945,19 +1975,19 @@ wheels = [ [[package]] name = "flask-restx" -version = "1.3.0" +version = "1.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aniso8601" }, { name = "flask" }, { name = "importlib-resources" }, { name = "jsonschema" }, - { name = "pytz" }, + { name = "referencing" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/4c/2e7d84e2b406b47cf3bf730f521efe474977b404ee170d8ea68dc37e6733/flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728", size = 2814072, upload-time = "2023-12-10T14:48:55.575Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/89/9b9ca58cbb8e9ec46f4a510ba93878e0c88d518bf03c350e3b1b7ad85cbe/flask-restx-1.3.2.tar.gz", hash = "sha256:0ae13d77e7d7e4dce513970cfa9db45364aef210e99022de26d2b73eb4dbced5", size = 2814719, upload-time = "2025-09-23T20:34:25.21Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/bf/1907369f2a7ee614dde5152ff8f811159d357e77962aa3f8c2e937f63731/flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691", size = 2798683, upload-time = "2023-12-10T14:48:53.293Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3f/b82cd8e733a355db1abb8297afbf59ec972c00ef90bf8d4eed287958b204/flask_restx-1.3.2-py2.py3-none-any.whl", hash = "sha256:6e035496e8223668044fc45bf769e526352fd648d9e159bd631d94fd645a687b", size = 2799859, upload-time = 
"2025-09-23T20:34:23.055Z" }, ] [[package]] @@ -1975,54 +2005,52 @@ wheels = [ [[package]] name = "flatbuffers" -version = "25.2.10" +version = "25.9.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170, upload-time = "2025-02-11T04:26:46.257Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/1f/3ee70b0a55137442038f2a33469cc5fddd7e0ad2abf83d7497c18a2b6923/flatbuffers-25.9.23.tar.gz", hash = "sha256:676f9fa62750bb50cf531b42a0a2a118ad8f7f797a511eda12881c016f093b12", size = 22067, upload-time = "2025-09-24T05:25:30.106Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1b/00a78aa2e8fbd63f9af08c9c19e6deb3d5d66b4dda677a0f61654680ee89/flatbuffers-25.9.23-py2.py3-none-any.whl", hash = "sha256:255538574d6cb6d0a79a17ec8bc0d30985913b87513a01cce8bcdb6b4c44d0e2", size = 30869, upload-time = "2025-09-24T05:25:28.912Z" }, ] [[package]] name = "frozenlist" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, - { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, - { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, - { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, - { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, - { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, - { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, - { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, - { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, - { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, - { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, - { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, - { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, - { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, - { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, - { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, - { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, - { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, - { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, - { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, - { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, - { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, - { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, - { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, - { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] @@ -2400,51 +2428,51 @@ wheels = [ [[package]] name = 
"grimp" -version = "3.11" +version = "3.12" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/5e/1be34b2aed713fca8b9274805fc295d54f9806fccbfb15451fdb60066b23/grimp-3.11.tar.gz", hash = "sha256:920d069a6c591b830d661e0f7e78743d276e05df1072dc139fc2ee314a5e723d", size = 844989, upload-time = "2025-09-01T07:25:34.148Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/a4/463903a1cfbc19d3e7125d6614bb900df2b34dd675c7d93544d154819d2b/grimp-3.12.tar.gz", hash = "sha256:1a733b1d719c42bd2fada58240975fa7d09936b57120c34b64cfb31e42701010", size = 845594, upload-time = "2025-10-09T09:51:02.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/f1/39fa82cf6738cea7ae454a739a0b4a233ccc2905e2506821cdcad85fef1c/grimp-3.11-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8271906dadd01f9a866c411aa8c4f15cf0469d8476734d3672f55d1fdad05ddf", size = 2015949, upload-time = "2025-09-01T07:24:38.836Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a2/19209b8680899034c74340c115770b3f0fe6186b2a8779ce3e578aa3ab30/grimp-3.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb20844c1ec8729627dcbf8ca18fe6e2fb0c0cd34683c6134cd89542538d12a1", size = 1929047, upload-time = "2025-09-01T07:24:31.813Z" }, - { url = "https://files.pythonhosted.org/packages/ee/b1/cef086ed0fc3c1b2bba413f55cae25ebdd3ff11bc683639ba8fc29b09d7b/grimp-3.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e39c47886320b2980d14f31351377d824683748d5982c34283461853b5528102", size = 2093705, upload-time = "2025-09-01T07:23:18.927Z" }, - { url = "https://files.pythonhosted.org/packages/92/4a/6945c6a5267d01d2e321ba622d1fc138552bd2a69d220c6baafb60a128da/grimp-3.11-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1add91bf2e024321c770f1271799576d22a3f7527ed662e304f40e73c6a14138", size = 2045422, upload-time = "2025-09-01T07:23:31.571Z" }, - { url = "https://files.pythonhosted.org/packages/49/1a/4bfb34cd6cbf4d712305c2f452e650772cbc43773f1484513375e9b83a31/grimp-3.11-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bb0bc0995de10135d3b5dc5dbe1450d88a0fa7331ec7885db31569ad61e4d9", size = 2194719, upload-time = "2025-09-01T07:24:13.206Z" }, - { url = "https://files.pythonhosted.org/packages/d6/93/e6d9f9a1fbc78df685b9e970c28d3339ae441f7da970567d65b63c7a199e/grimp-3.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9152657e63ad0dee6029fe612d5550fb1c029c987b496a53a4d49246e772bd7b", size = 2391047, upload-time = "2025-09-01T07:23:48.095Z" }, - { url = "https://files.pythonhosted.org/packages/0f/44/f28d0a88161a55751da335b22d252ef6e2fa3fa9e5111f5a5b26caa66e8f/grimp-3.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352ba7f1aba578315dddb00eff873e3fbc0c7386b3d64bbc1fe8e28d2e12eda2", size = 2241597, upload-time = "2025-09-01T07:24:00.354Z" }, - { url = "https://files.pythonhosted.org/packages/15/89/2957413b54c047e87f8ea6611929ef0bbaedbab00399166119b5a164a430/grimp-3.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1291a323bbf30b0387ee547655a693b034376d9354797a076c53839966149e3", size = 2153283, upload-time = "2025-09-01T07:24:22.706Z" }, - { url = "https://files.pythonhosted.org/packages/3d/83/69162edb2c49fff21a42fca68f51fbb93006a1b6a10c0f329a61a7a943e8/grimp-3.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:d4b47faa3a35ccee75039343267d990f03c7f39af8abe01a99f41c83339c5df4", size = 2269299, upload-time = "2025-09-01T07:24:45.272Z" }, - { url = "https://files.pythonhosted.org/packages/5f/22/1bbf95e4bab491a847f0409d19d9c343a8c361ab1f2921b13318278d937a/grimp-3.11-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:cae0cc48584389df4f2ff037373cec5dbd4f3c7025583dc69724d5c453fc239b", size = 2305354, upload-time = "2025-09-01T07:24:57.413Z" }, - { url = "https://files.pythonhosted.org/packages/1f/fd/2d40ed913744202e5d7625936f8bd9e1d44d1a062abbfc25858e7c9acd6a/grimp-3.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3ba13bd9e58349c48a6d420a62f244b3eee2c47aedf99db64c44ba67d07e64d6", size = 2299647, upload-time = "2025-09-01T07:25:10.188Z" }, - { url = "https://files.pythonhosted.org/packages/15/be/6e721a258045285193a16f4be9e898f7df5cc28f0b903eb010d8a7035841/grimp-3.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2ee94b2a0ec7e8ca90d63a724d77527632ab3825381610bd36891fbcc49071", size = 2323713, upload-time = "2025-09-01T07:25:22.678Z" }, - { url = "https://files.pythonhosted.org/packages/5e/ad/0ae7a1753f4d60d5a9bebefd112bb83ef115541ec7b509565a9fbb712d60/grimp-3.11-cp311-cp311-win32.whl", hash = "sha256:b4810484e05300bc3dfffaeaaa89c07dcfd6e1712ddcbe2e14911c0da5737d40", size = 1707055, upload-time = "2025-09-01T07:25:43.719Z" }, - { url = "https://files.pythonhosted.org/packages/df/b7/af81165c2144043293b0729d6be92885c52a38aadff16e6ac9418baab30f/grimp-3.11-cp311-cp311-win_amd64.whl", hash = "sha256:31b9b8fd334dc959d3c3b0d7761f805decb628c4eac98ff7707c8b381576e48f", size = 1809864, upload-time = "2025-09-01T07:25:36.724Z" }, - { url = "https://files.pythonhosted.org/packages/06/ad/271c0f2b49be72119ad3724e4da3ba607c533c8aa2709078a51f21428fab/grimp-3.11-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2731b03deeea57ec3722325c3ebfa25b6ec4bc049d6b5a853ac45bb173843537", size = 2011143, upload-time = "2025-09-01T07:24:40.113Z" }, - { url = "https://files.pythonhosted.org/packages/40/85/858811346c77bbbe6e62ffaa5367f46990a30a47e77ce9f6c0f3d65a42bd/grimp-3.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39953c320e235e2fb7f0ad10b066ddd526ab26bc54b09dd45620999898ab2b33", size = 1927855, upload-time = "2025-09-01T07:24:33.468Z" }, - { url = "https://files.pythonhosted.org/packages/27/f8/5ce51d2fb641e25e187c10282a30f6c7f680dcc5938e0eb5670b7a08c735/grimp-3.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b363da88aa8aca5edc008c4473def9015f31d293493ca6c7e211a852b5ada6c", size = 2093246, upload-time = "2025-09-01T07:23:20.091Z" }, - { url = "https://files.pythonhosted.org/packages/09/17/217490c0d59bfcf254cb15c82d8292d6e67717cfa1b636a29f6368f59147/grimp-3.11-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dded52a319d31de2178a6e2f26da188b0974748e27af430756b3991478443b12", size = 2044921, upload-time = "2025-09-01T07:23:33.118Z" }, - { url = "https://files.pythonhosted.org/packages/04/85/54e5c723b2bd19c343c358866cc6359a38ccf980cf128ea2d7dfb5f59384/grimp-3.11-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9763b80ca072ec64384fae1ba54f18a00e88a36f527ba8dcf2e8456019e77de", size = 2195131, upload-time = "2025-09-01T07:24:14.496Z" }, - { url = "https://files.pythonhosted.org/packages/fd/15/8188cd73fff83055c1dca6e20c8315e947e2564ceaaf8b957b3ca7e1fa93/grimp-3.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e351c159834c84f723cfa1252f1b23d600072c362f4bfdc87df7eed9851004a", size = 
2391156, upload-time = "2025-09-01T07:23:49.283Z" }, - { url = "https://files.pythonhosted.org/packages/c2/51/f2372c04b9b6e4628752ed9fc801bb05f968c8c4c4b28d78eb387ab96545/grimp-3.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19f2ab56e647cf65a2d6e8b2e02d5055b1a4cff72aee961cbd78afa0e9a1f698", size = 2245104, upload-time = "2025-09-01T07:24:01.54Z" }, - { url = "https://files.pythonhosted.org/packages/83/6d/bf4948b838bfc7d8c3f1da50f1bb2a8c44984af75845d41420aaa1b3f234/grimp-3.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30cc197decec63168a15c6c8a65ee8f2f095b4a7bf14244a4ed24e48b272843a", size = 2153265, upload-time = "2025-09-01T07:24:23.971Z" }, - { url = "https://files.pythonhosted.org/packages/52/18/ce2ff3f67adc286de245372b4ac163b10544635e1a86a2bc402502f1b721/grimp-3.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be27e9ecc4f8a9f96e5a09e8588b5785de289a70950b7c0c4b2bcafc96156a18", size = 2268265, upload-time = "2025-09-01T07:24:46.505Z" }, - { url = "https://files.pythonhosted.org/packages/23/b0/dc28cb7e01f578424c9efbb9a47273b14e5d3a2283197d019cbb5e6c3d4f/grimp-3.11-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab72874999a5a309a39ec91168f7e76c0acb7a81af2cc463431029202a661a5d", size = 2304895, upload-time = "2025-09-01T07:24:58.743Z" }, - { url = "https://files.pythonhosted.org/packages/9e/00/48916bf8284fc48f559ea4a9ccd47bd598493eac74dbb74c676780b664e7/grimp-3.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:55b08122a2896207ff09ffe349ad9f440a4382c092a7405191ac0512977a328f", size = 2299337, upload-time = "2025-09-01T07:25:11.886Z" }, - { url = "https://files.pythonhosted.org/packages/35/f9/6bcab18cdf1186185a6ae9abb4a5dcc43e19d46bc431becca65ac0ba1a71/grimp-3.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:54e6e5417bcd7ad44439ad1b8ef9e85f65332dcc42c9fbdbaf566da127a32d3d", size = 2322913, upload-time = "2025-09-01T07:25:24.529Z" }, - { url = "https://files.pythonhosted.org/packages/92/19/023e45fe46603172df7c55ced127bc74fcd14b8f87505ea31ea6ae9f86bc/grimp-3.11-cp312-cp312-win32.whl", hash = "sha256:41d67c29a8737b4dd7ffe11deedc6f1cfea3ce1b845a72a20c4938e8dd85b2fa", size = 1707368, upload-time = "2025-09-01T07:25:45.096Z" }, - { url = "https://files.pythonhosted.org/packages/71/ef/3cbe04829d7416f4b3c06b096ad1972622443bd11833da4d98178da22637/grimp-3.11-cp312-cp312-win_amd64.whl", hash = "sha256:c3c6fc76e1e5db2733800490ee4d46a710a5b4ac23eaa8a2313489a6e7bc60e2", size = 1811752, upload-time = "2025-09-01T07:25:38.071Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6b/dca73b704e87609b4fb5170d97ae1e17fe25ffb4e8a6dee4ac21c31da9f4/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c634e77d4ee9959b618ca0526cb95d8eeaa7d716574d270fd4d880243e4e76", size = 2095005, upload-time = "2025-09-01T07:23:27.57Z" }, - { url = "https://files.pythonhosted.org/packages/35/f1/a7be1b866811eafa0798316baf988347cac10acaea1f48dbc4bc536bc82a/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41b55e2246aed2bd2f8a6c334b5c91c737d35fec9d1c1cd86884bff1b482ab9b", size = 2046301, upload-time = "2025-09-01T07:23:41.046Z" }, - { url = "https://files.pythonhosted.org/packages/d7/c5/15071e06972f2a04ccf7c0b9f6d0cd5851a7badc59ba3df5c4036af32275/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6400eff472b205787f5fc73d2b913534c5f1ddfacd5fbcacf9b0f46e3843898", size = 2194815, upload-time = 
"2025-09-01T07:24:20.256Z" }, - { url = "https://files.pythonhosted.org/packages/9f/27/73a08f322adeef2a3c2d22adb7089a0e6a134dae340293be265e70471166/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ddd0db48f1168bc430adae3b5457bf32bb9c7d479791d5f9f640fe752256d65", size = 2388925, upload-time = "2025-09-01T07:23:56.658Z" }, - { url = "https://files.pythonhosted.org/packages/9d/1b/4b372addef06433b37b035006cf102bc2767c3d573916a5ce6c9b50c96f5/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e744a031841413c06bd6e118e853b1e0f2d19a5081eee7c09bb7c4c8868ca81b", size = 2242506, upload-time = "2025-09-01T07:24:09.133Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2a/d618a74aa66a585ed09eebed981d71f6310ccd0c85fecdefca6a660338e3/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf5d4cbd033803ba433f445385f070759730f64f0798c75a11a3d60e7642bb9c", size = 2154028, upload-time = "2025-09-01T07:24:29.086Z" }, - { url = "https://files.pythonhosted.org/packages/2b/74/50255cc0af7b8a742d00b72ee6d825da8ce52b036260ee84d1e9e27a7fc7/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70cf9196180226384352360ba02e1f7634e00e8e999a65087f4e7383ece78afb", size = 2270008, upload-time = "2025-09-01T07:24:53.195Z" }, - { url = "https://files.pythonhosted.org/packages/42/a0/1f441584ce68b9b818cb18f8bad2aa7bef695853f2711fb648526e0237b9/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:e5a9df811aeb2f3d764070835f9ac65f240af154ba9ba23bda7a4c4d4ad46744", size = 2306660, upload-time = "2025-09-01T07:25:06.031Z" }, - { url = "https://files.pythonhosted.org/packages/35/e9/c1b61b030b286c7c117024676d88db52cdf8b504e444430d813170a6b9f6/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:23ceffc0a19e7b85107b137435fadd3d15a3883cbe0b65d7f93f3b33a6805af7", size = 2300281, upload-time = "2025-09-01T07:25:18.5Z" }, - { url = "https://files.pythonhosted.org/packages/44/d0/124a230725e1bff859c0ad193d6e2a64d2d1273d6ae66e04138dbd0f1ca6/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e57baac1360b90b944e2fd0321b490650113e5b927d013b26e220c2889f6f275", size = 2324348, upload-time = "2025-09-01T07:25:31.409Z" }, + { url = "https://files.pythonhosted.org/packages/0f/b5/1c89600bf181d41502aed51b73b3a5889158dee35c534f51df3666779587/grimp-3.12-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e6c02e51eebfcf71146d42f47c9ce353ac1902ae446e18d0e663ab9fdaa0496c", size = 2062043, upload-time = "2025-10-09T09:49:57.035Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/bab32c5e26949a82299853ccb28ee30a7899d0355b0d209b535eb03bc04e/grimp-3.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:79bc2b0ff6072c43c0ddc4479b25b7a8198795486478cfe3be0503b2c7d32c7f", size = 1981378, upload-time = "2025-10-09T09:49:49.237Z" }, + { url = "https://files.pythonhosted.org/packages/b5/03/b9f7e465488e8593de9a1e88355c3cfba04c02c3a34a6b02cbe946e0d587/grimp-3.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3986f11a9dd4167a2943cf6e80b458c0a825b48609713736cc8f2de135000810", size = 2130579, upload-time = "2025-10-09T09:48:36.035Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d0/81c776327354f32f86f321dd8468b32ba6b52dc3511d912d24c4fac96da4/grimp-3.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7a2abe55844f9dad25499ff9456d680496f390d160b6b3a4e5aeabc0183813b4", size = 2091201, 
upload-time = "2025-10-09T09:48:52.57Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7e/116ac4c1e4407a123fba4bb076b2e880643d70b3f4f1621c3323b5d66e12/grimp-3.12-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e59112d0f557335b619bcf10263d11873579230bd3df4a4b19224ec18e7212d6", size = 2240782, upload-time = "2025-10-09T09:49:30.915Z" }, + { url = "https://files.pythonhosted.org/packages/06/7f/89bbec1241a8504499975f0f08befea0cf3d27c52f9808602fff8075c639/grimp-3.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b858e2e5a489c36710322970aa82bfbd3f1c4107c8564960629a59d2f17a53d0", size = 2423143, upload-time = "2025-10-09T09:49:05.18Z" }, + { url = "https://files.pythonhosted.org/packages/86/d7/2f416439b624b2a91bf2e0e456f58d74d51aa7ad239099cf4a8911d952c0/grimp-3.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d46cc1222dd301e0be371b97f0cdecae178089704e8a285e3edd4750ec46270a", size = 2303850, upload-time = "2025-10-09T09:49:19.073Z" }, + { url = "https://files.pythonhosted.org/packages/60/bd/8c2f48c26151eb9a65bc41f01004b43cb1b31791ffb61758d40d2f6b485a/grimp-3.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef06822f75856af28e7fcc580034043c543b1c99b07d2bd467bd173a7f10691", size = 2168571, upload-time = "2025-10-09T09:49:39.844Z" }, + { url = "https://files.pythonhosted.org/packages/5a/45/01a839434ff88be24317aa52cc1ba158833bd1d071efe0da1b14838af024/grimp-3.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4c19f1cba8a95c898473dd18f9c81358019d67f87f140b0b8401550e6d21c5a3", size = 2310869, upload-time = "2025-10-09T09:50:05.153Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7b/0dc45fdc15562c2faf8a95a8685d3805d27decdef6fcfb66d9b577ed2f12/grimp-3.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:600e8dbc1cd9c6decbc22089730221c65591b7ba5f89751d07fc7ad014d99aa1", size = 2353397, upload-time = "2025-10-09T09:50:17.755Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ec/07734ecc4f1489ffc071417f7bc881c939bcfdfba10eb585bce510ede1b2/grimp-3.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:259ba53b82cfb9c2c2d097b2237970c4e9903fa2d0b664b7e12329d9a64924f9", size = 2350166, upload-time = "2025-10-09T09:50:32.237Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f5/45d80e2fa205066a484f0c1a667a249408a49bb3b665d62677f879920aa0/grimp-3.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a593549b1f66b1c12574e71f9e8c0073b372888c6b6706e2617bba2713ae28c2", size = 2360590, upload-time = "2025-10-09T09:50:49.961Z" }, + { url = "https://files.pythonhosted.org/packages/e6/f2/7ab1bc4d613189183c17741ff0d03490d9749eb5130b8b56e82ed77098b0/grimp-3.12-cp311-cp311-win32.whl", hash = "sha256:356ee969443f06c6c3a270f5a7221f946f0cb135a8b8ece2009990b293504bb3", size = 1748183, upload-time = "2025-10-09T09:51:13.503Z" }, + { url = "https://files.pythonhosted.org/packages/91/62/195f37a68d07fab40c8934ae8e39f9ff1f9a5bf3e375059b9cf14ccba302/grimp-3.12-cp311-cp311-win_amd64.whl", hash = "sha256:75e1f0d74f3a242a1c34e464d775c36b1c8b9d8c92b35f46f221e73e9b2f0065", size = 1851099, upload-time = "2025-10-09T09:51:04.747Z" }, + { url = "https://files.pythonhosted.org/packages/12/ac/0f55980a59c07439a965d3975f1cf3a6574f7d773910b9d6924790e0dddf/grimp-3.12-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:af399fc0ffddfbd7ea6c2e8546be1ab5284ee800f15a445705bdda5d63501b34", size = 2058862, upload-time = "2025-10-09T09:49:58.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/b1/5fdcb1db7cb3253c78d87a0b8c3f7f9c5214b273861300b51c897c55e6b8/grimp-3.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f08358acbaf9a4b324537bf344fd2d76b5f9b6f1bfaf9a431e9453fc0eaee5f", size = 1977586, upload-time = "2025-10-09T09:49:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/e5f6d265b71430f9641daa9476cde8c23549e396c558b39a0bdc7fee824f/grimp-3.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eeb1616cafe9074fcb390fcfc01e6e5a0e0ddd5acb9dd37579985b2879c239a", size = 2130610, upload-time = "2025-10-09T09:48:38.472Z" }, + { url = "https://files.pythonhosted.org/packages/da/e1/2d0601c9aac2ab7340504e85ca4cd55f2991501a03e421bec78f53a07478/grimp-3.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99e648e299f7cd3daaee2cb745192e7ea159c7d38df76b4dcca12a2ef68a3ede", size = 2092775, upload-time = "2025-10-09T09:48:53.841Z" }, + { url = "https://files.pythonhosted.org/packages/db/a1/e63315477127ed8f31a1a93911d084bf704d6e126ca27650e3c3389701a6/grimp-3.12-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b24c5ce351030d1f83e69acd76a06863dd87041ceb25572339f7334e210cbc4", size = 2239336, upload-time = "2025-10-09T09:49:32.185Z" }, + { url = "https://files.pythonhosted.org/packages/f2/09/cd76d35121f053a95a58fc5830756c62e5c9de74aa4e16b4dc27ce6ada2c/grimp-3.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd40a5ec09d1dfafaae88b53231ab79378183e2e9a03e7b26b7a30133d027d8a", size = 2421851, upload-time = "2025-10-09T09:49:06.893Z" }, + { url = "https://files.pythonhosted.org/packages/40/46/e8390a7c5ed85b4dbeff4e873f1ece8d9acf72d72f084b397ccc2facfa3b/grimp-3.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aebdfad66d6f4e8b0f7364ce0429d208be3510918097f969428165074d3103e", size = 2304849, upload-time = "2025-10-09T09:49:20.695Z" }, + { url = "https://files.pythonhosted.org/packages/bd/81/f73edbc48a283f634233b6153ac43e4e7b9f58108ffc19da803b0015cb60/grimp-3.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76fd06be98d6bea9ea8a804da22c80accf1d277fe04abd5f3dff05d087f056f7", size = 2168655, upload-time = "2025-10-09T09:49:41.118Z" }, + { url = "https://files.pythonhosted.org/packages/84/1a/8fa5752f725b8872010627bd10e1aedccdb406c3b4118ec3fe127155284e/grimp-3.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a73a42a43e268ac5b196386beae1ec646f4572409e731bccf2a99ab4ed5c46bf", size = 2311124, upload-time = "2025-10-09T09:50:06.477Z" }, + { url = "https://files.pythonhosted.org/packages/83/a0/02d6b2a86289a4ac73f44f59aaee43c1dc936c984204c73d2affe4570eb6/grimp-3.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:af990af7d5e64f484d12cdefacfaaed4ea9418ac4d0a5a928953fd91aaf8df80", size = 2354216, upload-time = "2025-10-09T09:50:19.114Z" }, + { url = "https://files.pythonhosted.org/packages/7b/48/0368289f5bbdf943a48305824b30411b35ef2c7cd8edf2bad48d67b3897e/grimp-3.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:82ee28c1e9835572af2c733f7e5913a44193c53ae8ca488039164593b4a750fa", size = 2348372, upload-time = "2025-10-09T09:50:37.479Z" }, + { url = "https://files.pythonhosted.org/packages/26/73/b4f90b4926791d720f6069fc8c8b3e204721d1db839a1c00fbcee1e2a36d/grimp-3.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afdceaea00e305909cb30d68e91b94fcf71d1a7234052549ea31148785a03a52", size = 2361167, upload-time = "2025-10-09T09:50:51.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/ae/94d34c732d531c7165c8942d7995495aac64e9bb5c28cc6751349eacdcde/grimp-3.12-cp312-cp312-win32.whl", hash = "sha256:40f8e048254d2437dffcd383d2301a82c35d9a3082e878b707d87a6e8c539614", size = 1747179, upload-time = "2025-10-09T09:51:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/48bc396ee2f36e72d5c50ba8b4d7f817fc2cdac7b9ab77d2b097f50a4447/grimp-3.12-cp312-cp312-win_amd64.whl", hash = "sha256:199172d17f22199bf400a0bd5c4985784622201e887a023fe799ca3f3437dedf", size = 1850691, upload-time = "2025-10-09T09:51:05.984Z" }, + { url = "https://files.pythonhosted.org/packages/d9/31/c72e53a46692dc8358cff1af1a9494430a0fecd4c3f2d0d8e9c2eb5e828d/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:567d037a3db083e54bee621daba59a2e01fd1391364ae0a0c737995f6eed910b", size = 2131392, upload-time = "2025-10-09T09:48:46.857Z" }, + { url = "https://files.pythonhosted.org/packages/39/10/15e43be32734baaebeee090dca16f06ea5ba933b209b8e1c0d5986dabb32/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9b4cc756c91c3d8582ee70b5e013c0e34fdb31c7f808cefe9d15509c45fec31e", size = 2092481, upload-time = "2025-10-09T09:49:00.754Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4a/c9349dee284c2d9384714741896f0f84a1d66011a69cdc364e4d94e188b1/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bd47f9a8619cb8966f18cb6faf5f6cb8d35ade99312477dd8e9de3a9ae4cb7", size = 2242260, upload-time = "2025-10-09T09:49:37.183Z" }, + { url = "https://files.pythonhosted.org/packages/d8/63/3935823f89c12320840bbf018858eeaca7d5285f9769a48921587a88adeb/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f30e01855c67a39857c87e6c0eafe5e8891010a35e06cf2145f2cfce8ea9780", size = 2422371, upload-time = "2025-10-09T09:49:14.616Z" }, + { url = "https://files.pythonhosted.org/packages/71/8e/5a75c2335a2dc61738b19318dcdd16392015a984211e3d0b9f6679dc6c89/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07e825f6b052186dabd8dbbcc7e008a3b56e551725e2ba47169fe1e4bde76ac", size = 2304257, upload-time = "2025-10-09T09:49:26.908Z" }, + { url = "https://files.pythonhosted.org/packages/40/99/462d86bc9401a39859f272b867331a678f4b5324a539dc771bdae6d36309/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f1a1289d4282be2891ada75ec5d3099e856518c4236b1196e367b630485f8ce", size = 2169360, upload-time = "2025-10-09T09:49:46.575Z" }, + { url = "https://files.pythonhosted.org/packages/d0/07/6d2929f05dae189265633588819d990df35644ad74b6ec74207091dff18d/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:85136b555aeb7d3965fdb40af4e4af2011f911b0fde8c20979bf4db7b06455f5", size = 2312280, upload-time = "2025-10-09T09:50:13.491Z" }, + { url = "https://files.pythonhosted.org/packages/5c/47/7e49417e2c496da0b6141e711dca40726d2b30a0adc6db9d04b74c7bafa7/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:963efd6ec86e7b47fde835b2526b6be7a3f489857a1cd47a747c94b3e670550a", size = 2354449, upload-time = "2025-10-09T09:50:27.596Z" }, + { url = "https://files.pythonhosted.org/packages/2c/08/2e1db56797e4e26334b3ee4ef1a5fbf56155d74a0318215ed4dcad02ef43/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:c9e2ee478b66f0e20c92af6123142ffd6b604c36e9b3a8d391ea9172cc18b6b3", size = 2350545, upload-time = 
"2025-10-09T09:50:45.623Z" }, + { url = "https://files.pythonhosted.org/packages/37/78/53594064f11b0ae9e72b3e9df5c055f00c5bff44962f7b777846504fc50d/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8826362d4e403aa2e03d480e3e4d64284a6b6ccafc2c5777bb2bed2535bdc4e", size = 2361926, upload-time = "2025-10-09T09:50:58.605Z" }, ] [[package]] @@ -2463,30 +2491,33 @@ wheels = [ [[package]] name = "grpcio" -version = "1.74.0" +version = "1.75.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/77/b2f06db9f240a5abeddd23a0e49eae2b6ac54d85f0e5267784ce02269c3b/grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31", size = 5487368, upload-time = "2025-07-24T18:53:03.548Z" }, - { url = "https://files.pythonhosted.org/packages/48/99/0ac8678a819c28d9a370a663007581744a9f2a844e32f0fa95e1ddda5b9e/grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4", size = 10999804, upload-time = "2025-07-24T18:53:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/45/c6/a2d586300d9e14ad72e8dc211c7aecb45fe9846a51e558c5bca0c9102c7f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce", size = 5987667, upload-time = "2025-07-24T18:53:07.157Z" }, - { url = "https://files.pythonhosted.org/packages/c9/57/5f338bf56a7f22584e68d669632e521f0de460bb3749d54533fc3d0fca4f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3", size = 6655612, upload-time = "2025-07-24T18:53:09.244Z" }, - { url = "https://files.pythonhosted.org/packages/82/ea/a4820c4c44c8b35b1903a6c72a5bdccec92d0840cf5c858c498c66786ba5/grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182", size = 6219544, upload-time = "2025-07-24T18:53:11.221Z" }, - { url = "https://files.pythonhosted.org/packages/a4/17/0537630a921365928f5abb6d14c79ba4dcb3e662e0dbeede8af4138d9dcf/grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d", size = 6334863, upload-time = "2025-07-24T18:53:12.925Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a6/85ca6cb9af3f13e1320d0a806658dca432ff88149d5972df1f7b51e87127/grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f", size = 7019320, upload-time = "2025-07-24T18:53:15.002Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/a7/fe2beab970a1e25d2eff108b3cf4f7d9a53c185106377a3d1989216eba45/grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4", size = 6514228, upload-time = "2025-07-24T18:53:16.999Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c2/2f9c945c8a248cebc3ccda1b7a1bf1775b9d7d59e444dbb18c0014e23da6/grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b", size = 3817216, upload-time = "2025-07-24T18:53:20.564Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d1/a9cf9c94b55becda2199299a12b9feef0c79946b0d9d34c989de6d12d05d/grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11", size = 4495380, upload-time = "2025-07-24T18:53:22.058Z" }, - { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, - { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, - { url = "https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, - { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" }, - { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, - { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, - { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, - { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, + { url = "https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = "2025-09-26T09:01:39.474Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = "https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, ] [[package]] @@ -2568,17 +2599,17 @@ wheels = [ [[package]] name = "hf-xet" -version = "1.1.9" 
+version = "1.1.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/0f/5b60fc28ee7f8cc17a5114a584fd6b86e11c3e0a6e142a7f97a161e9640a/hf_xet-1.1.9.tar.gz", hash = "sha256:c99073ce404462e909f1d5839b2d14a3827b8fe75ed8aed551ba6609c026c803", size = 484242, upload-time = "2025-08-27T23:05:19.441Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910, upload-time = "2025-09-12T20:10:27.12Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/12/56e1abb9a44cdef59a411fe8a8673313195711b5ecce27880eb9c8fa90bd/hf_xet-1.1.9-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:a3b6215f88638dd7a6ff82cb4e738dcbf3d863bf667997c093a3c990337d1160", size = 2762553, upload-time = "2025-08-27T23:05:15.153Z" }, - { url = "https://files.pythonhosted.org/packages/3a/e6/2d0d16890c5f21b862f5df3146519c182e7f0ae49b4b4bf2bd8a40d0b05e/hf_xet-1.1.9-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9b486de7a64a66f9a172f4b3e0dfe79c9f0a93257c501296a2521a13495a698a", size = 2623216, upload-time = "2025-08-27T23:05:13.778Z" }, - { url = "https://files.pythonhosted.org/packages/81/42/7e6955cf0621e87491a1fb8cad755d5c2517803cea174229b0ec00ff0166/hf_xet-1.1.9-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c5a840c2c4e6ec875ed13703a60e3523bc7f48031dfd750923b2a4d1a5fc3c", size = 3186789, upload-time = "2025-08-27T23:05:12.368Z" }, - { url = "https://files.pythonhosted.org/packages/df/8b/759233bce05457f5f7ec062d63bbfd2d0c740b816279eaaa54be92aa452a/hf_xet-1.1.9-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:96a6139c9e44dad1c52c52520db0fffe948f6bce487cfb9d69c125f254bb3790", size = 3088747, upload-time = "2025-08-27T23:05:10.439Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3c/28cc4db153a7601a996985bcb564f7b8f5b9e1a706c7537aad4b4809f358/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ad1022e9a998e784c97b2173965d07fe33ee26e4594770b7785a8cc8f922cd95", size = 3251429, upload-time = "2025-08-27T23:05:16.471Z" }, - { url = "https://files.pythonhosted.org/packages/84/17/7caf27a1d101bfcb05be85850d4aa0a265b2e1acc2d4d52a48026ef1d299/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:86754c2d6d5afb11b0a435e6e18911a4199262fe77553f8c50d75e21242193ea", size = 3354643, upload-time = "2025-08-27T23:05:17.828Z" }, - { url = "https://files.pythonhosted.org/packages/cd/50/0c39c9eed3411deadcc98749a6699d871b822473f55fe472fad7c01ec588/hf_xet-1.1.9-cp37-abi3-win_amd64.whl", hash = "sha256:5aad3933de6b725d61d51034e04174ed1dce7a57c63d530df0014dea15a40127", size = 2804797, upload-time = "2025-08-27T23:05:20.77Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466, upload-time = "2025-09-12T20:10:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807, upload-time = "2025-09-12T20:10:21.118Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960, upload-time = "2025-09-12T20:10:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167, upload-time = "2025-09-12T20:10:17.255Z" }, + { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612, upload-time = "2025-09-12T20:10:24.093Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360, upload-time = "2025-09-12T20:10:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691, upload-time = "2025-09-12T20:10:28.433Z" }, ] [[package]] @@ -2668,24 +2699,24 @@ wheels = [ [[package]] name = "httptools" -version = "0.6.4" +version = "0.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029, upload-time = "2024-10-16T19:44:18.427Z" }, - { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492, upload-time = "2024-10-16T19:44:19.515Z" }, - { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891, upload-time = "2024-10-16T19:44:21.067Z" }, - { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788, upload-time = "2024-10-16T19:44:22.958Z" }, - { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214, upload-time = "2024-10-16T19:44:24.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120, upload-time = "2024-10-16T19:44:26.295Z" }, - { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565, upload-time = "2024-10-16T19:44:29.188Z" }, - { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, - { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, - { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, ] [[package]] @@ -2714,16 +2745,16 @@ socks = [ [[package]] name = "httpx-sse" -version = "0.4.1" +version = "0.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, ] [[package]] name = "huggingface-hub" -version = "0.34.4" +version = "0.35.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -2735,9 +2766,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/c9/bdbe19339f76d12985bc03572f330a01a93c04dffecaaea3061bdd7fb892/huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c", size = 459768, upload-time = "2025-08-08T09:14:52.365Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hash = "sha256:350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a", size = 461798, upload-time = "2025-09-29T14:29:58.625Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/7b/bb06b061991107cd8783f300adff3e7b7f284e330fd82f507f2a1417b11d/huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a", size = 561452, upload-time = "2025-08-08T09:14:50.159Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl", hash = "sha256:0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba", size = 564262, upload-time = "2025-09-29T14:29:55.813Z" }, ] [[package]] @@ -2763,38 +2794,38 @@ wheels = [ [[package]] name = "hypothesis" -version = "6.138.15" +version = "6.140.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/68/adc338edec178cf6c08b4843ea2b2d639d47bed4b06ea9331433b71acc0a/hypothesis-6.138.15.tar.gz", hash = "sha256:6b0e1aa182eacde87110995a3543530d69ef411f642162a656efcd46c2823ad1", size = 466116, upload-time = "2025-09-08T05:34:15.956Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/7f/946343e32881b56adc0eba64e428ad2f85251f9ef16e3e4ec1b6ab80199b/hypothesis-6.140.3.tar.gz", hash = "sha256:4f4a09bf77af21e0cc3dffed1ea639812dc75d38f81308ec9fb0e33f8557b0cb", size = 466925, upload-time = "2025-10-04T22:29:44.499Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/49/911eb0cd17884a7a6f510e78acf0a70592e414d194695a0c7c1db91645b2/hypothesis-6.138.15-py3-none-any.whl", hash = "sha256:b7cf743d461c319eb251a13c8e1dcf00f4ef7085e4ab5bf5abf102b2a5ffd694", size = 533621, upload-time = "2025-09-08T05:34:12.272Z" }, + { url = "https://files.pythonhosted.org/packages/65/2a/0553ac2a8af432df92f2ffc05ca97e7ed64e00c97a371b019ae2690de325/hypothesis-6.140.3-py3-none-any.whl", hash = "sha256:a2cfff51641a58a56081f5c90ae1da6ccf3d043404f411805f7f0e0d75742d0e", size = 534534, upload-time = "2025-10-04T22:29:40.635Z" }, ] [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] name = "import-linter" -version = "2.4" +version = "2.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "grimp" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/db/33/e3c29beb4d8a33cfacdbe2858a3a4533694a0c1d0c060daaa761eff6d929/import_linter-2.4.tar.gz", hash = "sha256:4888fde83dd18bdbecd57ea1a98a1f3d52c6b6507d700f89f8678b44306c0ab4", size = 29942, upload-time = "2025-08-15T06:57:23.423Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6c/fd/49913b98fdeb5a8a120ca756abfc9aa7fdef7c20da1d728173e98ce11160/import_linter-2.5.2.tar.gz", hash = "sha256:d8f2dc6432975cc35edc4cc0bfcf1b811f05500b377ce0c3f62729d68f46c698", size = 159664, upload-time = "2025-10-09T10:53:24.635Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/11/2c108fc1138e506762db332c4a7ebc589cb379bc443939a81ec738b4cf73/import_linter-2.4-py3-none-any.whl", hash = "sha256:2ad6d5a164cdcd5ebdda4172cf0169f73dde1a8925ef7216672c321cd38f8499", size = 42355, upload-time = "2025-08-15T06:57:22.221Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f4/f20eeb9e6ab178ce011457cd936877202556f14b7af3ef2b3c3e26f3758a/import_linter-2.5.2-py3-none-any.whl", hash = "sha256:a70b64c2451dc6b96ff9ef5af4e3f6a2c8b63532a66a3c96a7c31ca086b10003", size = 44140, upload-time = "2025-10-09T10:53:23.367Z" }, ] [[package]] @@ -2874,34 +2905,35 @@ wheels = [ [[package]] name = "jiter" -version = "0.10.0" +version = "0.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473, upload-time = "2025-05-18T19:03:25.942Z" }, - { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971, upload-time = "2025-05-18T19:03:27.255Z" }, - { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574, upload-time = "2025-05-18T19:03:28.63Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028, upload-time = "2025-05-18T19:03:30.292Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083, upload-time = "2025-05-18T19:03:31.654Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821, upload-time = 
"2025-05-18T19:03:33.184Z" }, - { url = "https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174, upload-time = "2025-05-18T19:03:34.965Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869, upload-time = "2025-05-18T19:03:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741, upload-time = "2025-05-18T19:03:38.168Z" }, - { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527, upload-time = "2025-05-18T19:03:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765, upload-time = "2025-05-18T19:03:41.271Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234, upload-time = "2025-05-18T19:03:42.918Z" }, - { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, - { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, - { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, - { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, - { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, - { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, - { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, - { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, - { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, + { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, ] [[package]] @@ -2924,11 +2956,11 @@ wheels = [ [[package]] name = "json-repair" -version = "0.50.1" +version = "0.52.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/71/6d57ed93e43e98cdd124e82ab6231c6817f06a10743e7ae4bc6f66d03a02/json_repair-0.50.1.tar.gz", hash = 
"sha256:4ee69bc4be7330fbb90a3f19e890852c5fe1ceacec5ed1d2c25cdeeebdfaec76", size = 34864, upload-time = "2025-09-06T05:43:34.331Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/63/2c3c3c8cc1c28a0a20a9ab0eff5439c989ce3cc5956d8a4c7cf1eae0a06e/json_repair-0.52.0.tar.gz", hash = "sha256:0eee59cb3145b462b0734d4cf3246b797686caa669d52eee8dd30e09ea6d7876", size = 35384, upload-time = "2025-10-05T17:18:12.387Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/be/b1e05740d9c6f333dab67910f3894e2e2416c1ef00f9f7e20a327ab1f396/json_repair-0.50.1-py3-none-any.whl", hash = "sha256:9b78358bb7572a6e0b8effe7a8bd8cb959a3e311144842b1d2363fe39e2f13c5", size = 26020, upload-time = "2025-09-06T05:43:32.718Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7f/3a4e456da9a0f9ac54d9842ed51e96960826a98456f0826a9b3e808713c4/json_repair-0.52.0-py3-none-any.whl", hash = "sha256:c783069906a456f62e2a553fbef32a420a4745ff943e2014411728edcc7bf60a", size = 26350, upload-time = "2025-10-05T17:18:10.859Z" }, ] [[package]] @@ -2984,13 +3016,12 @@ wheels = [ [[package]] name = "kubernetes" -version = "33.1.0" +version = "34.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "durationpy" }, { name = "google-auth" }, - { name = "oauthlib" }, { name = "python-dateutil" }, { name = "pyyaml" }, { name = "requests" }, @@ -2999,9 +3030,9 @@ dependencies = [ { name = "urllib3" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779, upload-time = "2025-06-09T21:57:58.521Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/55/3f880ef65f559cbed44a9aa20d3bdbc219a2c3a3bac4a30a513029b03ee9/kubernetes-34.1.0.tar.gz", hash = "sha256:8fe8edb0b5d290a2f3ac06596b23f87c658977d46b5f8df9d0f4ea83d0003912", size = 1083771, upload-time = "2025-09-29T20:23:49.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/65f7d563aa4a62dd58777e8f6aa882f15db53b14eb29aba0c28a20f7eb26/kubernetes-34.1.0-py2.py3-none-any.whl", hash = "sha256:bffba2272534e224e6a7a74d582deb0b545b7c9879d2cd9e4aae9481d1f2cc2a", size = 2008380, upload-time = "2025-09-29T20:23:47.684Z" }, ] [[package]] @@ -3049,88 +3080,92 @@ wheels = [ [[package]] name = "litellm" -version = "1.63.7" +version = "1.77.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, { name = "click" }, + { name = "fastuuid" }, { name = "httpx" }, { name = "importlib-metadata" }, { name = "jinja2" }, { name = "jsonschema" }, { name = "openai" }, + { name = "pondpond" }, { name = "pydantic" }, { name = "python-dotenv" }, { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5c/7a/6c1994a239abd1b335001a46ae47fa055a24c493b6de19a9fa1872187fe9/litellm-1.63.7.tar.gz", hash = "sha256:2fbd7236d5e5379eee18556857ed62a5ed49f4f09e03ff33cf15932306b984f1", size = 6598034, upload-time = "2025-03-12T19:26:40.915Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ab/b7/0d3c6dbcff3064238d123f90ae96764a85352f3f5caab6695a55007fd019/litellm-1.77.4.tar.gz", hash = "sha256:ce652e10ecf5b36767bfdf58e53b2802e22c3de383b03554e6ee1a4a66fa743d", size = 10330773, upload-time = "2025-09-24T17:52:44.876Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/44/255c7ecb8b6f3f730a37422736509c21cb1bf4da66cc060d872005bda9f5/litellm-1.63.7-py3-none-any.whl", hash = "sha256:fbdee39a894506c68f158c6b4e0079f9e9c023441fff7215e7b8e42162dba0a7", size = 6909807, upload-time = "2025-03-12T19:26:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/3c/32/90f8587818d146d604ed6eec95f96378363fda06b14817399cc68853383e/litellm-1.77.4-py3-none-any.whl", hash = "sha256:66c2bb776f1e19ceddfa977a2bbf7f05e6f26c4b1fec8b2093bd171d842701b8", size = 9138493, upload-time = "2025-09-24T17:52:40.764Z" }, ] [[package]] name = "llvmlite" -version = "0.44.0" +version = "0.45.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/8d/5baf1cef7f9c084fb35a8afbde88074f0d6a727bc63ef764fe0e7543ba40/llvmlite-0.45.1.tar.gz", hash = "sha256:09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32", size = 185600, upload-time = "2025-10-01T17:59:52.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/e2/86b245397052386595ad726f9742e5223d7aea999b18c518a50e96c3aca4/llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3", size = 28132305, upload-time = "2025-01-20T11:12:53.936Z" }, - { url = "https://files.pythonhosted.org/packages/ff/ec/506902dc6870249fbe2466d9cf66d531265d0f3a1157213c8f986250c033/llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427", size = 26201090, upload-time = "2025-01-20T11:12:59.847Z" }, - { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858, upload-time = "2025-01-20T11:13:07.623Z" }, - { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c6/258801143975a6d09a373f2641237992496e15567b907a4d401839d671b8/llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955", size = 30331193, upload-time = "2025-01-20T11:13:26.976Z" }, - { url = "https://files.pythonhosted.org/packages/15/86/e3c3195b92e6e492458f16d233e58a1a812aa2bfbef9bdd0fbafcec85c60/llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad", size = 28132297, upload-time = "2025-01-20T11:13:32.57Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/53/373b6b8be67b9221d12b24125fd0ec56b1078b660eeae266ec388a6ac9a0/llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db", size = 26201105, upload-time = "2025-01-20T11:13:38.744Z" }, - { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, - { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3b/a9a17366af80127bd09decbe2a54d8974b6d8b274b39bf47fbaedeec6307/llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1", size = 30332380, upload-time = "2025-01-20T11:14:02.442Z" }, + { url = "https://files.pythonhosted.org/packages/04/ad/9bdc87b2eb34642c1cfe6bcb4f5db64c21f91f26b010f263e7467e7536a3/llvmlite-0.45.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:60f92868d5d3af30b4239b50e1717cb4e4e54f6ac1c361a27903b318d0f07f42", size = 43043526, upload-time = "2025-10-01T18:03:15.051Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ea/c25c6382f452a943b4082da5e8c1665ce29a62884e2ec80608533e8e82d5/llvmlite-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98baab513e19beb210f1ef39066288784839a44cd504e24fff5d17f1b3cf0860", size = 37253118, upload-time = "2025-10-01T18:04:06.783Z" }, + { url = "https://files.pythonhosted.org/packages/fe/af/85fc237de98b181dbbe8647324331238d6c52a3554327ccdc83ced28efba/llvmlite-0.45.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3adc2355694d6a6fbcc024d59bb756677e7de506037c878022d7b877e7613a36", size = 56288209, upload-time = "2025-10-01T18:01:00.168Z" }, + { url = "https://files.pythonhosted.org/packages/0a/df/3daf95302ff49beff4230065e3178cd40e71294968e8d55baf4a9e560814/llvmlite-0.45.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f3377a6db40f563058c9515dedcc8a3e562d8693a106a28f2ddccf2c8fcf6ca", size = 55140958, upload-time = "2025-10-01T18:02:11.199Z" }, + { url = "https://files.pythonhosted.org/packages/a4/56/4c0d503fe03bac820ecdeb14590cf9a248e120f483bcd5c009f2534f23f0/llvmlite-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a", size = 38132232, upload-time = "2025-10-01T18:04:52.181Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7c/82cbd5c656e8991bcc110c69d05913be2229302a92acb96109e166ae31fb/llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e", size = 43043524, upload-time = "2025-10-01T18:03:30.666Z" }, + { url = "https://files.pythonhosted.org/packages/9d/bc/5314005bb2c7ee9f33102c6456c18cc81745d7055155d1218f1624463774/llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f", size = 37253123, upload-time = "2025-10-01T18:04:18.177Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/76/0f7154952f037cb320b83e1c952ec4a19d5d689cf7d27cb8a26887d7bbc1/llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f", size = 56288211, upload-time = "2025-10-01T18:01:24.079Z" }, + { url = "https://files.pythonhosted.org/packages/00/b1/0b581942be2683ceb6862d558979e87387e14ad65a1e4db0e7dd671fa315/llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:779e2f2ceefef0f4368548685f0b4adde34e5f4b457e90391f570a10b348d433", size = 55140958, upload-time = "2025-10-01T18:02:30.482Z" }, + { url = "https://files.pythonhosted.org/packages/33/94/9ba4ebcf4d541a325fd8098ddc073b663af75cc8b065b6059848f7d4dce7/llvmlite-0.45.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e6c9949baf25d9aa9cd7cf0f6d011b9ca660dd17f5ba2b23bdbdb77cc86b116", size = 38132231, upload-time = "2025-10-01T18:05:03.664Z" }, ] [[package]] name = "lxml" -version = "6.0.1" +version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8f/bd/f9d01fd4132d81c6f43ab01983caea69ec9614b913c290a26738431a015d/lxml-6.0.1.tar.gz", hash = "sha256:2b3a882ebf27dd026df3801a87cf49ff791336e0f94b0fad195db77e01240690", size = 4070214, upload-time = "2025-08-22T10:37:53.525Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/c8/262c1d19339ef644cdc9eb5aad2e85bd2d1fa2d7c71cdef3ede1a3eed84d/lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6acde83f7a3d6399e6d83c1892a06ac9b14ea48332a5fbd55d60b9897b9570a", size = 8422719, upload-time = "2025-08-22T10:32:24.848Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d4/1b0afbeb801468a310642c3a6f6704e53c38a4a6eb1ca6faea013333e02f/lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d21c9cacb6a889cbb8eeb46c77ef2c1dd529cde10443fdeb1de847b3193c541", size = 4575763, upload-time = "2025-08-22T10:32:27.057Z" }, - { url = "https://files.pythonhosted.org/packages/5b/c1/8db9b5402bf52ceb758618313f7423cd54aea85679fcf607013707d854a8/lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:847458b7cd0d04004895f1fb2cca8e7c0f8ec923c49c06b7a72ec2d48ea6aca2", size = 4943244, upload-time = "2025-08-22T10:32:28.847Z" }, - { url = "https://files.pythonhosted.org/packages/e7/78/838e115358dd2369c1c5186080dd874a50a691fb5cd80db6afe5e816e2c6/lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1dc13405bf315d008fe02b1472d2a9d65ee1c73c0a06de5f5a45e6e404d9a1c0", size = 5081725, upload-time = "2025-08-22T10:32:30.666Z" }, - { url = "https://files.pythonhosted.org/packages/c7/b6/bdcb3a3ddd2438c5b1a1915161f34e8c85c96dc574b0ef3be3924f36315c/lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f540c229a8c0a770dcaf6d5af56a5295e0fc314fc7ef4399d543328054bcea", size = 5021238, upload-time = "2025-08-22T10:32:32.49Z" }, - { url = "https://files.pythonhosted.org/packages/73/e5/1bfb96185dc1a64c7c6fbb7369192bda4461952daa2025207715f9968205/lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = 
"sha256:d2f73aef768c70e8deb8c4742fca4fd729b132fda68458518851c7735b55297e", size = 5343744, upload-time = "2025-08-22T10:32:34.385Z" }, - { url = "https://files.pythonhosted.org/packages/a2/ae/df3ea9ebc3c493b9c6bdc6bd8c554ac4e147f8d7839993388aab57ec606d/lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7f4066b85a4fa25ad31b75444bd578c3ebe6b8ed47237896341308e2ce923c3", size = 5223477, upload-time = "2025-08-22T10:32:36.256Z" }, - { url = "https://files.pythonhosted.org/packages/37/b3/65e1e33600542c08bc03a4c5c9c306c34696b0966a424a3be6ffec8038ed/lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0cce65db0cd8c750a378639900d56f89f7d6af11cd5eda72fde054d27c54b8ce", size = 4676626, upload-time = "2025-08-22T10:32:38.793Z" }, - { url = "https://files.pythonhosted.org/packages/7a/46/ee3ed8f3a60e9457d7aea46542d419917d81dbfd5700fe64b2a36fb5ef61/lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c372d42f3eee5844b69dcab7b8d18b2f449efd54b46ac76970d6e06b8e8d9a66", size = 5066042, upload-time = "2025-08-22T10:32:41.134Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b9/8394538e7cdbeb3bfa36bc74924be1a4383e0bb5af75f32713c2c4aa0479/lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2e2b0e042e1408bbb1c5f3cfcb0f571ff4ac98d8e73f4bf37c5dd179276beedd", size = 4724714, upload-time = "2025-08-22T10:32:43.94Z" }, - { url = "https://files.pythonhosted.org/packages/b3/21/3ef7da1ea2a73976c1a5a311d7cde5d379234eec0968ee609517714940b4/lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cc73bb8640eadd66d25c5a03175de6801f63c535f0f3cf50cac2f06a8211f420", size = 5247376, upload-time = "2025-08-22T10:32:46.263Z" }, - { url = "https://files.pythonhosted.org/packages/26/7d/0980016f124f00c572cba6f4243e13a8e80650843c66271ee692cddf25f3/lxml-6.0.1-cp311-cp311-win32.whl", hash = "sha256:7c23fd8c839708d368e406282d7953cee5134f4592ef4900026d84566d2b4c88", size = 3609499, upload-time = "2025-08-22T10:32:48.156Z" }, - { url = "https://files.pythonhosted.org/packages/b1/08/28440437521f265eff4413eb2a65efac269c4c7db5fd8449b586e75d8de2/lxml-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2516acc6947ecd3c41a4a4564242a87c6786376989307284ddb115f6a99d927f", size = 4036003, upload-time = "2025-08-22T10:32:50.662Z" }, - { url = "https://files.pythonhosted.org/packages/7b/dc/617e67296d98099213a505d781f04804e7b12923ecd15a781a4ab9181992/lxml-6.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:cb46f8cfa1b0334b074f40c0ff94ce4d9a6755d492e6c116adb5f4a57fb6ad96", size = 3679662, upload-time = "2025-08-22T10:32:52.739Z" }, - { url = "https://files.pythonhosted.org/packages/b0/a9/82b244c8198fcdf709532e39a1751943a36b3e800b420adc739d751e0299/lxml-6.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c03ac546adaabbe0b8e4a15d9ad815a281afc8d36249c246aecf1aaad7d6f200", size = 8422788, upload-time = "2025-08-22T10:32:56.612Z" }, - { url = "https://files.pythonhosted.org/packages/c9/8d/1ed2bc20281b0e7ed3e6c12b0a16e64ae2065d99be075be119ba88486e6d/lxml-6.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33b862c7e3bbeb4ba2c96f3a039f925c640eeba9087a4dc7a572ec0f19d89392", size = 4593547, upload-time = "2025-08-22T10:32:59.016Z" }, - { url = "https://files.pythonhosted.org/packages/76/53/d7fd3af95b72a3493bf7fbe842a01e339d8f41567805cecfecd5c71aa5ee/lxml-6.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a3ec1373f7d3f519de595032d4dcafae396c29407cfd5073f42d267ba32440d", size = 4948101, upload-time = "2025-08-22T10:33:00.765Z" 
}, - { url = "https://files.pythonhosted.org/packages/9d/51/4e57cba4d55273c400fb63aefa2f0d08d15eac021432571a7eeefee67bed/lxml-6.0.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03b12214fb1608f4cffa181ec3d046c72f7e77c345d06222144744c122ded870", size = 5108090, upload-time = "2025-08-22T10:33:03.108Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6e/5f290bc26fcc642bc32942e903e833472271614e24d64ad28aaec09d5dae/lxml-6.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:207ae0d5f0f03b30f95e649a6fa22aa73f5825667fee9c7ec6854d30e19f2ed8", size = 5021791, upload-time = "2025-08-22T10:33:06.972Z" }, - { url = "https://files.pythonhosted.org/packages/13/d4/2e7551a86992ece4f9a0f6eebd4fb7e312d30f1e372760e2109e721d4ce6/lxml-6.0.1-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:32297b09ed4b17f7b3f448de87a92fb31bb8747496623483788e9f27c98c0f00", size = 5358861, upload-time = "2025-08-22T10:33:08.967Z" }, - { url = "https://files.pythonhosted.org/packages/8a/5f/cb49d727fc388bf5fd37247209bab0da11697ddc5e976ccac4826599939e/lxml-6.0.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7e18224ea241b657a157c85e9cac82c2b113ec90876e01e1f127312006233756", size = 5652569, upload-time = "2025-08-22T10:33:10.815Z" }, - { url = "https://files.pythonhosted.org/packages/ca/b8/66c1ef8c87ad0f958b0a23998851e610607c74849e75e83955d5641272e6/lxml-6.0.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a07a994d3c46cd4020c1ea566345cf6815af205b1e948213a4f0f1d392182072", size = 5252262, upload-time = "2025-08-22T10:33:12.673Z" }, - { url = "https://files.pythonhosted.org/packages/1a/ef/131d3d6b9590e64fdbb932fbc576b81fcc686289da19c7cb796257310e82/lxml-6.0.1-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:2287fadaa12418a813b05095485c286c47ea58155930cfbd98c590d25770e225", size = 4710309, upload-time = "2025-08-22T10:33:14.952Z" }, - { url = "https://files.pythonhosted.org/packages/bc/3f/07f48ae422dce44902309aa7ed386c35310929dc592439c403ec16ef9137/lxml-6.0.1-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b4e597efca032ed99f418bd21314745522ab9fa95af33370dcee5533f7f70136", size = 5265786, upload-time = "2025-08-22T10:33:16.721Z" }, - { url = "https://files.pythonhosted.org/packages/11/c7/125315d7b14ab20d9155e8316f7d287a4956098f787c22d47560b74886c4/lxml-6.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9696d491f156226decdd95d9651c6786d43701e49f32bf23715c975539aa2b3b", size = 5062272, upload-time = "2025-08-22T10:33:18.478Z" }, - { url = "https://files.pythonhosted.org/packages/8b/c3/51143c3a5fc5168a7c3ee626418468ff20d30f5a59597e7b156c1e61fba8/lxml-6.0.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e4e3cd3585f3c6f87cdea44cda68e692cc42a012f0131d25957ba4ce755241a7", size = 4786955, upload-time = "2025-08-22T10:33:20.34Z" }, - { url = "https://files.pythonhosted.org/packages/11/86/73102370a420ec4529647b31c4a8ce8c740c77af3a5fae7a7643212d6f6e/lxml-6.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:45cbc92f9d22c28cd3b97f8d07fcefa42e569fbd587dfdac76852b16a4924277", size = 5673557, upload-time = "2025-08-22T10:33:22.282Z" }, - { url = "https://files.pythonhosted.org/packages/d7/2d/aad90afaec51029aef26ef773b8fd74a9e8706e5e2f46a57acd11a421c02/lxml-6.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:f8c9bcfd2e12299a442fba94459adf0b0d001dbc68f1594439bfa10ad1ecb74b", size = 5254211, upload-time = "2025-08-22T10:33:24.15Z" }, 
- { url = "https://files.pythonhosted.org/packages/63/01/c9e42c8c2d8b41f4bdefa42ab05448852e439045f112903dd901b8fbea4d/lxml-6.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1e9dc2b9f1586e7cd77753eae81f8d76220eed9b768f337dc83a3f675f2f0cf9", size = 5275817, upload-time = "2025-08-22T10:33:26.007Z" }, - { url = "https://files.pythonhosted.org/packages/bc/1f/962ea2696759abe331c3b0e838bb17e92224f39c638c2068bf0d8345e913/lxml-6.0.1-cp312-cp312-win32.whl", hash = "sha256:987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb", size = 3610889, upload-time = "2025-08-22T10:33:28.169Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/22c86a990b51b44442b75c43ecb2f77b8daba8c4ba63696921966eac7022/lxml-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc", size = 4010925, upload-time = "2025-08-22T10:33:29.874Z" }, - { url = "https://files.pythonhosted.org/packages/b2/21/dc0c73325e5eb94ef9c9d60dbb5dcdcb2e7114901ea9509735614a74e75a/lxml-6.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299", size = 3671922, upload-time = "2025-08-22T10:33:31.535Z" }, - { url = "https://files.pythonhosted.org/packages/41/37/41961f53f83ded57b37e65e4f47d1c6c6ef5fd02cb1d6ffe028ba0efa7d4/lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b556aaa6ef393e989dac694b9c95761e32e058d5c4c11ddeef33f790518f7a5e", size = 3903412, upload-time = "2025-08-22T10:37:40.758Z" }, - { url = "https://files.pythonhosted.org/packages/3d/47/8631ea73f3dc776fb6517ccde4d5bd5072f35f9eacbba8c657caa4037a69/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:64fac7a05ebb3737b79fd89fe5a5b6c5546aac35cfcfd9208eb6e5d13215771c", size = 4224810, upload-time = "2025-08-22T10:37:42.839Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b8/39ae30ca3b1516729faeef941ed84bf8f12321625f2644492ed8320cb254/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:038d3c08babcfce9dc89aaf498e6da205efad5b7106c3b11830a488d4eadf56b", size = 4329221, upload-time = "2025-08-22T10:37:45.223Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ea/048dea6cdfc7a72d40ae8ed7e7d23cf4a6b6a6547b51b492a3be50af0e80/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:445f2cee71c404ab4259bc21e20339a859f75383ba2d7fb97dfe7c163994287b", size = 4270228, upload-time = "2025-08-22T10:37:47.276Z" }, - { url = "https://files.pythonhosted.org/packages/6b/d4/c2b46e432377c45d611ae2f669aa47971df1586c1a5240675801d0f02bac/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e352d8578e83822d70bea88f3d08b9912528e4c338f04ab707207ab12f4b7aac", size = 4416077, upload-time = "2025-08-22T10:37:49.822Z" }, - { url = "https://files.pythonhosted.org/packages/b6/db/8f620f1ac62cf32554821b00b768dd5957ac8e3fd051593532be5b40b438/lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:51bd5d1a9796ca253db6045ab45ca882c09c071deafffc22e06975b7ace36300", size = 3518127, upload-time = "2025-08-22T10:37:51.66Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, + { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, ] [[package]] @@ -3167,19 +3202,10 @@ wheels = [ ] [[package]] -name = "mailchimp-transactional" -version = "1.0.56" +name = "madoka" +version = "0.7.1" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "python-dateutil" }, - { name = "requests" }, - { name = "six" }, - { name = "urllib3" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bc/cb60d02c00996839bbd87444a97d0ba5ac271b1a324001562afb8f685251/mailchimp_transactional-1.0.56-py3-none-any.whl", hash = "sha256:a76ea88b90a2d47d8b5134586aabbd3a96c459f6066d8886748ab59e50de36eb", size = 31660, upload-time = "2024-02-01T18:39:19.717Z" }, -] +sdist = { url = "https://files.pythonhosted.org/packages/da/eb/95288b1c4aa541eb296a6271e3f8c7ece03b78923ac47dbe95d2287d9f5e/madoka-0.7.1.tar.gz", hash = "sha256:e258baa84fc0a3764365993b8bf5e1b065383a6ca8c9f862fb3e3e709843fae7", size = 81413, upload-time = "2019-02-10T18:38:01.382Z" } [[package]] name = "mako" @@ -3216,30 +3242,32 @@ wheels = [ [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", 
hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, ] [[package]] @@ -3342,16 +3370,16 @@ wheels = [ [[package]] name = "msal" -version = "1.33.0" +version = "1.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/da/81acbe0c1fd7e9e4ec35f55dadeba9833a847b9a6ba2e2d1e4432da901dd/msal-1.33.0.tar.gz", hash = "sha256:836ad80faa3e25a7d71015c990ce61f704a87328b1e73bcbb0623a18cbf17510", size = 
153801, upload-time = "2025-07-22T19:36:33.693Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/5b/fbc73e91f7727ae1e79b21ed833308e99dc11cc1cd3d4717f579775de5e9/msal-1.33.0-py3-none-any.whl", hash = "sha256:c0cd41cecf8eaed733ee7e3be9e040291eba53b0f262d3ae9c58f38b04244273", size = 116853, upload-time = "2025-07-22T19:36:32.403Z" }, + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, ] [[package]] @@ -3366,65 +3394,49 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, ] -[[package]] -name = "msrest" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "certifi" }, - { name = "isodate" }, - { name = "requests" }, - { name = "requests-oauthlib" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/77/8397c8fb8fc257d8ea0fa66f8068e073278c65f05acb17dcb22a02bfdc42/msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9", size = 175332, upload-time = "2022-06-13T22:41:25.111Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/cf/f2966a2638144491f8696c27320d5219f48a072715075d168b31d3237720/msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", size = 85384, upload-time = "2022-06-13T22:41:22.42Z" }, -] - [[package]] name = "multidict" -version = "6.6.4" +version = "6.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, - { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, - { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, - { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, - { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, - { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, - { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, - { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, - { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, - { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, - { url = 
"https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, - { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, - { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, - { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, - { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, - { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, - { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, - { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, - { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, - { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, - { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, - { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, - { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, - { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, - { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, - { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 
41888, upload-time = "2025-08-11T12:07:15.904Z" }, - { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, - { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, 
upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] @@ -3455,14 +3467,14 @@ wheels = [ [[package]] name = "mypy-boto3-bedrock-runtime" -version = "1.40.21" +version = "1.40.41" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/ff/074a1e1425d04e7294c962803655e85e20e158734534ce8d302efaa8230a/mypy_boto3_bedrock_runtime-1.40.21.tar.gz", hash = "sha256:fa9401e86d42484a53803b1dba0782d023ab35c817256e707fbe4fff88aeb881", size = 28326, upload-time = "2025-08-29T19:25:09.405Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/38/79989f7bce998776ed1a01c17f3f58e7bc6f5fc2bcbdff929701526fa2f1/mypy_boto3_bedrock_runtime-1.40.41.tar.gz", hash = "sha256:ee9bda6d6d478c8d0995e84e884bdf1798e150d437974ae27c175774a58ffaa5", size = 28333, upload-time = "2025-09-29T19:26:04.804Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/02/9d3b881bee5552600c6f456e446069d5beffd2b7862b99e1e945d60d6a9b/mypy_boto3_bedrock_runtime-1.40.21-py3-none-any.whl", hash = 
"sha256:4c9ea181ef00cb3d15f9b051a50e3b78272122d24cd24ac34938efe6ddfecc62", size = 34149, upload-time = "2025-08-29T19:25:03.941Z" }, + { url = "https://files.pythonhosted.org/packages/3d/6c/d3431dadf473bb76aa590b1ed8cc91726a48b029b542eff9d3024f2d70b9/mypy_boto3_bedrock_runtime-1.40.41-py3-none-any.whl", hash = "sha256:d65dff200986ff06c6b3579ddcea102555f2067c8987fca379bf4f9ed8ba3121", size = 34181, upload-time = "2025-09-29T19:26:01.898Z" }, ] [[package]] @@ -3474,6 +3486,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] +[[package]] +name = "mysql-connector-python" +version = "9.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/77/2b45e6460d05b1f1b7a4c8eb79a50440b4417971973bb78c9ef6cad630a6/mysql_connector_python-9.4.0.tar.gz", hash = "sha256:d111360332ae78933daf3d48ff497b70739aa292ab0017791a33e826234e743b", size = 12185532, upload-time = "2025-07-22T08:02:05.788Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/0c/4365a802129be9fa63885533c38be019f1c6b6f5bcf8844ac53902314028/mysql_connector_python-9.4.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:7df1a8ddd182dd8adc914f6dc902a986787bf9599705c29aca7b2ce84e79d361", size = 17501627, upload-time = "2025-07-22T07:57:45.416Z" }, + { url = "https://files.pythonhosted.org/packages/c0/bf/ca596c00d7a6eaaf8ef2f66c9b23cd312527f483073c43ffac7843049cb4/mysql_connector_python-9.4.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3892f20472e13e63b1fb4983f454771dd29f211b09724e69a9750e299542f2f8", size = 18369494, upload-time = "2025-07-22T07:57:49.714Z" }, + { url = "https://files.pythonhosted.org/packages/25/14/6510a11ed9f80d77f743dc207773092c4ab78d5efa454b39b48480315d85/mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d3e87142103d71c4df647ece30f98e85e826652272ed1c74822b56f6acdc38e7", size = 33516187, upload-time = "2025-07-22T07:57:55.294Z" }, + { url = "https://files.pythonhosted.org/packages/16/a8/4f99d80f1cf77733ce9a44b6adb7f0dd7079e7afa51ca4826515ef0c3e16/mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b27fcd403436fe83bafb2fe7fcb785891e821e639275c4ad3b3bd1e25f533206", size = 33917818, upload-time = "2025-07-22T07:58:00.523Z" }, + { url = "https://files.pythonhosted.org/packages/15/9c/127f974ca9d5ee25373cb5433da06bb1f36e05f2a6b7436da1fe9c6346b0/mysql_connector_python-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd6ff5afb9c324b0bbeae958c93156cce4168c743bf130faf224d52818d1f0ee", size = 16392378, upload-time = "2025-07-22T07:58:04.669Z" }, + { url = "https://files.pythonhosted.org/packages/03/7c/a543fb17c2dfa6be8548dfdc5879a0c7924cd5d1c79056c48472bb8fe858/mysql_connector_python-9.4.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:4efa3898a24aba6a4bfdbf7c1f5023c78acca3150d72cc91199cca2ccd22f76f", size = 17503693, upload-time = "2025-07-22T07:58:08.96Z" }, + { url = "https://files.pythonhosted.org/packages/cb/6e/c22fbee05f5cfd6ba76155b6d45f6261d8d4c1e36e23de04e7f25fbd01a4/mysql_connector_python-9.4.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:665c13e7402235162e5b7a2bfdee5895192121b64ea455c90a81edac6a48ede5", size = 18371987, upload-time = "2025-07-22T07:58:13.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/fd/f426f5f35a3d3180c7f84d1f96b4631be2574df94ca1156adab8618b236c/mysql_connector_python-9.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:815aa6cad0f351c1223ef345781a538f2e5e44ef405fdb3851eb322bd9c4ca2b", size = 33516214, upload-time = "2025-07-22T07:58:18.967Z" }, + { url = "https://files.pythonhosted.org/packages/45/5a/1b053ae80b43cd3ccebc4bb99a98826969b3b0f8adebdcc2530750ad76ed/mysql_connector_python-9.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b3436a2c8c0ec7052932213e8d01882e6eb069dbab33402e685409084b133a1c", size = 33918565, upload-time = "2025-07-22T07:58:25.28Z" }, + { url = "https://files.pythonhosted.org/packages/cb/69/36b989de675d98ba8ff7d45c96c30c699865c657046f2e32db14e78f13d9/mysql_connector_python-9.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:57b0c224676946b70548c56798d5023f65afa1ba5b8ac9f04a143d27976c7029", size = 16392563, upload-time = "2025-07-22T07:58:29.623Z" }, + { url = "https://files.pythonhosted.org/packages/36/34/b6165e15fd45a8deb00932d8e7d823de7650270873b4044c4db6688e1d8f/mysql_connector_python-9.4.0-py2.py3-none-any.whl", hash = "sha256:56e679169c704dab279b176fab2a9ee32d2c632a866c0f7cd48a8a1e2cf802c4", size = 406574, upload-time = "2025-07-22T07:59:08.394Z" }, +] + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -3494,7 +3525,7 @@ wheels = [ [[package]] name = "nltk" -version = "3.9.1" +version = "3.9.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -3502,74 +3533,74 @@ dependencies = [ { name = "regex" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = "2025-10-01T07:19:23.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442, upload-time = "2024-08-18T19:48:21.909Z" }, + { url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" }, ] [[package]] name = "nodejs-wheel-binaries" -version = "22.19.0" +version = "22.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/ca/6033f80b7aebc23cb31ed8b09608b6308c5273c3522aedd043e8a0644d83/nodejs_wheel_binaries-22.19.0.tar.gz", hash = "sha256:e69b97ef443d36a72602f7ed356c6a36323873230f894799f4270a853932fdb3", size = 8060, upload-time = "2025-09-12T10:33:46.935Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/54/02f58c8119e2f1984e2572cc77a7b469dbaf4f8d171ad376e305749ef48e/nodejs_wheel_binaries-22.20.0.tar.gz", hash = "sha256:a62d47c9fd9c32191dff65bbe60261504f26992a0a19fe8b4d523256a84bd351", size = 8058, upload-time = "2025-09-26T09:48:00.906Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/93/a2/0d055fd1d8c9a7a971c4db10cf42f3bba57c964beb6cf383ca053f2cdd20/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:43eca1526455a1fb4cb777095198f7ebe5111a4444749c87f5c2b84645aaa72a", size = 50902454, upload-time = "2025-09-12T10:33:18.3Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f5/446f7b3c5be1d2f5145ffa3c9aac3496e06cdf0f436adeb21a1f95dd79a7/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:feb06709e1320790d34babdf71d841ec7f28e4c73217d733e7f5023060a86bfc", size = 51837860, upload-time = "2025-09-12T10:33:21.599Z" }, - { url = "https://files.pythonhosted.org/packages/1e/4e/d0a036f04fd0f5dc3ae505430657044b8d9853c33be6b2d122bb171aaca3/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9f5777292491430457c99228d3a267decf12a09d31246f0692391e3513285e", size = 57841528, upload-time = "2025-09-12T10:33:25.433Z" }, - { url = "https://files.pythonhosted.org/packages/e2/11/4811d27819f229cc129925c170db20c12d4f01ad366a0066f06d6eb833cf/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1392896f1a05a88a8a89b26e182d90fdf3020b4598a047807b91b65731e24c00", size = 58368815, upload-time = "2025-09-12T10:33:29.083Z" }, - { url = "https://files.pythonhosted.org/packages/6e/94/df41416856b980e38a7ff280cfb59f142a77955ccdbec7cc4260d8ab2e78/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9164c876644f949cad665e3ada00f75023e18f381e78a1d7b60ccbbfb4086e73", size = 59690937, upload-time = "2025-09-12T10:33:32.771Z" }, - { url = "https://files.pythonhosted.org/packages/d1/39/8d0d5f84b7616bdc4eca725f5d64a1cfcac3d90cf3f30cae17d12f8e987f/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6b4b75166134010bc9cfebd30dc57047796a27049fef3fc22316216d76bc0af7", size = 60751996, upload-time = "2025-09-12T10:33:36.962Z" }, - { url = "https://files.pythonhosted.org/packages/41/93/2d66b5b60055dd1de6e37e35bef563c15e4cafa5cfe3a6990e0ab358e515/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_amd64.whl", hash = "sha256:3f271f5abfc71b052a6b074225eca8c1223a0f7216863439b86feaca814f6e5a", size = 40026140, upload-time = "2025-09-12T10:33:40.33Z" }, - { url = "https://files.pythonhosted.org/packages/a3/46/c9cf7ff7e3c71f07ca8331c939afd09b6e59fc85a2944ea9411e8b29ce50/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_arm64.whl", hash = "sha256:666a355fe0c9bde44a9221cd543599b029045643c8196b8eedb44f28dc192e06", size = 38804500, upload-time = "2025-09-12T10:33:43.302Z" }, + { url = "https://files.pythonhosted.org/packages/24/6d/333e5458422f12318e3c3e6e7f194353aa68b0d633217c7e89833427ca01/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:455add5ac4f01c9c830ab6771dbfad0fdf373f9b040d3aabe8cca9b6c56654fb", size = 53246314, upload-time = "2025-09-26T09:47:32.536Z" }, + { url = "https://files.pythonhosted.org/packages/56/30/dcd6879d286a35b3c4c8f9e5e0e1bcf4f9e25fe35310fc77ecf97f915a23/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:5d8c12f97eea7028b34a84446eb5ca81829d0c428dfb4e647e09ac617f4e21fa", size = 53644391, upload-time = "2025-09-26T09:47:36.093Z" }, + { url = "https://files.pythonhosted.org/packages/58/be/c7b2e7aa3bb281d380a1c531f84d0ccfe225832dfc3bed1ca171753b9630/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7a2b0989194148f66e9295d8f11bc463bde02cbe276517f4d20a310fb84780ae", size = 60282516, upload-time = "2025-09-26T09:47:39.88Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c5/8befacf4190e03babbae54cb0809fb1a76e1600ec3967ab8ee9f8fc85b65/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5c500aa4dc046333ecb0a80f183e069e5c30ce637f1c1a37166b2c0b642dc21", size = 60347290, upload-time = "2025-09-26T09:47:43.712Z" }, + { url = "https://files.pythonhosted.org/packages/c0/bd/cfffd1e334277afa0714962c6ec432b5fe339340a6bca2e5fa8e678e7590/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3279eb1b99521f0d20a850bbfc0159a658e0e85b843b3cf31b090d7da9f10dfc", size = 62178798, upload-time = "2025-09-26T09:47:47.752Z" }, + { url = "https://files.pythonhosted.org/packages/08/14/10b83a9c02faac985b3e9f5e65d63a34fc0f46b48d8a2c3e4caa3e1e7318/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d29705797b33bade62d79d8f106c2453c8a26442a9b2a5576610c0f7e7c351ed", size = 62772957, upload-time = "2025-09-26T09:47:51.266Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a9/c6a480259aa0d6b270aac2c6ba73a97444b9267adde983a5b7e34f17e45a/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_amd64.whl", hash = "sha256:4bd658962f24958503541963e5a6f2cc512a8cb301e48a69dc03c879f40a28ae", size = 40120431, upload-time = "2025-09-26T09:47:54.363Z" }, + { url = "https://files.pythonhosted.org/packages/42/b1/6a4eb2c6e9efa028074b0001b61008c9d202b6b46caee9e5d1b18c088216/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_arm64.whl", hash = "sha256:1fccac931faa210d22b6962bcdbc99269d16221d831b9a118bbb80fe434a60b8", size = 38844133, upload-time = "2025-09-26T09:47:57.357Z" }, ] [[package]] name = "numba" -version = "0.61.2" +version = "0.62.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "llvmlite" }, { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/20/33dbdbfe60e5fd8e3dbfde299d106279a33d9f8308346022316781368591/numba-0.62.1.tar.gz", hash = "sha256:7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161", size = 2749817, upload-time = "2025-09-29T10:46:31.551Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/97/c99d1056aed767503c228f7099dc11c402906b42a4757fec2819329abb98/numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2", size = 2775825, upload-time = "2025-04-09T02:57:43.442Z" }, - { url = "https://files.pythonhosted.org/packages/95/9e/63c549f37136e892f006260c3e2613d09d5120672378191f2dc387ba65a2/numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b", size = 2778695, upload-time = "2025-04-09T02:57:44.968Z" }, - { url = "https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, - { url = "https://files.pythonhosted.org/packages/0f/a4/2b309a6a9f6d4d8cfba583401c7c2f9ff887adb5d54d8e2e130274c0973f/numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1", size = 2831505, upload-time = "2025-04-09T02:57:50.108Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a0/c6b7b9c615cfa3b98c4c63f4316e3f6b3bbe2387740277006551784218cd/numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2", size = 2776626, upload-time = "2025-04-09T02:57:51.857Z" }, - { url = "https://files.pythonhosted.org/packages/92/4a/fe4e3c2ecad72d88f5f8cd04e7f7cff49e718398a2fac02d2947480a00ca/numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8", size = 2779287, upload-time = "2025-04-09T02:57:53.658Z" }, - { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, - { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/68/1d/ddb3e704c5a8fb90142bf9dc195c27db02a08a99f037395503bfbc1d14b3/numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18", size = 2831929, upload-time = "2025-04-09T02:57:58.45Z" }, + { url = "https://files.pythonhosted.org/packages/dd/5f/8b3491dd849474f55e33c16ef55678ace1455c490555337899c35826836c/numba-0.62.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f43e24b057714e480fe44bc6031de499e7cf8150c63eb461192caa6cc8530bc8", size = 2684279, upload-time = "2025-09-29T10:43:37.213Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/71969149bfeb65a629e652b752b80167fe8a6a6f6e084f1f2060801f7f31/numba-0.62.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:57cbddc53b9ee02830b828a8428757f5c218831ccc96490a314ef569d8342b7b", size = 2687330, upload-time = "2025-09-29T10:43:59.601Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7d/403be3fecae33088027bc8a95dc80a2fda1e3beff3e0e5fc4374ada3afbe/numba-0.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:604059730c637c7885386521bb1b0ddcbc91fd56131a6dcc54163d6f1804c872", size = 3739727, upload-time = "2025-09-29T10:42:45.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c3/3d910d08b659a6d4c62ab3cd8cd93c4d8b7709f55afa0d79a87413027ff6/numba-0.62.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6c540880170bee817011757dc9049dba5a29db0c09b4d2349295991fe3ee55f", size = 3445490, upload-time = "2025-09-29T10:43:12.692Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/82/9d425c2f20d9f0a37f7cb955945a553a00fa06a2b025856c3550227c5543/numba-0.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:03de6d691d6b6e2b76660ba0f38f37b81ece8b2cc524a62f2a0cfae2bfb6f9da", size = 2745550, upload-time = "2025-09-29T10:44:20.571Z" }, + { url = "https://files.pythonhosted.org/packages/5e/fa/30fa6873e9f821c0ae755915a3ca444e6ff8d6a7b6860b669a3d33377ac7/numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494", size = 2685346, upload-time = "2025-09-29T10:43:43.677Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d5/504ce8dc46e0dba2790c77e6b878ee65b60fe3e7d6d0006483ef6fde5a97/numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6", size = 2688139, upload-time = "2025-09-29T10:44:04.894Z" }, + { url = "https://files.pythonhosted.org/packages/50/5f/6a802741176c93f2ebe97ad90751894c7b0c922b52ba99a4395e79492205/numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59", size = 3796453, upload-time = "2025-09-29T10:42:52.771Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/efd21527d25150c4544eccc9d0b7260a5dec4b7e98b5a581990e05a133c0/numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9315cc5e441300e0ca07c828a627d92a6802bcbf27c5487f31ae73783c58da53", size = 3496451, upload-time = "2025-09-29T10:43:19.279Z" }, + { url = "https://files.pythonhosted.org/packages/80/44/79bfdab12a02796bf4f1841630355c82b5a69933b1d50eb15c7fa37dabe8/numba-0.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:44e3aa6228039992f058f5ebfcfd372c83798e9464297bdad8cc79febcf7891e", size = 2745552, upload-time = "2025-09-29T10:44:26.399Z" }, ] [[package]] name = "numexpr" -version = "2.12.1" +version = "2.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/08/211c9ae8a230f20976f3b0b9a3308264c62bd05caf92aba7c59beebf6049/numexpr-2.12.1.tar.gz", hash = "sha256:e239faed0af001d1f1ea02934f7b3bb2bb6711ddb98e7a7bef61be5f45ff54ab", size = 115053, upload-time = "2025-09-11T11:04:04.36Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 119400, upload-time = "2025-10-13T16:17:27.351Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/a1/e10d3812e352eeedacea964ae7078181f5da659f77f65f4ff75aca67372c/numexpr-2.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ac38131930d6a1c4760f384621b9bd6fd8ab557147e81b7bcce777d557ee81", size = 154204, upload-time = "2025-09-11T11:02:20.607Z" }, - { url = "https://files.pythonhosted.org/packages/a2/fc/8e30453e82ffa2a25ccc263a69cb90bad4c195ce91d2c53c6d8699564b95/numexpr-2.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea09d6e669de2f7a92228d38d58ca0e59eeb83100a9b93b6467547ffdf93ceeb", size = 144226, upload-time = "2025-09-11T11:02:21.957Z" }, - { url = "https://files.pythonhosted.org/packages/3d/3a/4ea9dca5d82e8654ad54f788af6215d72ad9afc650f8f21098923391b8a8/numexpr-2.12.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:05ec71d3feae4a96c177d696de608d6003de96a0ed6c725e229d29c6ea495a2e", size = 422124, 
upload-time = "2025-09-11T11:02:23.017Z" }, - { url = "https://files.pythonhosted.org/packages/4e/42/26432c6d691c2534edcdd66d8c8aefeac90a71b6c767ab569609d2683869/numexpr-2.12.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09375dbc588c1042e99963289bcf2092d427a27e680ad267fe7e83fd1913d57f", size = 411888, upload-time = "2025-09-11T11:02:24.525Z" }, - { url = "https://files.pythonhosted.org/packages/49/20/c00814929daad00193e3d07f176066f17d83c064dec26699bd02e64cefbd/numexpr-2.12.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c6a16946a7a9c6fe6e68da87b822eaa9c2edb0e0d36885218c1b8122772f8068", size = 1387205, upload-time = "2025-09-11T11:02:25.701Z" }, - { url = "https://files.pythonhosted.org/packages/a8/1f/61c7d82321face677fb8fdd486c1a8fe64bcbcf184f65cc76c8ff2ee0c19/numexpr-2.12.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aa47f6d3798e9f9677acdea40ff6dd72fd0f2993b87fc1a85e120acbac99323b", size = 1434537, upload-time = "2025-09-11T11:02:26.937Z" }, - { url = "https://files.pythonhosted.org/packages/09/0e/7996ad143e2a5b4f295da718dba70c2108e6070bcff494c4a55f0b19c315/numexpr-2.12.1-cp311-cp311-win32.whl", hash = "sha256:d77311ce7910c14ebf45dec6ac98a597493b63e146a86bfd94128bdcdd7d2a3f", size = 156808, upload-time = "2025-09-11T11:02:28.126Z" }, - { url = "https://files.pythonhosted.org/packages/ce/7b/6ea78f0f5a39057cc10057bcd0d9e814ff60dc3698cbcd36b178c7533931/numexpr-2.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:4c3d6e524c4a386bc77cd3472b370c1bbe50e23c0a6d66960a006ad90db61d4d", size = 151235, upload-time = "2025-09-11T11:02:29.098Z" }, - { url = "https://files.pythonhosted.org/packages/7b/17/817f21537fc7827b55691990e44f1260e295be7e68bb37d4bc8741439723/numexpr-2.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cba7e922b813fd46415fbeac618dd78169a6acb6bd10e6055c1cd8a8f8bebd6e", size = 153915, upload-time = "2025-09-11T11:02:30.15Z" }, - { url = "https://files.pythonhosted.org/packages/0a/11/65d9d918339e6b9116f8cda9210249a3127843aef9f147d50cd2dad10d60/numexpr-2.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33e5f20bc5a64c163beeed6c57e75497247c779531266e255f93c76c57248a49", size = 144358, upload-time = "2025-09-11T11:02:31.173Z" }, - { url = "https://files.pythonhosted.org/packages/64/1d/8d349126ea9c00002b574aa5310a5eb669d3cf4e82e45ff643aa01ac48fe/numexpr-2.12.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:59958402930d13fafbf8c9fdff5b0866f0ea04083f877743b235447725aaea97", size = 423752, upload-time = "2025-09-11T11:02:32.208Z" }, - { url = "https://files.pythonhosted.org/packages/ba/4a/a16aba2aa141c6634bf619bf8d069942c3f875b71ae0650172bcff0200ec/numexpr-2.12.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12bb47518bfbc740afe4119fe141d20e715ab29e910250c96954d2794c0e6aa4", size = 413612, upload-time = "2025-09-11T11:02:33.656Z" }, - { url = "https://files.pythonhosted.org/packages/d0/61/91b85d42541a6517cc1a9f9dabc730acc56b724f4abdc5c84513558a0c79/numexpr-2.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e579d9a4a183f09affe102577e757e769150c0145c3ee46fbd00345d531d42b", size = 1388903, upload-time = "2025-09-11T11:02:35.229Z" }, - { url = "https://files.pythonhosted.org/packages/8d/58/2913b7938bd656e412fd41213dcd56cb72978a72d3b03636ab021eadc4ee/numexpr-2.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:69ba864878665f4289ef675997276439a854012044b442ce9048a03e39b8191e", size = 1436092, upload-time = "2025-09-11T11:02:36.363Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/31/c1863597c26d92554af29a3fff5b05d4c1885cf5450a690724c7cee04af9/numexpr-2.12.1-cp312-cp312-win32.whl", hash = "sha256:713410f76c0bbe08947c3d49477db05944ce0094449845591859e250866ba074", size = 156948, upload-time = "2025-09-11T11:02:37.518Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ca/c9bc0f460d352ab5934d659a4cb5bc9529e20e78ac60f906d7e41cbfbd42/numexpr-2.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:c32f934066608a32501e06d99b93e6f2dded33606905f9af40e1f4649973ae6e", size = 151370, upload-time = "2025-09-11T11:02:38.445Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a3/67999bdd1ed1f938d38f3fedd4969632f2f197b090e50505f7cc1fa82510/numexpr-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d03fcb4644a12f70a14d74006f72662824da5b6128bf1bcd10cc3ed80e64c34", size = 163195, upload-time = "2025-10-13T16:16:31.212Z" }, + { url = "https://files.pythonhosted.org/packages/25/95/d64f680ea1fc56d165457287e0851d6708800f9fcea346fc1b9957942ee6/numexpr-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2773ee1133f77009a1fc2f34fe236f3d9823779f5f75450e183137d49f00499f", size = 152088, upload-time = "2025-10-13T16:16:33.186Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7f/3bae417cb13ae08afd86d08bb0301c32440fe0cae4e6262b530e0819aeda/numexpr-2.14.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebe4980f9494b9f94d10d2e526edc29e72516698d3bf95670ba79415492212a4", size = 451126, upload-time = "2025-10-13T16:13:22.248Z" }, + { url = "https://files.pythonhosted.org/packages/4c/1a/edbe839109518364ac0bd9e918cf874c755bb2c128040e920f198c494263/numexpr-2.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a381e5e919a745c9503bcefffc1c7f98c972c04ec58fc8e999ed1a929e01ba6", size = 442012, upload-time = "2025-10-13T16:14:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/b1/be4ce99bff769a5003baddac103f34681997b31d4640d5a75c0e8ed59c78/numexpr-2.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d08856cfc1b440eb1caaa60515235369654321995dd68eb9377577392020f6cb", size = 1415975, upload-time = "2025-10-13T16:13:26.088Z" }, + { url = "https://files.pythonhosted.org/packages/e7/33/b33b8fdc032a05d9ebb44a51bfcd4b92c178a2572cd3e6c1b03d8a4b45b2/numexpr-2.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03130afa04edf83a7b590d207444f05a00363c9b9ea5d81c0f53b1ea13fad55a", size = 1464683, upload-time = "2025-10-13T16:14:58.87Z" }, + { url = "https://files.pythonhosted.org/packages/d0/b2/ddcf0ac6cf0a1d605e5aecd4281507fd79a9628a67896795ab2e975de5df/numexpr-2.14.1-cp311-cp311-win32.whl", hash = "sha256:db78fa0c9fcbaded3ae7453faf060bd7a18b0dc10299d7fcd02d9362be1213ed", size = 166838, upload-time = "2025-10-13T16:17:06.765Z" }, + { url = "https://files.pythonhosted.org/packages/64/72/4ca9bd97b2eb6dce9f5e70a3b6acec1a93e1fb9b079cb4cba2cdfbbf295d/numexpr-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e9b2f957798c67a2428be96b04bce85439bed05efe78eb78e4c2ca43737578e7", size = 160069, upload-time = "2025-10-13T16:17:08.752Z" }, + { url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790, upload-time = "2025-10-13T16:16:34.903Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196, upload-time = "2025-10-13T16:16:36.593Z" }, + { url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468, upload-time = "2025-10-13T16:13:29.531Z" }, + { url = "https://files.pythonhosted.org/packages/d9/43/560e9ba23c02c904b5934496486d061bcb14cd3ebba2e3cf0e2dccb6c22b/numexpr-2.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee6d4fbbbc368e6cdd0772734d6249128d957b3b8ad47a100789009f4de7083", size = 443631, upload-time = "2025-10-13T16:15:02.473Z" }, + { url = "https://files.pythonhosted.org/packages/7b/6c/78f83b6219f61c2c22d71ab6e6c2d4e5d7381334c6c29b77204e59edb039/numexpr-2.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a2839efa25f3c8d4133252ea7342d8f81226c7c4dda81f97a57e090b9d87a48", size = 1417670, upload-time = "2025-10-13T16:13:33.464Z" }, + { url = "https://files.pythonhosted.org/packages/0e/bb/1ccc9dcaf46281568ce769888bf16294c40e98a5158e4b16c241de31d0d3/numexpr-2.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9f9137f1351b310436662b5dc6f4082a245efa8950c3b0d9008028df92fefb9b", size = 1466212, upload-time = "2025-10-13T16:15:12.828Z" }, + { url = "https://files.pythonhosted.org/packages/31/9f/203d82b9e39dadd91d64bca55b3c8ca432e981b822468dcef41a4418626b/numexpr-2.14.1-cp312-cp312-win32.whl", hash = "sha256:36f8d5c1bd1355df93b43d766790f9046cccfc1e32b7c6163f75bcde682cda07", size = 166996, upload-time = "2025-10-13T16:17:10.369Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/ffe750b5452eb66de788c34e7d21ec6d886abb4d7c43ad1dc88ceb3d998f/numexpr-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:fdd886f4b7dbaf167633ee396478f0d0aa58ea2f9e7ccc3c6431019623e8d68f", size = 160187, upload-time = "2025-10-13T16:17:11.974Z" }, ] [[package]] @@ -3637,7 +3668,7 @@ wheels = [ [[package]] name = "onnxruntime" -version = "1.22.1" +version = "1.23.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coloredlogs" }, @@ -3648,19 +3679,21 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/82/ff/4a1a6747e039ef29a8d4ee4510060e9a805982b6da906a3da2306b7a3be6/onnxruntime-1.22.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:f4581bccb786da68725d8eac7c63a8f31a89116b8761ff8b4989dc58b61d49a0", size = 34324148, upload-time = "2025-07-10T19:15:26.584Z" }, - { url = "https://files.pythonhosted.org/packages/0b/05/9f1929723f1cca8c9fb1b2b97ac54ce61362c7201434d38053ea36ee4225/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae7526cf10f93454beb0f751e78e5cb7619e3b92f9fc3bd51aa6f3b7a8977e5", size = 14473779, upload-time = "2025-07-10T19:15:30.183Z" }, - { url = "https://files.pythonhosted.org/packages/59/f3/c93eb4167d4f36ea947930f82850231f7ce0900cb00e1a53dc4995b60479/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6effa1299ac549a05c784d50292e3378dbbf010346ded67400193b09ddc2f04", size = 16460799, upload-time = "2025-07-10T19:15:33.005Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/01/e536397b03e4462d3260aee5387e6f606c8fa9d2b20b1728f988c3c72891/onnxruntime-1.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:f28a42bb322b4ca6d255531bb334a2b3e21f172e37c1741bd5e66bc4b7b61f03", size = 12689881, upload-time = "2025-07-10T19:15:35.501Z" }, - { url = "https://files.pythonhosted.org/packages/48/70/ca2a4d38a5deccd98caa145581becb20c53684f451e89eb3a39915620066/onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a", size = 34342883, upload-time = "2025-07-10T19:15:38.223Z" }, - { url = "https://files.pythonhosted.org/packages/29/e5/00b099b4d4f6223b610421080d0eed9327ef9986785c9141819bbba0d396/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928", size = 14473861, upload-time = "2025-07-10T19:15:42.911Z" }, - { url = "https://files.pythonhosted.org/packages/0a/50/519828a5292a6ccd8d5cd6d2f72c6b36ea528a2ef68eca69647732539ffa/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d", size = 16475713, upload-time = "2025-07-10T19:15:45.452Z" }, - { url = "https://files.pythonhosted.org/packages/5d/54/7139d463bb0a312890c9a5db87d7815d4a8cce9e6f5f28d04f0b55fcb160/onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87", size = 12690910, upload-time = "2025-07-10T19:15:47.478Z" }, + { url = "https://files.pythonhosted.org/packages/8a/61/ee52bb2c9402cd1a0d550fc65b826c174f8eed49677dd3833ac1bfc0e35a/onnxruntime-1.23.1-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:9ba6e52fb7bc2758a61d1e421d060cf71d5e4259f95ea8a6f72320ae4415f229", size = 17194265, upload-time = "2025-10-08T04:25:24.479Z" }, + { url = "https://files.pythonhosted.org/packages/d3/67/67122b7b4138815090e0d304c8893fefb77370066a847d08e185f04f75fe/onnxruntime-1.23.1-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:7f130f4b0d31ba17c8789053a641958d0d341d96a1bff578d613fb52ded218c2", size = 19150493, upload-time = "2025-10-08T04:24:21.839Z" }, + { url = "https://files.pythonhosted.org/packages/73/e6/66cebc4dcdb217ccb1027cfcbcc01d6399e999c294d986806991c144cbe7/onnxruntime-1.23.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b89fd116f20b70e1140a77286954a7715eb9347260ff2008ee7ec94994df039", size = 15216531, upload-time = "2025-10-08T04:24:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/38/47/083847220c4a429e272ce9407bc8c47fa77b62e0c787ef2cc94fe9776c1b/onnxruntime-1.23.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61139a29d536b71db6045c75462e593a53feecc19756dc222531971cd08e5efe", size = 17368047, upload-time = "2025-10-08T04:24:48.426Z" }, + { url = "https://files.pythonhosted.org/packages/ac/8e/b3d861a7d199fd9c6a0b4af9b5d813bcc853d2e4dd4dac2c70b6c23097ed/onnxruntime-1.23.1-cp311-cp311-win_amd64.whl", hash = "sha256:7973186e8eb66e32ea20cb238ae92b604091e4d1df632653ec830abf7584d0b3", size = 13466816, upload-time = "2025-10-08T04:25:15.037Z" }, + { url = "https://files.pythonhosted.org/packages/00/3c/4b4f56b5df4596d1d95aafe13cbc987d050a89364ff5b2f90308376901fb/onnxruntime-1.23.1-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:564d6add1688efdb0720cf2158b50314fc35b744ad2623155ee3b805c381d9ce", size = 17194708, upload-time = "2025-10-08T04:25:27.188Z" 
}, + { url = "https://files.pythonhosted.org/packages/b4/97/05529b97142c1a09bde2caefea4fd29f71329b9275b52bacdbc2c4f9e964/onnxruntime-1.23.1-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:3864c39307714eff1753149215ad86324a9372e3172a0275d5b16ffd296574bf", size = 19152841, upload-time = "2025-10-08T04:24:24.157Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b9/1232fd295fa9c818aa2a7883d87a2f864fb5edee56ec757c6e857fdd1863/onnxruntime-1.23.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e6b6b5ea80a96924f67fe1e5519f6c6f9cd716fdb5a4fd1ecb4f2b0971e8d00", size = 15223749, upload-time = "2025-10-08T04:24:08.088Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b0/4663a333a82c77f159e48fe8639b1f03e4a05036625be9129c20c4d71d12/onnxruntime-1.23.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:576502dad714ffe5f3b4e1918c5b3368766b222063c585e5fd88415c063e4c80", size = 17378483, upload-time = "2025-10-08T04:24:50.712Z" }, + { url = "https://files.pythonhosted.org/packages/7c/60/8100d98690cbf1de03e08d1f3eff33ff00c652806c7130658a48a8f60584/onnxruntime-1.23.1-cp312-cp312-win_amd64.whl", hash = "sha256:1b89b7c4d4c00a67debc2b0a1484d7f51b23fef85fbd80ac83ed2d17b2161bd6", size = 13467773, upload-time = "2025-10-08T04:25:17.097Z" }, ] [[package]] name = "openai" -version = "1.61.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -3672,9 +3705,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/cf/61e71ce64cf0a38f029da0f9a5f10c9fa0e69a7a977b537126dac50adfea/openai-1.61.1.tar.gz", hash = "sha256:ce1851507218209961f89f3520e06726c0aa7d0512386f0f977e3ac3e4f2472e", size = 350784, upload-time = "2025-02-05T14:34:15.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/90/8f26554d24d63ed4f94d33c24271559863223a67e624f4d2e65ba8e48dca/openai-2.3.0.tar.gz", hash = "sha256:8d213ee5aaf91737faea2d7fc1cd608657a5367a18966372a3756ceaabfbd812", size = 589616, upload-time = "2025-10-10T01:12:50.851Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/b6/2e2a011b2dc27a6711376808b4cd8c922c476ea0f1420b39892117fa8563/openai-1.61.1-py3-none-any.whl", hash = "sha256:72b0826240ce26026ac2cd17951691f046e5be82ad122d20a8e1b30ca18bd11e", size = 463126, upload-time = "2025-02-05T14:34:13.643Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5b/4be258ff072ed8ee15f6bfd8d5a1a4618aa4704b127c0c5959212ad177d6/openai-2.3.0-py3-none-any.whl", hash = "sha256:a7aa83be6f7b0ab2e4d4d7bcaf36e3d790874c0167380c5d0afd0ed99a86bd7b", size = 999768, upload-time = "2025-10-10T01:12:48.647Z" }, ] [[package]] @@ -3695,7 +3728,7 @@ wheels = [ [[package]] name = "openinference-instrumentation" -version = "0.1.38" +version = "0.1.40" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "openinference-semantic-conventions" }, @@ -3703,18 +3736,18 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/87/71c599f804203077f3766e7c6ce831cdfd0ca202278c35877a704e00b2cf/openinference_instrumentation-0.1.38.tar.gz", hash = "sha256:b45e5d19b5c0d14e884a11ed5b888deda03d955c6e6f4478d8cefd3edaea089d", size = 23749, upload-time = "2025-09-02T21:06:22.025Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/37/59/750c25a353260a72287e618b9ccabd57f02db6bfd571c6dbf132202abeff/openinference_instrumentation-0.1.40.tar.gz", hash = "sha256:3080785479793a56023806c71dccbc39418925947407667794c651f992f700a2", size = 23824, upload-time = "2025-10-10T03:48:48.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/f7/72bd2dbb8bbdd785512c9d128f2056e2eaadccfaecb09d2ae59bde6d4af2/openinference_instrumentation-0.1.38-py3-none-any.whl", hash = "sha256:5c45d73c5f3c79e9d9e44fbf4b2c3bdae514be74396cc1880cb845b9b7acc78f", size = 29885, upload-time = "2025-09-02T21:06:20.845Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fd/2b6ea9d95f3eb1deba10975a14b80d7fe79528258111771580a0437d4f44/openinference_instrumentation-0.1.40-py3-none-any.whl", hash = "sha256:d2e894f25addb1dfba563789213139876c5a01fca0a1fa8aa52a455a988a11d4", size = 29967, upload-time = "2025-10-10T03:48:46.518Z" }, ] [[package]] name = "openinference-semantic-conventions" -version = "0.1.21" +version = "0.1.24" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/0f/b794eb009846d4b10af50e205a323ca359f284563ef4d1778f35a80522ac/openinference_semantic_conventions-0.1.21.tar.gz", hash = "sha256:328405b9f79ff72a659c7712b8429c0d7ea68c6a4a1679e3eb44372aa228119b", size = 12534, upload-time = "2025-06-13T05:22:18.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/15/be7566a4bba4b57f7c70b088f42735f2005e2c0adce646a537f63dcf21de/openinference_semantic_conventions-0.1.24.tar.gz", hash = "sha256:3223b8c3958525457a369d58ebf0c56230a1f00567ae1e99f1c2049a8ac2cacd", size = 12741, upload-time = "2025-10-10T03:49:13.987Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/4d/092766f8e610f2c513e483c4adc892eea1634945022a73371fe01f621165/openinference_semantic_conventions-0.1.21-py3-none-any.whl", hash = "sha256:acde8282c20da1de900cdc0d6258a793ec3eb8031bfc496bd823dae17d32e326", size = 10167, upload-time = "2025-06-13T05:22:18.118Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c5/fa81b19042b387826151f984a91fa3d0b52b08374e4d5786521ac2d9e704/openinference_semantic_conventions-0.1.24-py3-none-any.whl", hash = "sha256:b2d650ca7e39c5fb02bf908b8049d6ece2a2657757448e1925a38b59548a80b3", size = 10373, upload-time = "2025-10-10T03:49:00.318Z" }, ] [[package]] @@ -3911,6 +3944,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588, upload-time = "2024-08-28T21:26:58.504Z" }, ] +[[package]] +name = "opentelemetry-instrumentation-httpx" +version = "0.48b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/d9/c65d818607c16d1b7ea8d2de6111c6cecadf8d2fd38c1885a72733a7c6d3/opentelemetry_instrumentation_httpx-0.48b0.tar.gz", hash = "sha256:ee977479e10398931921fb995ac27ccdeea2e14e392cb27ef012fc549089b60a", size = 16931, upload-time = "2024-08-28T21:28:03.794Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c2/fe/f2daa9d6d988c093b8c7b1d35df675761a8ece0b600b035dc04982746c9d/opentelemetry_instrumentation_httpx-0.48b0-py3-none-any.whl", hash = "sha256:d94f9d612c82d09fe22944d1904a30a464c19bea2ba76be656c99a28ad8be8e5", size = 13900, upload-time = "2024-08-28T21:27:01.566Z" }, +] + [[package]] name = "opentelemetry-instrumentation-redis" version = "0.48b0" @@ -3926,21 +3974,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610, upload-time = "2024-08-28T21:27:18.759Z" }, ] -[[package]] -name = "opentelemetry-instrumentation-requests" -version = "0.48b0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "opentelemetry-instrumentation" }, - { name = "opentelemetry-semantic-conventions" }, - { name = "opentelemetry-util-http" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/52/ac/5eb78efde21ff21d0ad5dc8c6cc6a0f8ae482ce8a46293c2f45a628b6166/opentelemetry_instrumentation_requests-0.48b0.tar.gz", hash = "sha256:67ab9bd877a0352ee0db4616c8b4ae59736ddd700c598ed907482d44f4c9a2b3", size = 14120, upload-time = "2024-08-28T21:28:16.933Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/df/0df9226d1b14f29d23c07e6194b9fd5ad50e7d987b7fd13df7dcf718aeb1/opentelemetry_instrumentation_requests-0.48b0-py3-none-any.whl", hash = "sha256:d4f01852121d0bd4c22f14f429654a735611d4f7bf3cf93f244bdf1489b2233d", size = 12366, upload-time = "2024-08-28T21:27:20.771Z" }, -] - [[package]] name = "opentelemetry-instrumentation-sqlalchemy" version = "0.48b0" @@ -4035,7 +4068,7 @@ wheels = [ [[package]] name = "opik" -version = "1.7.43" +version = "1.8.74" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3-stubs", extra = ["bedrock-runtime"] }, @@ -4054,21 +4087,21 @@ dependencies = [ { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/52/cea0317bc3207bc967b48932781995d9cdb2c490e7e05caa00ff660f7205/opik-1.7.43.tar.gz", hash = "sha256:0b02522b0b74d0a67b141939deda01f8bb69690eda6b04a7cecb1c7f0649ccd0", size = 326886, upload-time = "2025-07-07T10:30:07.715Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/31/887f133aa82aeb4cb8a01d98ad6ae73cb0580c2c9395d76bae1d67dbb6f6/opik-1.8.74.tar.gz", hash = "sha256:4b18248dbd741dab16dab399c1ab7197f1f6c6775ee06285ff07d3d22e1810de", size = 412504, upload-time = "2025-10-13T13:43:03.117Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/ae/f3566bdc3c49a1a8f795b1b6e726ef211c87e31f92d870ca6d63999c9bbf/opik-1.7.43-py3-none-any.whl", hash = "sha256:a66395c8b5ea7c24846f72dafc70c74d5b8f24ffbc4c8a1b3a7f9456e550568d", size = 625356, upload-time = "2025-07-07T10:30:06.389Z" }, + { url = "https://files.pythonhosted.org/packages/37/5d/11c12e2471880effa7a597d96bce848271fa93007f7f543ed607fb31822a/opik-1.8.74-py3-none-any.whl", hash = "sha256:34ffbff2c447da117e58bcc2fdf53b3b534dd1ffe9a293eb912f5419fc9904c3", size = 772547, upload-time = "2025-10-13T13:43:01.29Z" }, ] [[package]] name = "optype" -version = "0.13.4" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/20/7f/daa32a35b2a6a564a79723da49c0ddc464c462e67a906fc2b66a0d64f28e/optype-0.13.4.tar.gz", hash = "sha256:131d8e0f1c12d8095d553e26b54598597133830983233a6a2208886e7a388432", size = 99547, upload-time = "2025-08-19T19:52:44.242Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/ca/d3a2abcf12cc8c18ccac1178ef87ab50a235bf386d2401341776fdad18aa/optype-0.14.0.tar.gz", hash = "sha256:925cf060b7d1337647f880401f6094321e7d8e837533b8e159b9a92afa3157c6", size = 100880, upload-time = "2025-10-01T04:49:56.232Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/bb/b51940f2d91071325d5ae2044562aa698470a105474d9317b9dbdaad63df/optype-0.13.4-py3-none-any.whl", hash = "sha256:500c89cfac82e2f9448a54ce0a5d5c415b6976b039c2494403cd6395bd531979", size = 87919, upload-time = "2025-08-19T19:52:41.314Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/11b0eb65eeafa87260d36858b69ec4e0072d09e37ea6714280960030bc93/optype-0.14.0-py3-none-any.whl", hash = "sha256:50d02edafd04edf2e5e27d6249760a51b2198adb9f6ffd778030b3d2806b026b", size = 89465, upload-time = "2025-10-01T04:49:54.674Z" }, ] [package.optional-dependencies] @@ -4079,23 +4112,23 @@ numpy = [ [[package]] name = "oracledb" -version = "3.0.0" +version = "3.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/39/712f797b75705c21148fa1d98651f63c2e5cc6876e509a0a9e2f5b406572/oracledb-3.0.0.tar.gz", hash = "sha256:64dc86ee5c032febc556798b06e7b000ef6828bb0252084f6addacad3363db85", size = 840431, upload-time = "2025-03-03T19:36:12.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/c9/fae18fa5d803712d188486f8e86ad4f4e00316793ca19745d7c11092c360/oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0", size = 811776, upload-time = "2025-07-29T22:34:10.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/bf/d872c4b3fc15cd3261fe0ea72b21d181700c92dbc050160e161654987062/oracledb-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:52daa9141c63dfa75c07d445e9bb7f69f43bfb3c5a173ecc48c798fe50288d26", size = 4312963, upload-time = "2025-03-03T19:36:32.576Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ea/01ee29e76a610a53bb34fdc1030f04b7669c3f80b25f661e07850fc6160e/oracledb-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af98941789df4c6aaaf4338f5b5f6b7f2c8c3fe6f8d6a9382f177f350868747a", size = 2661536, upload-time = "2025-03-03T19:36:34.904Z" }, - { url = "https://files.pythonhosted.org/packages/3d/8e/ad380e34a46819224423b4773e58c350bc6269643c8969604097ced8c3bc/oracledb-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9812bb48865aaec35d73af54cd1746679f2a8a13cbd1412ab371aba2e39b3943", size = 2867461, upload-time = "2025-03-03T19:36:36.508Z" }, - { url = "https://files.pythonhosted.org/packages/96/09/ecc4384a27fd6e1e4de824ae9c160e4ad3aaebdaade5b4bdcf56a4d1ff63/oracledb-3.0.0-cp311-cp311-win32.whl", hash = "sha256:6c27fe0de64f2652e949eb05b3baa94df9b981a4a45fa7f8a991e1afb450c8e2", size = 1752046, upload-time = "2025-03-03T19:36:38.313Z" }, - { url = "https://files.pythonhosted.org/packages/62/e8/f34bde24050c6e55eeba46b23b2291f2dd7fd272fa8b322dcbe71be55778/oracledb-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f922709672002f0b40997456f03a95f03e5712a86c61159951c5ce09334325e0", size 
= 2101210, upload-time = "2025-03-03T19:36:40.669Z" }, - { url = "https://files.pythonhosted.org/packages/6f/fc/24590c3a3d41e58494bd3c3b447a62835138e5f9b243d9f8da0cfb5da8dc/oracledb-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:acd0e747227dea01bebe627b07e958bf36588a337539f24db629dc3431d3f7eb", size = 4351993, upload-time = "2025-03-03T19:36:42.577Z" }, - { url = "https://files.pythonhosted.org/packages/b7/b6/1f3b0b7bb94d53e8857d77b2e8dbdf6da091dd7e377523e24b79dac4fd71/oracledb-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8b402f77c22af031cd0051aea2472ecd0635c1b452998f511aa08b7350c90a4", size = 2532640, upload-time = "2025-03-03T19:36:45.066Z" }, - { url = "https://files.pythonhosted.org/packages/72/1a/1815f6c086ab49c00921cf155ff5eede5267fb29fcec37cb246339a5ce4d/oracledb-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:378a27782e9a37918bd07a5a1427a77cb6f777d0a5a8eac9c070d786f50120ef", size = 2765949, upload-time = "2025-03-03T19:36:47.47Z" }, - { url = "https://files.pythonhosted.org/packages/33/8d/208900f8d372909792ee70b2daad3f7361181e55f2217c45ed9dff658b54/oracledb-3.0.0-cp312-cp312-win32.whl", hash = "sha256:54a28c2cb08316a527cd1467740a63771cc1c1164697c932aa834c0967dc4efc", size = 1709373, upload-time = "2025-03-03T19:36:49.67Z" }, - { url = "https://files.pythonhosted.org/packages/0c/5e/c21754f19c896102793c3afec2277e2180aa7d505e4d7fcca24b52d14e4f/oracledb-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8289bad6d103ce42b140e40576cf0c81633e344d56e2d738b539341eacf65624", size = 2056452, upload-time = "2025-03-03T19:36:51.363Z" }, + { url = "https://files.pythonhosted.org/packages/3f/35/95d9a502fdc48ce1ef3a513ebd027488353441e15aa0448619abb3d09d32/oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0", size = 3963945, upload-time = "2025-07-29T22:34:28.633Z" }, + { url = "https://files.pythonhosted.org/packages/16/a7/8f1ef447d995bb51d9fdc36356697afeceb603932f16410c12d52b2df1a4/oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7", size = 2449385, upload-time = "2025-07-29T22:34:30.592Z" }, + { url = "https://files.pythonhosted.org/packages/b3/fa/6a78480450bc7d256808d0f38ade3385735fb5a90dab662167b4257dcf94/oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22", size = 2634943, upload-time = "2025-07-29T22:34:33.142Z" }, + { url = "https://files.pythonhosted.org/packages/5b/90/ea32b569a45fb99fac30b96f1ac0fb38b029eeebb78357bc6db4be9dde41/oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1", size = 1483549, upload-time = "2025-07-29T22:34:35.015Z" }, + { url = "https://files.pythonhosted.org/packages/81/55/ae60f72836eb8531b630299f9ed68df3fe7868c6da16f820a108155a21f9/oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a", size = 1834737, upload-time = "2025-07-29T22:34:36.824Z" }, + { url = "https://files.pythonhosted.org/packages/08/a8/f6b7809d70e98e113786d5a6f1294da81c046d2fa901ad656669fc5d7fae/oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8", size = 3943512, upload-time = "2025-07-29T22:34:39.237Z" }, + { url = "https://files.pythonhosted.org/packages/df/b9/8145ad8991f4864d3de4a911d439e5bc6cdbf14af448f3ab1e846a54210c/oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9", size = 2276258, upload-time = "2025-07-29T22:34:41.547Z" }, + { url = "https://files.pythonhosted.org/packages/56/bf/f65635ad5df17d6e4a2083182750bb136ac663ff0e9996ce59d77d200f60/oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e", size = 2458811, upload-time = "2025-07-29T22:34:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/7d/30/e0c130b6278c10b0e6cd77a3a1a29a785c083c549676cf701c5d180b8e63/oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813", size = 1445525, upload-time = "2025-07-29T22:34:46.603Z" }, + { url = "https://files.pythonhosted.org/packages/1a/5c/7254f5e1a33a5d6b8bf6813d4f4fdcf5c4166ec8a7af932d987879d5595c/oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4", size = 1789976, upload-time = "2025-07-29T22:34:48.5Z" }, ] [[package]] @@ -4228,16 +4261,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683, upload-time = "2025-05-27T15:24:28.4Z" }, ] -[[package]] -name = "pandoc" -version = "2.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "plumbum" }, - { name = "ply" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/10/9a/e3186e760c57ee5f1c27ea5cea577a0ff9abfca51eefcb4d9a4cd39aff2e/pandoc-2.4.tar.gz", hash = "sha256:ecd1f8cbb7f4180c6b5db4a17a7c1a74df519995f5f186ef81ce72a9cbd0dd9a", size = 34635, upload-time = "2024-08-07T14:33:58.016Z" } - [[package]] name = "pathspec" version = "0.12.1" @@ -4249,15 +4272,15 @@ wheels = [ [[package]] name = "pdfminer-six" -version = "20240706" +version = "20250506" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "charset-normalizer" }, { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e3/37/63cb918ffa21412dd5d54e32e190e69bfc340f3d6aa072ad740bec9386bb/pdfminer.six-20240706.tar.gz", hash = "sha256:c631a46d5da957a9ffe4460c5dce21e8431dabb615fee5f9f4400603a58d95a6", size = 7363505, upload-time = "2024-07-06T13:48:50.795Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/46/5223d613ac4963e1f7c07b2660fe0e9e770102ec6bda8c038400113fb215/pdfminer_six-20250506.tar.gz", hash = "sha256:b03cc8df09cf3c7aba8246deae52e0bca7ebb112a38895b5e1d4f5dd2b8ca2e7", size = 7387678, upload-time = "2025-05-06T16:17:00.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/7d/44d6b90e5a293d3a975cefdc4e12a932ebba814995b2a07e37e599dd27c6/pdfminer.six-20240706-py3-none-any.whl", hash = "sha256:f4f70e74174b4b3542fcb8406a210b6e2e27cd0f0b5fd04534a8cc0d8951e38c", size = 5615414, upload-time = "2024-07-06T13:48:48.408Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/16/7a432c0101fa87457e75cb12c879e1749c5870a786525e2e0f42871d6462/pdfminer_six-20250506-py3-none-any.whl", hash = "sha256:d81ad173f62e5f841b53a8ba63af1a4a355933cfc0ffabd608e568b9193909e3", size = 5620187, upload-time = "2025-05-06T16:16:58.669Z" }, ] [[package]] @@ -4328,11 +4351,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.4.0" +version = "4.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, ] [[package]] @@ -4344,18 +4367,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] -[[package]] -name = "plumbum" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f0/5d/49ba324ad4ae5b1a4caefafbce7a1648540129344481f2ed4ef6bb68d451/plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219", size = 319083, upload-time = "2024-10-05T05:59:27.059Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/9d/d03542c93bb3d448406731b80f39c3d5601282f778328c22c77d270f4ed4/plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5", size = 127970, upload-time = "2024-10-05T05:59:25.102Z" }, -] - [[package]] name = "ply" version = "3.11" @@ -4367,7 +4378,7 @@ wheels = [ [[package]] name = "polyfile-weave" -version = "0.5.6" +version = "0.5.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "abnf" }, @@ -4385,9 +4396,21 @@ dependencies = [ { name = "pyyaml" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/16/11/7e0b3908a4f5436197b1fc11713c628cd7f9136dc7c1fb00ac8879991f87/polyfile_weave-0.5.6.tar.gz", hash = "sha256:a9fc41b456272c95a3788a2cab791e052acc24890c512fc5a6f9f4e221d24ed1", size = 5987173, upload-time = "2025-07-28T20:26:32.092Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/02/c3/5a2a2ba06850bc5ec27f83ac8b92210dff9ff6736b2c42f700b489b3fd86/polyfile_weave-0.5.7.tar.gz", hash = "sha256:c3d863f51c30322c236bdf385e116ac06d4e7de9ec25a3aae14d42b1d528e33b", size = 5987445, upload-time = "2025-09-22T19:21:11.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/63/04c5c7c2093cf69c9eeea338f4757522a5d048703a35b3ac8a5580ed2369/polyfile_weave-0.5.6-py3-none-any.whl", hash = "sha256:658e5b6ed040a973279a0cd7f54f4566249c85b977dee556788fa6f903c1d30b", size = 1655007, upload-time = "2025-07-28T20:26:30.132Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f6/d1efedc0f9506e47699616e896d8efe39e8f0b6a7d1d590c3e97455ecf4a/polyfile_weave-0.5.7-py3-none-any.whl", hash = "sha256:880454788bc383408bf19eefd6d1c49a18b965d90c99bccb58f4da65870c82dd", size = 1655397, upload-time = "2025-09-22T19:21:09.142Z" }, +] + +[[package]] +name = "pondpond" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "madoka" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/9b/8411458ca8ce8b5b9b135e4a19823f1caf958ca9985883db104323492982/pondpond-1.4.1.tar.gz", hash = "sha256:8afa34b869d1434d21dd2ec12644abc3b1733fcda8fcf355300338a13a79bb7b", size = 15237, upload-time = "2024-03-01T07:08:06.756Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/d4/f18d6985157cc68f76469480182cbee2a03a45858456955acf57f9dcbb4c/pondpond-1.4.1-py3-none-any.whl", hash = "sha256:641028ead4e8018ca6de1220c660ddd6d6fbf62a60e72f410655dd0451d82880", size = 14498, upload-time = "2024-03-01T07:08:04.63Z" }, ] [[package]] @@ -4418,7 +4441,7 @@ wheels = [ [[package]] name = "posthog" -version = "6.7.4" +version = "6.7.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff" }, @@ -4428,9 +4451,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/40/d7f585e09e47f492ebaeb8048a8e2ce5d9f49a3896856a7a975cbc1484fa/posthog-6.7.4.tar.gz", hash = "sha256:2bfa74f321ac18efe4a48a256d62034a506ca95477af7efa32292ed488a742c5", size = 118209, upload-time = "2025-09-05T15:29:21.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/ce/11d6fa30ab517018796e1d675498992da585479e7079770ec8fa99a61561/posthog-6.7.6.tar.gz", hash = "sha256:ee5c5ad04b857d96d9b7a4f715e23916a2f206bfcf25e5a9d328a3d27664b0d3", size = 119129, upload-time = "2025-09-22T18:11:12.365Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/95/e795059ef73d480a7f11f1be201087f65207509525920897fb514a04914c/posthog-6.7.4-py3-none-any.whl", hash = "sha256:7f1872c53ec7e9a29b088a5a1ad03fa1be3b871d10d70c8bf6c2dafb91beaac5", size = 136409, upload-time = "2025-09-05T15:29:19.995Z" }, + { url = "https://files.pythonhosted.org/packages/de/84/586422d8861b5391c8414360b10f603c0b7859bb09ad688e64430ed0df7b/posthog-6.7.6-py3-none-any.whl", hash = "sha256:b09a7e65a042ec416c28874b397d3accae412a80a8b0ef3fa686fbffc99e4d4b", size = 137348, upload-time = "2025-09-22T18:11:10.807Z" }, ] [[package]] @@ -4447,43 +4470,41 @@ wheels = [ [[package]] name = "propcache" -version = "0.3.2" +version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, - { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, - { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, - { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, - { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, - { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, - { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = 
"2025-06-09T22:54:19.947Z" }, - { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, - { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, - { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, - { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, - { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, - { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, 
upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time 
= "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] @@ -4514,17 +4535,18 @@ wheels = [ [[package]] name = "psutil" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size 
= 497003, upload-time = "2025-02-13T21:54:07.946Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz", hash = "sha256:655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", size = 497660, upload-time = "2025-09-17T20:14:52.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, + { url = "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", size = 245242, upload-time = "2025-09-17T20:14:56.126Z" }, + { url = "https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", size = 246682, upload-time = "2025-09-17T20:14:58.25Z" }, + { url = "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", size = 
287994, upload-time = "2025-09-17T20:14:59.901Z" }, + { url = "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", size = 291163, upload-time = "2025-09-17T20:15:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", size = 293625, upload-time = "2025-09-17T20:15:04.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/157c8e7959ec39ced1b11cc93c730c4fb7f9d408569a6c59dbd92ceb35db/psutil-7.1.0-cp37-abi3-win32.whl", hash = "sha256:09ad740870c8d219ed8daae0ad3b726d3bf9a028a198e7f3080f6a1888b99bca", size = 244812, upload-time = "2025-09-17T20:15:07.462Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/b44c4f697276a7a95b8e94d0e320a7bf7f3318521b23de69035540b39838/psutil-7.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:57f5e987c36d3146c0dd2528cd42151cf96cd359b9d67cfff836995cc5df9a3d", size = 247965, upload-time = "2025-09-17T20:15:09.673Z" }, + { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] [[package]] @@ -4535,34 +4557,28 @@ sdist = { url = "https://files.pythonhosted.org/packages/eb/72/4a7965cf54e341006 [[package]] name = "psycopg2-binary" -version = "2.9.10" +version = "2.9.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397, upload-time = "2024-10-16T11:19:40.033Z" }, - { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806, upload-time = "2024-10-16T11:19:43.5Z" }, - { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370, upload-time = "2024-10-16T11:19:46.986Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780, upload-time = "2024-10-16T11:19:50.242Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583, upload-time = "2024-10-16T11:19:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831, upload-time = "2024-10-16T11:19:57.762Z" }, - { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822, upload-time = "2024-10-16T11:20:04.693Z" }, - { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975, upload-time = "2024-10-16T11:20:11.401Z" }, - { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320, upload-time = "2024-10-16T11:20:17.959Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617, upload-time = "2024-10-16T11:20:24.711Z" }, - { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618, upload-time = "2024-10-16T11:20:27.718Z" }, - { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816, upload-time = "2024-10-16T11:20:30.777Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, - { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = 
"2024-10-16T11:20:38.742Z" }, - { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, - { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, - { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, - { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, - { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, - { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, - { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, - { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, ] [[package]] @@ -4658,7 +4674,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -4666,9 +4682,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/54/ecab642b3bed45f7d5f59b38443dcb36ef50f85af192e6ece103dbfe9587/pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423", size = 788494, upload-time = "2025-10-04T10:40:41.338Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/1f/73c53fcbfb0b5a78f91176df41945ca466e71e9d9d836e5c522abda39ee7/pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a", size = 444823, upload-time = "2025-10-04T10:40:39.055Z" }, ] [[package]] @@ -4721,15 +4737,15 @@ wheels = [ [[package]] name = "pydantic-extra-types" -version = "2.10.5" +version = "2.10.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/ba/4178111ec4116c54e1dc7ecd2a1ff8f54256cdbd250e576882911e8f710a/pydantic_extra_types-2.10.5.tar.gz", hash = "sha256:1dcfa2c0cf741a422f088e0dbb4690e7bfadaaf050da3d6f80d6c3cf58a2bad8", size = 138429, upload-time = "2025-06-02T09:31:52.713Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/10/fb64987804cde41bcc39d9cd757cd5f2bb5d97b389d81aa70238b14b8a7e/pydantic_extra_types-2.10.6.tar.gz", hash = "sha256:c63d70bf684366e6bbe1f4ee3957952ebe6973d41e7802aea0b770d06b116aeb", size = 141858, upload-time = "2025-10-08T13:47:49.483Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/1a/5f4fd9e7285f10c44095a4f9fe17d0f358d1702a7c74a9278c794e8a7537/pydantic_extra_types-2.10.5-py3-none-any.whl", hash = "sha256:b60c4e23d573a69a4f1a16dd92888ecc0ef34fb0e655b4f305530377fa70e7a8", size = 38315, upload-time = "2025-06-02T09:31:51.229Z" }, + { url = "https://files.pythonhosted.org/packages/93/04/5c918669096da8d1c9ec7bb716bd72e755526103a61bc5e76a3e4fb23b53/pydantic_extra_types-2.10.6-py3-none-any.whl", hash = "sha256:6106c448316d30abf721b5b9fecc65e983ef2614399a24142d689c7546cc246a", size = 40949, upload-time = "2025-10-08T13:47:48.268Z" }, ] [[package]] @@ -4771,7 +4787,7 @@ crypto = [ [[package]] name = "pymilvus" -version = "2.5.15" +version = "2.5.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, @@ -4782,9 +4798,9 @@ dependencies = [ { name = "setuptools" }, { name = "ujson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/f9/dee7f0d42979bf4cbe0bf23f8db9bf4c331b53c4c9f8692d2e027073c928/pymilvus-2.5.15.tar.gz", hash = "sha256:350396ef3bb40aa62c8a2ecaccb5c664bbb1569eef8593b74dd1d5125eb0deb2", size = 1278109, upload-time = "2025-08-21T11:57:58.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/e2/5613bc7b2af0ccd760177ca4255243c284cfc0f2cba3f10ff63325c4ca34/pymilvus-2.5.16.tar.gz", hash = "sha256:65f56b81806bc217cca3cf29b70a27d053dea4b1ffada910cf63a38f96381618", size = 1280614, upload-time = "2025-09-19T07:02:14.747Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/af/10a620686025e5b59889d7075f5d426e45e57a0180c4465051645a88ccb0/pymilvus-2.5.15-py3-none-any.whl", hash = "sha256:a155a3b436e2e3ca4b85aac80c92733afe0bd172c497c3bc0dfaca0b804b90c9", size = 241683, upload-time = "2025-08-21T11:57:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/c6/09/b67a55abee0a53ea50ba0de0cba6e1c0f7ca7ce2c15ffd6f40c059c25e88/pymilvus-2.5.16-py3-none-any.whl", hash = "sha256:76258a324f19c60fee247467e11cd7d6f35a64d2a9c753f5d7b1a5fa15dd6c8a", size = 243272, upload-time = "2025-09-19T07:02:12.443Z" }, ] [[package]] @@ -4838,20 +4854,20 @@ wheels = [ [[package]] name = "pyparsing" -version = "3.2.3" +version = "3.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", 
hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, ] [[package]] name = "pypdf" -version = "6.0.0" +version = "6.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/ac/a300a03c3b34967c050677ccb16e7a4b65607ee5df9d51e8b6d713de4098/pypdf-6.0.0.tar.gz", hash = "sha256:282a99d2cc94a84a3a3159f0d9358c0af53f85b4d28d76ea38b96e9e5ac2a08d", size = 5033827, upload-time = "2025-08-11T14:22:02.352Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/85/4c0f12616db83c2e3ef580c3cfa98bd082e88fc8d02e136bad3bede1e3fa/pypdf-6.1.1.tar.gz", hash = "sha256:10f44d49bf2a82e54c3c5ba3cdcbb118f2a44fc57df8ce51d6fb9b1ed9bfbe8b", size = 5074507, upload-time = "2025-09-28T13:29:16.165Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/83/2cacc506eb322bb31b747bc06ccb82cc9aa03e19ee9c1245e538e49d52be/pypdf-6.0.0-py3-none-any.whl", hash = "sha256:56ea60100ce9f11fc3eec4f359da15e9aec3821b036c1f06d2b660d35683abb8", size = 310465, upload-time = "2025-08-11T14:22:00.481Z" }, + { url = "https://files.pythonhosted.org/packages/07/ed/adae13756d9dabdddee483fc7712905bb5585fbf6e922b1a19aca3a29cd1/pypdf-6.1.1-py3-none-any.whl", hash = "sha256:7781f99493208a37a7d4275601d883e19af24e62a525c25844d22157c2e4cde7", size = 323455, upload-time = "2025-09-28T13:29:14.392Z" }, ] [[package]] @@ -5130,75 +5146,29 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, -] - -[[package]] -name = "pyzstd" -version = "0.17.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8f/a2/54d860ccbd07e3c67e4d0321d1c29fc7963ac82cf801a078debfc4ef7c15/pyzstd-0.17.0.tar.gz", hash = "sha256:d84271f8baa66c419204c1dd115a4dec8b266f8a2921da21b81764fa208c1db6", size = 1212160, upload-time = "2025-05-10T14:14:49.764Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/4a/81ca9a6a759ae10a51cb72f002c149b602ec81b3a568ca6292b117f6da0d/pyzstd-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06d1e7afafe86b90f3d763f83d2f6b6a437a8d75119fe1ff52b955eb9df04eaa", size = 377827, upload-time = "2025-05-10T14:12:54.102Z" }, - { url = "https://files.pythonhosted.org/packages/a1/09/584c12c8a918c9311a55be0c667e57a8ee73797367299e2a9f3fc3bf7a39/pyzstd-0.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc827657f644e4510211b49f5dab6b04913216bc316206d98f9a75214361f16e", size = 297579, upload-time = "2025-05-10T14:12:55.748Z" }, - { url = "https://files.pythonhosted.org/packages/e1/89/dc74cd83f30b97f95d42b028362e32032e61a8f8e6cc2a8e47b70976d99a/pyzstd-0.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecffadaa2ee516ecea3e432ebf45348fa8c360017f03b88800dd312d62ecb063", size = 443132, upload-time = "2025-05-10T14:12:57.098Z" }, - { url = "https://files.pythonhosted.org/packages/a8/12/fe93441228a324fe75d10f5f13d5e5d5ed028068810dfdf9505d89d704a0/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:596de361948d3aad98a837c98fcee4598e51b608f7e0912e0e725f82e013f00f", size = 390644, upload-time = "2025-05-10T14:12:58.379Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/d1/aa7cdeb9bf8995d9df9936c71151be5f4e7b231561d553e73bbf340c2281/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd3a8d0389c103e93853bf794b9a35ac5d0d11ca3e7e9f87e3305a10f6dfa6b2", size = 478070, upload-time = "2025-05-10T14:12:59.706Z" }, - { url = "https://files.pythonhosted.org/packages/95/62/7e5c450790bfd3db954694d4d877446d0b6d192aae9c73df44511f17b75c/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1356f72c7b8bb99b942d582b61d1a93c5065e66b6df3914dac9f2823136c3228", size = 421240, upload-time = "2025-05-10T14:13:01.151Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b5/d20c60678c0dfe2430f38241d118308f12516ccdb44f9edce27852ee2187/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f514c339b013b0b0a2ed8ea6e44684524223bd043267d7644d7c3a70e74a0dd", size = 412908, upload-time = "2025-05-10T14:13:02.904Z" }, - { url = "https://files.pythonhosted.org/packages/d2/a0/3ae0f1af2982b6cdeacc2a1e1cd20869d086d836ea43e0f14caee8664101/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d4de16306821021c2d82a45454b612e2a8683d99bfb98cff51a883af9334bea0", size = 415572, upload-time = "2025-05-10T14:13:04.828Z" }, - { url = "https://files.pythonhosted.org/packages/7d/84/cb0a10c3796f4cd5f09c112cbd72405ffd019f7c0d1e2e5e99ccc803c60c/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aeb9759c04b6a45c1b56be21efb0a738e49b0b75c4d096a38707497a7ff2be82", size = 445334, upload-time = "2025-05-10T14:13:06.5Z" }, - { url = "https://files.pythonhosted.org/packages/d6/d6/8c5cf223067b69aa63f9ecf01846535d4ba82d98f8c9deadfc0092fa16ca/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7a5b31ddeada0027e67464d99f09167cf08bab5f346c3c628b2d3c84e35e239a", size = 518748, upload-time = "2025-05-10T14:13:08.286Z" }, - { url = "https://files.pythonhosted.org/packages/bf/1c/dc7bab00a118d0ae931239b23e05bf703392005cf3bb16942b7b2286452a/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8338e4e91c52af839abcf32f1f65f3b21e2597ffe411609bdbdaf10274991bd0", size = 562487, upload-time = "2025-05-10T14:13:09.714Z" }, - { url = "https://files.pythonhosted.org/packages/e0/a4/fca96c0af643e4de38bce0dc25dab60ea558c49444c30b9dbe8b7a1714be/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628e93862feb372b4700085ec4d1d389f1283ac31900af29591ae01019910ff3", size = 432319, upload-time = "2025-05-10T14:13:11.296Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a3/7c924478f6c14b369fec8c5cd807b069439c6ecbf98c4783c5791036d3ad/pyzstd-0.17.0-cp311-cp311-win32.whl", hash = "sha256:c27773f9c95ebc891cfcf1ef282584d38cde0a96cb8d64127953ad752592d3d7", size = 220005, upload-time = "2025-05-10T14:13:13.188Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f6/d081b6b29cf00780c971b07f7889a19257dd884e64a842a5ebc406fd3992/pyzstd-0.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:c043a5766e00a2b7844705c8fa4563b7c195987120afee8f4cf594ecddf7e9ac", size = 246224, upload-time = "2025-05-10T14:13:14.478Z" }, - { url = "https://files.pythonhosted.org/packages/61/f3/f42c767cde8e3b94652baf85863c25476fd463f3bd61f73ed4a02c1db447/pyzstd-0.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:efd371e41153ef55bf51f97e1ce4c1c0b05ceb59ed1d8972fc9aa1e9b20a790f", size = 223036, upload-time = "2025-05-10T14:13:15.752Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/50/7fa47d0a13301b1ce20972aa0beb019c97f7ee8b0658d7ec66727b5967f9/pyzstd-0.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ac330fc4f64f97a411b6f3fc179d2fe3050b86b79140e75a9a6dd9d6d82087f", size = 379056, upload-time = "2025-05-10T14:13:17.091Z" }, - { url = "https://files.pythonhosted.org/packages/9d/f2/67b03b1fa4e2a0b05e147cc30ac6d271d3d11017b47b30084cb4699451f4/pyzstd-0.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:725180c0c4eb2e643b7048ebfb45ddf43585b740535907f70ff6088f5eda5096", size = 298381, upload-time = "2025-05-10T14:13:18.812Z" }, - { url = "https://files.pythonhosted.org/packages/01/8b/807ff0a13cf3790fe5de85e18e10c22b96d92107d2ce88699cefd3f890cb/pyzstd-0.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c20fe0a60019685fa1f7137cb284f09e3f64680a503d9c0d50be4dd0a3dc5ec", size = 443770, upload-time = "2025-05-10T14:13:20.495Z" }, - { url = "https://files.pythonhosted.org/packages/f0/88/832d8d8147691ee37736a89ea39eaf94ceac5f24a6ce2be316ff5276a1f8/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d97f7aaadc3b6e2f8e51bfa6aa203ead9c579db36d66602382534afaf296d0db", size = 391167, upload-time = "2025-05-10T14:13:22.236Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a5/2e09bee398dfb0d94ca43f3655552a8770a6269881dc4710b8f29c7f71aa/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42dcb34c5759b59721997036ff2d94210515d3ef47a9de84814f1c51a1e07e8a", size = 478960, upload-time = "2025-05-10T14:13:23.584Z" }, - { url = "https://files.pythonhosted.org/packages/da/b5/1f3b778ad1ccc395161fab7a3bf0dfbd85232234b6657c93213ed1ceda7e/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6bf05e18be6f6c003c7129e2878cffd76fcbebda4e7ebd7774e34ae140426cbf", size = 421891, upload-time = "2025-05-10T14:13:25.417Z" }, - { url = "https://files.pythonhosted.org/packages/83/c4/6bfb4725f4f38e9fe9735697060364fb36ee67546e7e8d78135044889619/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f7c3a5144aa4fbccf37c30411f6b1db4c0f2cb6ad4df470b37929bffe6ca0", size = 413608, upload-time = "2025-05-10T14:13:26.75Z" }, - { url = "https://files.pythonhosted.org/packages/95/a2/c48b543e3a482e758b648ea025b94efb1abe1f4859c5185ff02c29596035/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9efd4007f8369fd0890701a4fc77952a0a8c4cb3bd30f362a78a1adfb3c53c12", size = 416429, upload-time = "2025-05-10T14:13:28.096Z" }, - { url = "https://files.pythonhosted.org/packages/5c/62/2d039ee4dbc8116ca1f2a2729b88a1368f076f5dadad463f165993f7afa8/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5f8add139b5fd23b95daa844ca13118197f85bd35ce7507e92fcdce66286cc34", size = 446671, upload-time = "2025-05-10T14:13:29.772Z" }, - { url = "https://files.pythonhosted.org/packages/be/ec/9ec9f0957cf5b842c751103a2b75ecb0a73cf3d99fac57e0436aab6748e0/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:259a60e8ce9460367dcb4b34d8b66e44ca3d8c9c30d53ed59ae7037622b3bfc7", size = 520290, upload-time = "2025-05-10T14:13:31.585Z" }, - { url = "https://files.pythonhosted.org/packages/cc/42/2e2f4bb641c2a9ab693c31feebcffa1d7c24e946d8dde424bba371e4fcce/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:86011a93cc3455c5d2e35988feacffbf2fa106812a48e17eb32c2a52d25a95b3", size = 563785, upload-time = 
"2025-05-10T14:13:32.971Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/25e198d382faa4d322f617d7a5ff82af4dc65749a10d90f1423af2d194f6/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:425c31bc3de80313054e600398e4f1bd229ee61327896d5d015e2cd0283c9012", size = 433390, upload-time = "2025-05-10T14:13:34.668Z" }, - { url = "https://files.pythonhosted.org/packages/ad/7c/1ab970f5404ace9d343a36a86f1bd0fcf2dc1adf1ef8886394cf0a58bd9e/pyzstd-0.17.0-cp312-cp312-win32.whl", hash = "sha256:7c4b88183bb36eb2cebbc0352e6e9fe8e2d594f15859ae1ef13b63ebc58be158", size = 220291, upload-time = "2025-05-10T14:13:36.005Z" }, - { url = "https://files.pythonhosted.org/packages/b2/52/d35bf3e4f0676a74359fccef015eabe3ceaba95da4ac2212f8be4dde16de/pyzstd-0.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c31947e0120468342d74e0fa936d43f7e1dad66a2262f939735715aa6c730e8", size = 246451, upload-time = "2025-05-10T14:13:37.712Z" }, - { url = "https://files.pythonhosted.org/packages/34/da/a44705fe44dd87e0f09861b062f93ebb114365640dbdd62cbe80da9b8306/pyzstd-0.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:1d0346418abcef11507356a31bef5470520f6a5a786d4e2c69109408361b1020", size = 222967, upload-time = "2025-05-10T14:13:38.94Z" }, - { url = "https://files.pythonhosted.org/packages/b8/95/b1ae395968efdba92704c23f2f8e027d08e00d1407671e42f65ac914d211/pyzstd-0.17.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3ce6bac0c4c032c5200647992a8efcb9801c918633ebe11cceba946afea152d9", size = 368391, upload-time = "2025-05-10T14:14:33.064Z" }, - { url = "https://files.pythonhosted.org/packages/c7/72/856831cacef58492878b8307353e28a3ba4326a85c3c82e4803a95ad0d14/pyzstd-0.17.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:a00998144b35be7c485a383f739fe0843a784cd96c3f1f2f53f1a249545ce49a", size = 283561, upload-time = "2025-05-10T14:14:34.469Z" }, - { url = "https://files.pythonhosted.org/packages/a4/a7/a86e55cd9f3e630a71c0bf78ac6da0c6b50dc428ca81aa7c5adbc66eb880/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8521d7bbd00e0e1c1fd222c1369a7600fba94d24ba380618f9f75ee0c375c277", size = 356912, upload-time = "2025-05-10T14:14:35.722Z" }, - { url = "https://files.pythonhosted.org/packages/ad/b7/de2b42dd96dfdb1c0feb5f43d53db2d3a060607f878da7576f35dff68789/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da65158c877eac78dcc108861d607c02fb3703195c3a177f2687e0bcdfd519d0", size = 329417, upload-time = "2025-05-10T14:14:37.487Z" }, - { url = "https://files.pythonhosted.org/packages/52/65/d4e8196e068e6b430499fb2a5092380eb2cb7eecf459b9d4316cff7ecf6c/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:226ca0430e2357abae1ade802585231a2959b010ec9865600e416652121ba80b", size = 349448, upload-time = "2025-05-10T14:14:38.797Z" }, - { url = "https://files.pythonhosted.org/packages/9e/15/b5ed5ad8c8d2d80c5f5d51e6c61b2cc05f93aaf171164f67ccc7ade815cd/pyzstd-0.17.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e3a19e8521c145a0e2cd87ca464bf83604000c5454f7e0746092834fd7de84d1", size = 241668, upload-time = "2025-05-10T14:14:40.18Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { 
url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = 
"2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, ] [[package]] @@ -5308,52 +5278,52 @@ hiredis = [ [[package]] name = "referencing" -version = "0.36.2" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, ] [[package]] name = "regex" -version = "2025.9.1" +version = "2025.9.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/5a/4c63457fbcaf19d138d72b2e9b39405954f98c0349b31c601bfcb151582c/regex-2025.9.1.tar.gz", hash = "sha256:88ac07b38d20b54d79e704e38aa3bd2c0f8027432164226bdee201a1c0c9c9ff", size = 400852, upload-time = "2025-09-01T22:10:10.479Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917, upload-time = "2025-09-19T00:38:35.79Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/4d/f741543c0c59f96c6625bc6c11fea1da2e378b7d293ffff6f318edc0ce14/regex-2025.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e5bcf112b09bfd3646e4db6bf2e598534a17d502b0c01ea6550ba4eca780c5e6", size = 484811, upload-time = "2025-09-01T22:08:12.834Z" }, - { url = "https://files.pythonhosted.org/packages/c2/bd/27e73e92635b6fbd51afc26a414a3133243c662949cd1cda677fe7bb09bd/regex-2025.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:67a0295a3c31d675a9ee0238d20238ff10a9a2fdb7a1323c798fc7029578b15c", size = 288977, upload-time = "2025-09-01T22:08:14.499Z" }, - { url = "https://files.pythonhosted.org/packages/eb/7d/7dc0c6efc8bc93cd6e9b947581f5fde8a5dbaa0af7c4ec818c5729fdc807/regex-2025.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea8267fbadc7d4bd7c1301a50e85c2ff0de293ff9452a1a9f8d82c6cafe38179", size = 286606, upload-time = "2025-09-01T22:08:15.881Z" }, - { url = "https://files.pythonhosted.org/packages/d1/01/9b5c6dd394f97c8f2c12f6e8f96879c9ac27292a718903faf2e27a0c09f6/regex-2025.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6aeff21de7214d15e928fb5ce757f9495214367ba62875100d4c18d293750cc1", size = 792436, upload-time = "2025-09-01T22:08:17.38Z" }, - { url = "https://files.pythonhosted.org/packages/fc/24/b7430cfc6ee34bbb3db6ff933beb5e7692e5cc81e8f6f4da63d353566fb0/regex-2025.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d89f1bbbbbc0885e1c230f7770d5e98f4f00b0ee85688c871d10df8b184a6323", size = 858705, upload-time = "2025-09-01T22:08:19.037Z" }, - { url = "https://files.pythonhosted.org/packages/d6/98/155f914b4ea6ae012663188545c4f5216c11926d09b817127639d618b003/regex-2025.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca3affe8ddea498ba9d294ab05f5f2d3b5ad5d515bc0d4a9016dd592a03afe52", size = 905881, upload-time = "2025-09-01T22:08:20.377Z" }, - { url = "https://files.pythonhosted.org/packages/8a/a7/a470e7bc8259c40429afb6d6a517b40c03f2f3e455c44a01abc483a1c512/regex-2025.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91892a7a9f0a980e4c2c85dd19bc14de2b219a3a8867c4b5664b9f972dcc0c78", size = 798968, upload-time = "2025-09-01T22:08:22.081Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/33f6fec4d41449fea5f62fdf5e46d668a1c046730a7f4ed9f478331a8e3a/regex-2025.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1cb40406f4ae862710615f9f636c1e030fd6e6abe0e0f65f6a695a2721440c6", size = 781884, upload-time = "2025-09-01T22:08:23.832Z" }, - { url = "https://files.pythonhosted.org/packages/42/de/2b45f36ab20da14eedddf5009d370625bc5942d9953fa7e5037a32d66843/regex-2025.9.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94f6cff6f7e2149c7e6499a6ecd4695379eeda8ccbccb9726e8149f2fe382e92", size = 852935, upload-time = "2025-09-01T22:08:25.536Z" }, - { url = "https://files.pythonhosted.org/packages/1e/f9/878f4fc92c87e125e27aed0f8ee0d1eced9b541f404b048f66f79914475a/regex-2025.9.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6c0226fb322b82709e78c49cc33484206647f8a39954d7e9de1567f5399becd0", size = 844340, upload-time = "2025-09-01T22:08:27.141Z" }, - { url = "https://files.pythonhosted.org/packages/90/c2/5b6f2bce6ece5f8427c718c085eca0de4bbb4db59f54db77aa6557aef3e9/regex-2025.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a12f59c7c380b4fcf7516e9cbb126f95b7a9518902bcf4a852423ff1dcd03e6a", size = 787238, upload-time = "2025-09-01T22:08:28.75Z" }, - { url = "https://files.pythonhosted.org/packages/47/66/1ef1081c831c5b611f6f55f6302166cfa1bc9574017410ba5595353f846a/regex-2025.9.1-cp311-cp311-win32.whl", hash = "sha256:49865e78d147a7a4f143064488da5d549be6bfc3f2579e5044cac61f5c92edd4", size = 264118, upload-time = "2025-09-01T22:08:30.388Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e0/8adc550d7169df1d6b9be8ff6019cda5291054a0107760c2f30788b6195f/regex-2025.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:d34b901f6f2f02ef60f4ad3855d3a02378c65b094efc4b80388a3aeb700a5de7", size = 276151, upload-time = "2025-09-01T22:08:32.073Z" }, - { url = "https://files.pythonhosted.org/packages/cb/bd/46fef29341396d955066e55384fb93b0be7d64693842bf4a9a398db6e555/regex-2025.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:47d7c2dab7e0b95b95fd580087b6ae196039d62306a592fa4e162e49004b6299", size = 268460, upload-time = "2025-09-01T22:08:33.281Z" }, - { url = "https://files.pythonhosted.org/packages/39/ef/a0372febc5a1d44c1be75f35d7e5aff40c659ecde864d7fa10e138f75e74/regex-2025.9.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:84a25164bd8dcfa9f11c53f561ae9766e506e580b70279d05a7946510bdd6f6a", size = 486317, upload-time = "2025-09-01T22:08:34.529Z" }, - { url = "https://files.pythonhosted.org/packages/b5/25/d64543fb7eb41a1024786d518cc57faf1ce64aa6e9ddba097675a0c2f1d2/regex-2025.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:645e88a73861c64c1af558dd12294fb4e67b5c1eae0096a60d7d8a2143a611c7", size = 289698, upload-time = "2025-09-01T22:08:36.162Z" }, - { url = "https://files.pythonhosted.org/packages/d8/dc/fbf31fc60be317bd9f6f87daa40a8a9669b3b392aa8fe4313df0a39d0722/regex-2025.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10a450cba5cd5409526ee1d4449f42aad38dd83ac6948cbd6d7f71ca7018f7db", size = 287242, upload-time = "2025-09-01T22:08:37.794Z" }, - { url = "https://files.pythonhosted.org/packages/0f/74/f933a607a538f785da5021acf5323961b4620972e2c2f1f39b6af4b71db7/regex-2025.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9dc5991592933a4192c166eeb67b29d9234f9c86344481173d1bc52f73a7104", size = 797441, upload-time = "2025-09-01T22:08:39.108Z" }, - { url = "https://files.pythonhosted.org/packages/89/d0/71fc49b4f20e31e97f199348b8c4d6e613e7b6a54a90eb1b090c2b8496d7/regex-2025.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a32291add816961aab472f4fad344c92871a2ee33c6c219b6598e98c1f0108f2", size = 862654, upload-time = "2025-09-01T22:08:40.586Z" }, - { url = "https://files.pythonhosted.org/packages/59/05/984edce1411a5685ba9abbe10d42cdd9450aab4a022271f9585539788150/regex-2025.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:588c161a68a383478e27442a678e3b197b13c5ba51dbba40c1ccb8c4c7bee9e9", size = 910862, upload-time = "2025-09-01T22:08:42.416Z" }, - { url = "https://files.pythonhosted.org/packages/b2/02/5c891bb5fe0691cc1bad336e3a94b9097fbcf9707ec8ddc1dce9f0397289/regex-2025.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47829ffaf652f30d579534da9085fe30c171fa2a6744a93d52ef7195dc38218b", size = 801991, upload-time = "2025-09-01T22:08:44.072Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ae/fd10d6ad179910f7a1b3e0a7fde1ef8bb65e738e8ac4fd6ecff3f52252e4/regex-2025.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e978e5a35b293ea43f140c92a3269b6ab13fe0a2bf8a881f7ac740f5a6ade85", size = 786651, upload-time = "2025-09-01T22:08:46.079Z" }, - { url = "https://files.pythonhosted.org/packages/30/cf/9d686b07bbc5bf94c879cc168db92542d6bc9fb67088d03479fef09ba9d3/regex-2025.9.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf09903e72411f4bf3ac1eddd624ecfd423f14b2e4bf1c8b547b72f248b7bf7", size = 856556, upload-time = "2025-09-01T22:08:48.376Z" }, - { url = "https://files.pythonhosted.org/packages/91/9d/302f8a29bb8a49528abbab2d357a793e2a59b645c54deae0050f8474785b/regex-2025.9.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d016b0f77be63e49613c9e26aaf4a242f196cd3d7a4f15898f5f0ab55c9b24d2", size = 849001, upload-time = "2025-09-01T22:08:50.067Z" }, - { url = "https://files.pythonhosted.org/packages/93/fa/b4c6dbdedc85ef4caec54c817cd5f4418dbfa2453214119f2538082bf666/regex-2025.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:656563e620de6908cd1c9d4f7b9e0777e3341ca7db9d4383bcaa44709c90281e", size = 788138, upload-time = "2025-09-01T22:08:51.933Z" }, - { url = "https://files.pythonhosted.org/packages/4a/1b/91ee17a3cbf87f81e8c110399279d0e57f33405468f6e70809100f2ff7d8/regex-2025.9.1-cp312-cp312-win32.whl", hash = "sha256:df33f4ef07b68f7ab637b1dbd70accbf42ef0021c201660656601e8a9835de45", size = 264524, upload-time = "2025-09-01T22:08:53.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/28/6ba31cce05b0f1ec6b787921903f83bd0acf8efde55219435572af83c350/regex-2025.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:5aba22dfbc60cda7c0853516104724dc904caa2db55f2c3e6e984eb858d3edf3", size = 275489, upload-time = "2025-09-01T22:08:55.037Z" }, - { url = "https://files.pythonhosted.org/packages/bd/ed/ea49f324db00196e9ef7fe00dd13c6164d5173dd0f1bbe495e61bb1fb09d/regex-2025.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:ec1efb4c25e1849c2685fa95da44bfde1b28c62d356f9c8d861d4dad89ed56e9", size = 268589, upload-time = "2025-09-01T22:08:56.369Z" }, + { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832, upload-time = "2025-09-19T00:35:30.011Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994, upload-time = "2025-09-19T00:35:31.733Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619, upload-time = "2025-09-19T00:35:33.221Z" }, + { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454, upload-time = "2025-09-19T00:35:35.361Z" }, + { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 858723, upload-time = "2025-09-19T00:35:36.949Z" }, + { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899, upload-time = "2025-09-19T00:35:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981, upload-time = "2025-09-19T00:35:40.416Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900, upload-time = "2025-09-19T00:35:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952, upload-time = "2025-09-19T00:35:43.751Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355, upload-time = "2025-09-19T00:35:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254, upload-time = "2025-09-19T00:35:46.904Z" }, + { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129, upload-time = "2025-09-19T00:35:48.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160, upload-time = "2025-09-19T00:36:00.45Z" }, + { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471, upload-time = "2025-09-19T00:36:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335, upload-time = "2025-09-19T00:36:03.661Z" }, + { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720, upload-time = "2025-09-19T00:36:05.471Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257, upload-time = "2025-09-19T00:36:07.072Z" }, + { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463, upload-time = "2025-09-19T00:36:08.399Z" }, + { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670, upload-time = "2025-09-19T00:36:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881, upload-time = "2025-09-19T00:36:12.223Z" }, + { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011, upload-time = "2025-09-19T00:36:13.901Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668, upload-time = "2025-09-19T00:36:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 
856578, upload-time = "2025-09-19T00:36:16.845Z" }, + { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017, upload-time = "2025-09-19T00:36:18.597Z" }, + { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150, upload-time = "2025-09-19T00:36:20.464Z" }, + { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536, upload-time = "2025-09-19T00:36:21.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size = 275501, upload-time = "2025-09-19T00:36:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601, upload-time = "2025-09-19T00:36:25.092Z" }, ] [[package]] @@ -5424,15 +5394,15 @@ wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "14.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, ] [[package]] @@ -5499,28 +5469,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.12" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" }, - { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" }, - { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" }, - { url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" }, - { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" }, - { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" }, - { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" }, - { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" }, - { url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" }, - { url = 
"https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" }, - { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" }, - { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" }, - { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" }, - { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" }, - { url = "https://files.pythonhosted.org/packages/c8/fa/05b6428a008e60f79546c943e54068316f32ec8ab5c4f73e4563934fbdc7/ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1", size = 12052870, upload-time = "2025-09-04T16:50:09.121Z" }, - { url = "https://files.pythonhosted.org/packages/85/60/d1e335417804df452589271818749d061b22772b87efda88354cf35cdb7a/ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d", size = 13178016, upload-time = "2025-09-04T16:50:12.559Z" }, - { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" }, + { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" }, + { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" }, + { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280, upload-time = "2025-10-07T18:21:16.411Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" }, + { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" }, + { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" }, + { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" }, + { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" }, + { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872, upload-time = "2025-10-07T18:21:46.67Z" }, + { url = "https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628, upload-time = "2025-10-07T18:21:50.318Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142, upload-time = "2025-10-07T18:21:53.577Z" }, ] [[package]] @@ -5559,28 +5529,28 @@ wheels = [ [[package]] name = "scipy-stubs" -version = "1.16.2.0" +version = "1.16.2.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "optype", extra = ["numpy"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/84/b4c2caf7748f331870992e7ede5b5df0b080671bcef8c8c7e27a3cf8694a/scipy_stubs-1.16.2.0.tar.gz", hash = "sha256:8fdd45155fca401bb755b1b63ac2f192f84f25c3be8da2c99d1cafb2708f3052", size = 352676, upload-time = "2025-09-11T23:28:59.236Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/47/b165711b36a1afb8d5f408393487586e07f8bdb86f829b5b904c1ddd091f/scipy_stubs-1.16.2.3.tar.gz", hash = "sha256:b1afd21442699b8bdd399508187bddcedc6c29a34b188fd603396cb6754c2a91", size = 355436, upload-time = "2025-10-08T01:38:37.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/c8/67d984c264f759e7653c130a4b12ae3b4f4304867579560e9a869adb7883/scipy_stubs-1.16.2.0-py3-none-any.whl", hash = "sha256:18c50d49e3c932033fdd4f7fa4fea9e45c8787f92bceaec9e86ccbd140e835d5", size = 553247, upload-time = "2025-09-11T23:28:57.688Z" }, + { url = "https://files.pythonhosted.org/packages/29/9f/3d8f613d0c3be9348cb0c351328249b7a2428f13329447ec6f395628d7b0/scipy_stubs-1.16.2.3-py3-none-any.whl", hash = "sha256:05e93238bdaedb7fa1afedf9c3a2337f94fec3d8c33fb2d403c933e1bcc7412e", size = 556848, upload-time = "2025-10-08T01:38:35.697Z" }, ] [[package]] name = "sendgrid" -version = "6.12.4" +version = "6.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "ecdsa" }, + { name = "cryptography" }, { name = "python-http-client" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271, upload-time = "2025-06-12T10:29:37.213Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/fa/f718b2b953f99c1f0085811598ac7e31ccbd4229a81ec2a5290be868187a/sendgrid-6.12.5.tar.gz", hash = "sha256:ea9aae30cd55c332e266bccd11185159482edfc07c149b6cd15cf08869fabdb7", size = 50310, upload-time = "2025-09-19T06:23:09.229Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122, 
upload-time = "2025-06-12T10:29:35.457Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/b3c3880a77082e8f7374954e0074aafafaa9bc78bdf9c8f5a92c2e7afc6a/sendgrid-6.12.5-py3-none-any.whl", hash = "sha256:96f92cc91634bf552fdb766b904bbb53968018da7ae41fdac4d1090dc0311ca8", size = 102173, upload-time = "2025-09-19T06:23:07.93Z" }, ] [[package]] @@ -5614,29 +5584,29 @@ wheels = [ [[package]] name = "shapely" -version = "2.1.1" +version = "2.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422, upload-time = "2025-05-19T11:04:41.265Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/97/2df985b1e03f90c503796ad5ecd3d9ed305123b64d4ccb54616b30295b29/shapely-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587a1aa72bc858fab9b8c20427b5f6027b7cbc92743b8e2c73b9de55aa71c7a7", size = 1819368, upload-time = "2025-05-19T11:03:55.937Z" }, - { url = "https://files.pythonhosted.org/packages/56/17/504518860370f0a28908b18864f43d72f03581e2b6680540ca668f07aa42/shapely-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fa5c53b0791a4b998f9ad84aad456c988600757a96b0a05e14bba10cebaaaea", size = 1625362, upload-time = "2025-05-19T11:03:57.06Z" }, - { url = "https://files.pythonhosted.org/packages/36/a1/9677337d729b79fce1ef3296aac6b8ef4743419086f669e8a8070eff8f40/shapely-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aabecd038841ab5310d23495253f01c2a82a3aedae5ab9ca489be214aa458aa7", size = 2999005, upload-time = "2025-05-19T11:03:58.692Z" }, - { url = "https://files.pythonhosted.org/packages/a2/17/e09357274699c6e012bbb5a8ea14765a4d5860bb658df1931c9f90d53bd3/shapely-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586f6aee1edec04e16227517a866df3e9a2e43c1f635efc32978bb3dc9c63753", size = 3108489, upload-time = "2025-05-19T11:04:00.059Z" }, - { url = "https://files.pythonhosted.org/packages/17/5d/93a6c37c4b4e9955ad40834f42b17260ca74ecf36df2e81bb14d12221b90/shapely-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b9878b9e37ad26c72aada8de0c9cfe418d9e2ff36992a1693b7f65a075b28647", size = 3945727, upload-time = "2025-05-19T11:04:01.786Z" }, - { url = "https://files.pythonhosted.org/packages/a3/1a/ad696648f16fd82dd6bfcca0b3b8fbafa7aacc13431c7fc4c9b49e481681/shapely-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9a531c48f289ba355e37b134e98e28c557ff13965d4653a5228d0f42a09aed0", size = 4109311, upload-time = "2025-05-19T11:04:03.134Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/150dd245beab179ec0d4472bf6799bf18f21b1efbef59ac87de3377dbf1c/shapely-2.1.1-cp311-cp311-win32.whl", hash = "sha256:4866de2673a971820c75c0167b1f1cd8fb76f2d641101c23d3ca021ad0449bab", size = 1522982, upload-time = "2025-05-19T11:04:05.217Z" }, - { url = "https://files.pythonhosted.org/packages/93/5b/842022c00fbb051083c1c85430f3bb55565b7fd2d775f4f398c0ba8052ce/shapely-2.1.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:20a9d79958b3d6c70d8a886b250047ea32ff40489d7abb47d01498c704557a93", size = 1703872, upload-time = "2025-05-19T11:04:06.791Z" }, - { url = "https://files.pythonhosted.org/packages/fb/64/9544dc07dfe80a2d489060791300827c941c451e2910f7364b19607ea352/shapely-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2827365b58bf98efb60affc94a8e01c56dd1995a80aabe4b701465d86dcbba43", size = 1833021, upload-time = "2025-05-19T11:04:08.022Z" }, - { url = "https://files.pythonhosted.org/packages/07/aa/fb5f545e72e89b6a0f04a0effda144f5be956c9c312c7d4e00dfddbddbcf/shapely-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c551f7fa7f1e917af2347fe983f21f212863f1d04f08eece01e9c275903fad", size = 1643018, upload-time = "2025-05-19T11:04:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/03/46/61e03edba81de729f09d880ce7ae5c1af873a0814206bbfb4402ab5c3388/shapely-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78dec4d4fbe7b1db8dc36de3031767e7ece5911fb7782bc9e95c5cdec58fb1e9", size = 2986417, upload-time = "2025-05-19T11:04:10.56Z" }, - { url = "https://files.pythonhosted.org/packages/1f/1e/83ec268ab8254a446b4178b45616ab5822d7b9d2b7eb6e27cf0b82f45601/shapely-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872d3c0a7b8b37da0e23d80496ec5973c4692920b90de9f502b5beb994bbaaef", size = 3098224, upload-time = "2025-05-19T11:04:11.903Z" }, - { url = "https://files.pythonhosted.org/packages/f1/44/0c21e7717c243e067c9ef8fa9126de24239f8345a5bba9280f7bb9935959/shapely-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e2b9125ebfbc28ecf5353511de62f75a8515ae9470521c9a693e4bb9fbe0cf1", size = 3925982, upload-time = "2025-05-19T11:04:13.224Z" }, - { url = "https://files.pythonhosted.org/packages/15/50/d3b4e15fefc103a0eb13d83bad5f65cd6e07a5d8b2ae920e767932a247d1/shapely-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4b96cea171b3d7f6786976a0520f178c42792897653ecca0c5422fb1e6946e6d", size = 4089122, upload-time = "2025-05-19T11:04:14.477Z" }, - { url = "https://files.pythonhosted.org/packages/bd/05/9a68f27fc6110baeedeeebc14fd86e73fa38738c5b741302408fb6355577/shapely-2.1.1-cp312-cp312-win32.whl", hash = "sha256:39dca52201e02996df02e447f729da97cfb6ff41a03cb50f5547f19d02905af8", size = 1522437, upload-time = "2025-05-19T11:04:16.203Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e9/a4560e12b9338842a1f82c9016d2543eaa084fce30a1ca11991143086b57/shapely-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:13d643256f81d55a50013eff6321142781cf777eb6a9e207c2c9e6315ba6044a", size = 1703479, upload-time = "2025-05-19T11:04:18.497Z" }, + { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" }, + { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", 
size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" }, + { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" }, + { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" }, + { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" }, + { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" }, + { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" }, ] [[package]] @@ -5704,31 +5674,31 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.43" +version = "2.0.44" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/bc/d59b5d97d27229b0e009bd9098cd81af71c2fa5549c580a0a67b9bed0496/sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417", size = 9762949, upload-time = "2025-08-11T14:24:58.438Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/77/fa7189fe44114658002566c6fe443d3ed0ec1fa782feb72af6ef7fbe98e7/sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29", size = 2136472, upload-time = "2025-08-11T15:52:21.789Z" }, - { url = "https://files.pythonhosted.org/packages/99/ea/92ac27f2fbc2e6c1766bb807084ca455265707e041ba027c09c17d697867/sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631", size = 2126535, upload-time = "2025-08-11T15:52:23.109Z" }, - { url = "https://files.pythonhosted.org/packages/94/12/536ede80163e295dc57fff69724caf68f91bb40578b6ac6583a293534849/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685", size = 3297521, upload-time = "2025-08-11T15:50:33.536Z" }, - { url = "https://files.pythonhosted.org/packages/03/b5/cacf432e6f1fc9d156eca0560ac61d4355d2181e751ba8c0cd9cb232c8c1/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca", size = 3297343, upload-time = "2025-08-11T15:57:51.186Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ba/d4c9b526f18457667de4c024ffbc3a0920c34237b9e9dd298e44c7c00ee5/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d", size = 3232113, upload-time = "2025-08-11T15:50:34.949Z" }, - { url = "https://files.pythonhosted.org/packages/aa/79/c0121b12b1b114e2c8a10ea297a8a6d5367bc59081b2be896815154b1163/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3", size = 
3258240, upload-time = "2025-08-11T15:57:52.983Z" }, - { url = "https://files.pythonhosted.org/packages/79/99/a2f9be96fb382f3ba027ad42f00dbe30fdb6ba28cda5f11412eee346bec5/sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921", size = 2101248, upload-time = "2025-08-11T15:55:01.855Z" }, - { url = "https://files.pythonhosted.org/packages/ee/13/744a32ebe3b4a7a9c7ea4e57babae7aa22070d47acf330d8e5a1359607f1/sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8", size = 2126109, upload-time = "2025-08-11T15:55:04.092Z" }, - { url = "https://files.pythonhosted.org/packages/61/db/20c78f1081446095450bdc6ee6cc10045fce67a8e003a5876b6eaafc5cc4/sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24", size = 2134891, upload-time = "2025-08-11T15:51:13.019Z" }, - { url = "https://files.pythonhosted.org/packages/45/0a/3d89034ae62b200b4396f0f95319f7d86e9945ee64d2343dcad857150fa2/sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83", size = 2123061, upload-time = "2025-08-11T15:51:14.319Z" }, - { url = "https://files.pythonhosted.org/packages/cb/10/2711f7ff1805919221ad5bee205971254845c069ee2e7036847103ca1e4c/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9", size = 3320384, upload-time = "2025-08-11T15:52:35.088Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0e/3d155e264d2ed2778484006ef04647bc63f55b3e2d12e6a4f787747b5900/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48", size = 3329648, upload-time = "2025-08-11T15:56:34.153Z" }, - { url = "https://files.pythonhosted.org/packages/5b/81/635100fb19725c931622c673900da5efb1595c96ff5b441e07e3dd61f2be/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687", size = 3258030, upload-time = "2025-08-11T15:52:36.933Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ed/a99302716d62b4965fded12520c1cbb189f99b17a6d8cf77611d21442e47/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe", size = 3294469, upload-time = "2025-08-11T15:56:35.553Z" }, - { url = "https://files.pythonhosted.org/packages/5d/a2/3a11b06715149bf3310b55a98b5c1e84a42cfb949a7b800bc75cb4e33abc/sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d", size = 2098906, upload-time = "2025-08-11T15:55:00.645Z" }, - { url = "https://files.pythonhosted.org/packages/bc/09/405c915a974814b90aa591280623adc6ad6b322f61fd5cff80aeaef216c9/sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a", size = 2126260, upload-time = "2025-08-11T15:55:02.965Z" }, - { url = "https://files.pythonhosted.org/packages/b8/d9/13bdde6521f322861fab67473cec4b1cc8999f3871953531cf61945fad92/sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc", size = 1924759, upload-time = "2025-08-11T15:39:53.024Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517, upload-time = "2025-10-10T15:36:15.64Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738, upload-time = "2025-10-10T15:36:16.91Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145, upload-time = "2025-10-10T15:34:19.569Z" }, + { url = "https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511, upload-time = "2025-10-10T15:47:05.088Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161, upload-time = "2025-10-10T15:34:21.193Z" }, + { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426, upload-time = "2025-10-10T15:47:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/f8/3b/55c09b285cb2d55bdfa711e778bdffdd0dc3ffa052b0af41f1c5d6e582fa/sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3", size = 2105392, upload-time = "2025-10-10T15:38:20.051Z" }, + { url = "https://files.pythonhosted.org/packages/c7/23/907193c2f4d680aedbfbdf7bf24c13925e3c7c292e813326c1b84a0b878e/sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5", size = 2130293, upload-time = "2025-10-10T15:38:21.601Z" }, + { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675, upload-time = "2025-10-10T16:03:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726, upload-time = "2025-10-10T16:03:35.934Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603, upload-time = "2025-10-10T15:35:28.322Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842, upload-time = "2025-10-10T15:43:45.431Z" }, + { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558, upload-time = "2025-10-10T15:35:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570, upload-time = "2025-10-10T15:43:48.407Z" }, + { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447, upload-time = "2025-10-10T15:03:21.678Z" }, + { url = "https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912, upload-time = "2025-10-10T15:03:24.656Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, ] [[package]] @@ -6007,31 +5977,27 @@ wheels = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = 
"2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = 
"https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, ] [[package]] @@ -6061,7 +6027,7 @@ wheels = [ [[package]] name = "transformers" -version = "4.56.1" +version = "4.56.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -6075,39 +6041,39 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/21/dc88ef3da1e49af07ed69386a11047a31dcf1aaf4ded3bc4b173fbf94116/transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74", size = 9855473, upload-time = "2025-09-04T20:47:13.14Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/82/0bcfddd134cdf53440becb5e738257cc3cf34cf229d63b57bfd288e6579f/transformers-4.56.2.tar.gz", hash = "sha256:5e7c623e2d7494105c726dd10f6f90c2c99a55ebe86eef7233765abd0cb1c529", size = 9844296, upload-time = "2025-09-19T15:16:26.778Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/7c/283c3dd35e00e22a7803a0b2a65251347b745474a82399be058bde1c9f15/transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248", size = 11608197, upload-time = "2025-09-04T20:47:04.895Z" }, + { url = "https://files.pythonhosted.org/packages/70/26/2591b48412bde75e33bfd292034103ffe41743cacd03120e3242516cd143/transformers-4.56.2-py3-none-any.whl", hash = "sha256:79c03d0e85b26cb573c109ff9eafa96f3c8d4febfd8a0774e8bba32702dd6dde", size = 11608055, upload-time = "2025-09-19T15:16:23.736Z" }, ] [[package]] name = "ty" -version = "0.0.1a20" +version = "0.0.1a22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7a/82/a5e3b4bc5280ec49c4b0b43d0ff727d58c7df128752c9c6f97ad0b5f575f/ty-0.0.1a20.tar.gz", hash = "sha256:933b65a152f277aa0e23ba9027e5df2c2cc09e18293e87f2a918658634db5f15", size = 4194773, upload-time = "2025-09-03T12:35:46.775Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/87/eab73cdc990d1141b60237379975efc0e913bfa0d19083daab0f497444a6/ty-0.0.1a22.tar.gz", hash = "sha256:b20ec5362830a1e9e05654c15e88607fdbb45325ec130a9a364c6dd412ecbf55", size = 4312182, upload-time = "2025-10-10T13:07:15.88Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/c8/f7d39392043d5c04936f6cad90e50eb661965ed092ca4bfc01db917d7b8a/ty-0.0.1a20-py3-none-linux_armv6l.whl", hash = "sha256:f73a7aca1f0d38af4d6999b375eb00553f3bfcba102ae976756cc142e14f3450", size = 8443599, upload-time = "2025-09-03T12:35:04.289Z" }, - { url = "https://files.pythonhosted.org/packages/1e/57/5aec78f9b8a677b7439ccded7d66c3361e61247e0f6b14e659b00dd01008/ty-0.0.1a20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cad12c857ea4b97bf61e02f6796e13061ccca5e41f054cbd657862d80aa43bae", size = 8618102, upload-time = "2025-09-03T12:35:07.448Z" }, - { url = "https://files.pythonhosted.org/packages/15/20/50c9107d93cdb55676473d9dc4e2339af6af606660c9428d3b86a1b2a476/ty-0.0.1a20-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:f153b65c7fcb6b8b59547ddb6353761b3e8d8bb6f0edd15e3e3ac14405949f7a", size = 8192167, upload-time = "2025-09-03T12:35:09.706Z" }, - { url = "https://files.pythonhosted.org/packages/85/28/018b2f330109cee19e81c5ca9df3dc29f06c5778440eb9af05d4550c4302/ty-0.0.1a20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c4336987a6a781d4392a9fd7b3a39edb7e4f3dd4f860e03f46c932b52aefa2", size = 8349256, upload-time = "2025-09-03T12:35:11.76Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c9/2f8797a05587158f52b142278796ffd72c893bc5ad41840fce5aeb65c6f2/ty-0.0.1a20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ff75cd4c744d09914e8c9db8d99e02f82c9379ad56b0a3fc4c5c9c923cfa84e", size = 8271214, upload-time = "2025-09-03T12:35:13.741Z" }, - { url = "https://files.pythonhosted.org/packages/30/d4/2cac5e5eb9ee51941358cb3139aadadb59520cfaec94e4fcd2b166969748/ty-0.0.1a20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e26437772be7f7808868701f2bf9e14e706a6ec4c7d02dbd377ff94d7ba60c11", size = 9264939, upload-time = "2025-09-03T12:35:16.896Z" }, - { url = "https://files.pythonhosted.org/packages/93/96/a6f2b54e484b2c6a5488f217882237dbdf10f0fdbdb6cd31333d57afe494/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83a7ee12465841619b5eb3ca962ffc7d576bb1c1ac812638681aee241acbfbbe", size = 9743137, upload-time = "2025-09-03T12:35:19.799Z" }, - { url = "https://files.pythonhosted.org/packages/6e/67/95b40dcbec3d222f3af5fe5dd1ce066d42f8a25a2f70d5724490457048e7/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:726d0738be4459ac7ffae312ba96c5f486d6cbc082723f322555d7cba9397871", size = 9368153, upload-time = "2025-09-03T12:35:22.569Z" }, - { url = "https://files.pythonhosted.org/packages/2c/24/689fa4c4270b9ef9a53dc2b1d6ffade259ba2c4127e451f0629e130ea46a/ty-0.0.1a20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b481f26513f38543df514189fb16744690bcba8d23afee95a01927d93b46e36", size = 9099637, upload-time = "2025-09-03T12:35:24.94Z" }, - { url = "https://files.pythonhosted.org/packages/a1/5b/913011cbf3ea4030097fb3c4ce751856114c9e1a5e1075561a4c5242af9b/ty-0.0.1a20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abbe3c02218c12228b1d7c5f98c57240029cc3bcb15b6997b707c19be3908c1", size = 8952000, upload-time = "2025-09-03T12:35:27.288Z" }, - { url = "https://files.pythonhosted.org/packages/df/f9/f5ba2ae455b20c5bb003f9940ef8142a8c4ed9e27de16e8f7472013609db/ty-0.0.1a20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fff51c75ee3f7cc6d7722f2f15789ef8ffe6fd2af70e7269ac785763c906688e", size = 8217938, upload-time = "2025-09-03T12:35:29.54Z" }, - { url = "https://files.pythonhosted.org/packages/eb/62/17002cf9032f0981cdb8c898d02422c095c30eefd69ca62a8b705d15bd0f/ty-0.0.1a20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b4124ab75e0e6f09fe7bc9df4a77ee43c5e0ef7e61b0c149d7c089d971437cbd", size = 8292369, upload-time = "2025-09-03T12:35:31.748Z" }, - { url = "https://files.pythonhosted.org/packages/28/d6/0879b1fb66afe1d01d45c7658f3849aa641ac4ea10679404094f3b40053e/ty-0.0.1a20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8a138fa4f74e6ed34e9fd14652d132409700c7ff57682c2fed656109ebfba42f", size = 8811973, upload-time = "2025-09-03T12:35:33.997Z" }, - { url = "https://files.pythonhosted.org/packages/60/1e/70bf0348cfe8ba5f7532983f53c508c293ddf5fa9f942ed79a3c4d576df3/ty-0.0.1a20-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:8eff8871d6b88d150e2a67beba2c57048f20c090c219f38ed02eebaada04c124", size = 9010990, upload-time = "2025-09-03T12:35:36.766Z" }, - { url = "https://files.pythonhosted.org/packages/b7/ca/03d85c7650359247b1ca3f38a0d869a608ef540450151920e7014ed58292/ty-0.0.1a20-py3-none-win32.whl", hash = "sha256:3c2ace3a22fab4bd79f84c74e3dab26e798bfba7006bea4008d6321c1bd6efc6", size = 8100746, upload-time = "2025-09-03T12:35:40.007Z" }, - { url = "https://files.pythonhosted.org/packages/94/53/7a1937b8c7a66d0c8ed7493de49ed454a850396fe137d2ae12ed247e0b2f/ty-0.0.1a20-py3-none-win_amd64.whl", hash = "sha256:f41e77ff118da3385915e13c3f366b3a2f823461de54abd2e0ca72b170ba0f19", size = 8748861, upload-time = "2025-09-03T12:35:42.175Z" }, - { url = "https://files.pythonhosted.org/packages/27/36/5a3a70c5d497d3332f9e63cabc9c6f13484783b832fecc393f4f1c0c4aa8/ty-0.0.1a20-py3-none-win_arm64.whl", hash = "sha256:d8ac1c5a14cda5fad1a8b53959d9a5d979fe16ce1cc2785ea8676fed143ac85f", size = 8269906, upload-time = "2025-09-03T12:35:45.045Z" }, + { url = "https://files.pythonhosted.org/packages/4d/30/83e2dbfbc70de8a1932b19daf05ce803d7d76cdc6251de1519a49cf1c27d/ty-0.0.1a22-py3-none-linux_armv6l.whl", hash = "sha256:6efba0c777881d2d072fa7375a64ad20357e825eff2a0b6ff9ec80399a04253b", size = 8581795, upload-time = "2025-10-10T13:06:44.396Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8c/5193534fc4a3569f517408828d077b26d6280fe8c2dd0bdc63db4403dcdb/ty-0.0.1a22-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2ada020eebe1b44403affdf45cd5c8d3fb8312c3e80469d795690093c0921f55", size = 8682602, upload-time = "2025-10-10T13:06:46.44Z" }, + { url = "https://files.pythonhosted.org/packages/22/4a/7ba53493bf37b61d3e0dfe6df910e6bc74c40d16c3effd84e15c0863d34e/ty-0.0.1a22-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ed4f11f1a5824ea10d3e46b1990d092c3f341b1d492c357d23bed2ac347fd253", size = 8278839, upload-time = "2025-10-10T13:06:48.688Z" }, + { url = "https://files.pythonhosted.org/packages/52/0a/d9862c41b9615de56d2158bfbb5177dbf5a65e94922d3dd13855f48cb91b/ty-0.0.1a22-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56f48d8f94292909d596dbeb56ff7f9f070bd316aa628b45c02ca2b2f5797f31", size = 8421483, upload-time = "2025-10-10T13:06:50.75Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cb/3ebe0e45b80724d4c2f849fdf304179727fd06df7fee7cd12fe6c3efe49d/ty-0.0.1a22-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:733e9ac22885b6574de26bdbae439c960a06acc825a938d3780c9d498bb65339", size = 8419225, upload-time = "2025-10-10T13:06:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b5/da65f3f8ad31d881ca9987a3f6f26069a0cc649c9354adb7453ca62116bb/ty-0.0.1a22-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5135d662484e56809c77b3343614005585caadaa5c1cf643ed6a09303497652b", size = 9352336, upload-time = "2025-10-10T13:06:54.476Z" }, + { url = "https://files.pythonhosted.org/packages/a3/24/9c46f2eb16734ab0fcf3291486b1c5c528a1569f94541dc1f19f97dd2a5b/ty-0.0.1a22-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:87f297f99a98154d33a3f21991979418c65d8bf480f6a1bad1e54d46d2dc7df7", size = 9857840, upload-time = "2025-10-10T13:06:56.514Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ae/930c94bbbe5c049eae5355a197c39522844f55c7ab7fccd0ba061f618541/ty-0.0.1a22-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3310217eaa4dccf20b7336fcbeb072097addc6fde0c9d3f791dea437af0aa6dc", size = 9452611, upload-time = 
"2025-10-10T13:06:58.154Z" }, + { url = "https://files.pythonhosted.org/packages/a2/80/d8f594438465c352cf0ebd4072f5ca3be2871153a3cd279ed2f35ecd487c/ty-0.0.1a22-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b032e81012bf5228fd65f01b50e29eb409534b6aac28ee5c48ee3b7b860ddf", size = 9214875, upload-time = "2025-10-10T13:06:59.861Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/f852fb20ac27707de495c39a02aeb056e3368833b7e12888d43b1f61594d/ty-0.0.1a22-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3ffda8149cab0000a21e7a078142073e27a1a9ac03b9a0837aa2f53d1fbebcb", size = 8906715, upload-time = "2025-10-10T13:07:01.926Z" }, + { url = "https://files.pythonhosted.org/packages/40/4d/0e0b85b4179891cc3067a6e717f5161921c07873a4f545963fdf1dd3619c/ty-0.0.1a22-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:afa512e7dc78f0cf0b55f87394968ba59c46993c67bc0ef295962144fea85b12", size = 8350873, upload-time = "2025-10-10T13:07:03.999Z" }, + { url = "https://files.pythonhosted.org/packages/a1/1f/e70c63e12b4a0d97d4fd6f872dd199113666ad1b236e18838fa5e5d5502d/ty-0.0.1a22-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:069cdbbea6025f7ebbb5e9043c8d0daf760358df46df8304ef5ca5bb3e320aef", size = 8442568, upload-time = "2025-10-10T13:07:05.745Z" }, + { url = "https://files.pythonhosted.org/packages/de/3b/55518906cb3598f2b99ff1e86c838d77d006cab70cdd2a0a625d02ccb52c/ty-0.0.1a22-py3-none-musllinux_1_2_i686.whl", hash = "sha256:67d31d902e6fd67a4b3523604f635e71d2ec55acfb9118f984600584bfe0ff2a", size = 8896775, upload-time = "2025-10-10T13:07:08.02Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ea/60c654c27931bf84fa9cb463a4c4c49e8869c052fa607a6e930be717b619/ty-0.0.1a22-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f9e154f262162e6f76b01f318e469ac6c22ffce22b010c396ed34e81d8369821", size = 9054544, upload-time = "2025-10-10T13:07:09.675Z" }, + { url = "https://files.pythonhosted.org/packages/6c/60/9a6d5530d6829ccf656e6ae0fb13d70a4e2514f4fb8910266ebd54286620/ty-0.0.1a22-py3-none-win32.whl", hash = "sha256:37525433ca7b02a8fca4b8fa9dcde818bf3a413b539b9dbc8f7b39d124eb7c49", size = 8165703, upload-time = "2025-10-10T13:07:11.378Z" }, + { url = "https://files.pythonhosted.org/packages/14/9c/ac08c832643850d4e18cbc959abc69cd51d531fe11bdb691098b3cf2f562/ty-0.0.1a22-py3-none-win_amd64.whl", hash = "sha256:75d21cdeba8bcef247af89518d7ce98079cac4a55c4160cb76682ea40a18b92c", size = 8828319, upload-time = "2025-10-10T13:07:12.815Z" }, + { url = "https://files.pythonhosted.org/packages/22/df/38068fc44e3cfb455aeb41d0ff1850a4d3c9988010466d4a8d19860b8b9a/ty-0.0.1a22-py3-none-win_arm64.whl", hash = "sha256:1c7f040fe311e9696917417434c2a0e58402235be842c508002c6a2eff1398b0", size = 8367136, upload-time = "2025-10-10T13:07:14.518Z" }, ] [[package]] name = "typer" -version = "0.17.4" +version = "0.19.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6115,9 +6081,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/92/e8/2a73ccf9874ec4c7638f172efc8972ceab13a0e3480b389d6ed822f7a822/typer-0.17.4.tar.gz", hash = "sha256:b77dc07d849312fd2bb5e7f20a7af8985c7ec360c45b051ed5412f64d8dc1580", size = 103734, upload-time = "2025-09-05T18:14:40.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = 
"sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/72/6b3e70d32e89a5cbb6a4513726c1ae8762165b027af569289e19ec08edd8/typer-0.17.4-py3-none-any.whl", hash = "sha256:015534a6edaa450e7007eba705d5c18c3349dcea50a6ad79a5ed530967575824", size = 46643, upload-time = "2025-09-05T18:14:39.166Z" }, + { url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" }, ] [[package]] @@ -6131,11 +6097,11 @@ wheels = [ [[package]] name = "types-awscrt" -version = "0.27.6" +version = "0.28.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/ce/5d84526a39f44c420ce61b16654193f8437d74b54f21597ea2ac65d89954/types_awscrt-0.27.6.tar.gz", hash = "sha256:9d3f1865a93b8b2c32f137514ac88cb048b5bc438739945ba19d972698995bfb", size = 16937, upload-time = "2025-08-13T01:54:54.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/19/a3a6377c9e2e389c1421c033a1830c29cac08f2e1e05a082ea84eb22c75f/types_awscrt-0.28.1.tar.gz", hash = "sha256:66d77ec283e1dc907526a44511a12624118723a396c36d3f3dd9855cb614ce14", size = 17410, upload-time = "2025-10-11T21:55:07.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/af/e3d20e3e81d235b3964846adf46a334645a8a9b25a0d3d472743eb079552/types_awscrt-0.27.6-py3-none-any.whl", hash = "sha256:18aced46da00a57f02eb97637a32e5894dc5aa3dc6a905ba3e5ed85b9f3c526b", size = 39626, upload-time = "2025-08-13T01:54:53.454Z" }, + { url = "https://files.pythonhosted.org/packages/ea/c7/0266b797d19b82aebe0e177efe35de7aabdc192bc1605ce3309331f0a505/types_awscrt-0.28.1-py3-none-any.whl", hash = "sha256:d88f43ef779f90b841ba99badb72fe153077225a4e426ae79e943184827b4443", size = 41851, upload-time = "2025-10-11T21:55:06.235Z" }, ] [[package]] @@ -6161,14 +6127,14 @@ wheels = [ [[package]] name = "types-cffi" -version = "1.17.0.20250822" +version = "1.17.0.20250915" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/0c/76a48cb6e742cac4d61a4ec632dd30635b6d302f5acdc2c0a27572ac7ae3/types_cffi-1.17.0.20250822.tar.gz", hash = "sha256:bf6f5a381ea49da7ff895fae69711271e6192c434470ce6139bf2b2e0d0fa08d", size = 17130, upload-time = "2025-08-22T03:04:02.445Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/f7/68029931e7539e3246b33386a19c475f234c71d2a878411847b20bb31960/types_cffi-1.17.0.20250822-py3-none-any.whl", hash = "sha256:183dd76c1871a48936d7b931488e41f0f25a7463abe10b5816be275fc11506d5", size = 20083, upload-time = "2025-08-22T03:04:01.466Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112, upload-time = "2025-09-15T03:01:24.187Z" }, ] 
[[package]] @@ -6256,11 +6222,11 @@ wheels = [ [[package]] name = "types-html5lib" -version = "1.1.11.20250809" +version = "1.1.11.20250917" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/ab/6aa4c487ae6f4f9da5153143bdc9e9b4fbc2b105df7ef8127fb920dc1f21/types_html5lib-1.1.11.20250809.tar.gz", hash = "sha256:7976ec7426bb009997dc5e072bca3ed988dd747d0cbfe093c7dfbd3d5ec8bf57", size = 16793, upload-time = "2025-08-09T03:14:20.819Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/4b/a970718e8bd9324ee8fb8eaf02ff069f6d03c20d4523bb4232892ecc3d06/types_html5lib-1.1.11.20250917.tar.gz", hash = "sha256:7b52743377f33f9b4fd7385afbd2d457b8864ee51f90ff2a795ad9e8c053373a", size = 16868, upload-time = "2025-09-17T02:47:41.18Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/05/328a2d6ecbd8aa3e16512600da78b1fe4605125896794a21824f3cac6f14/types_html5lib-1.1.11.20250809-py3-none-any.whl", hash = "sha256:e5f48ab670ae4cdeafd88bbc47113d8126dcf08318e0b8d70df26ecc13eca9b6", size = 22867, upload-time = "2025-08-09T03:14:20.048Z" }, + { url = "https://files.pythonhosted.org/packages/78/8a/da91a9c64dcb5e69beb567519857411996d8ecae9f6f128bcef8260e7a8d/types_html5lib-1.1.11.20250917-py3-none-any.whl", hash = "sha256:b294fd06d60da205daeb2f615485ca4d475088d2eff1009cf427f4a80fcd5346", size = 22908, upload-time = "2025-09-17T02:47:40.39Z" }, ] [[package]] @@ -6322,20 +6288,20 @@ wheels = [ [[package]] name = "types-openpyxl" -version = "3.1.5.20250822" +version = "3.1.5.20250919" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/7f/ea358482217448deafdb9232f198603511d2efa99e429822256f2b38975a/types_openpyxl-3.1.5.20250822.tar.gz", hash = "sha256:c8704a163e3798290d182c13c75da85f68cd97ff9b35f0ebfb94cf72f8b67bb3", size = 100858, upload-time = "2025-08-22T03:03:31.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/12/8bc4a25d49f1e4b7bbca868daa3ee80b1983d8137b4986867b5b65ab2ecd/types_openpyxl-3.1.5.20250919.tar.gz", hash = "sha256:232b5906773eebace1509b8994cdadda043f692cfdba9bfbb86ca921d54d32d7", size = 100880, upload-time = "2025-09-19T02:54:39.997Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/e8/cac4728e8dcbeb69d6de7de26bb9edb508e9f5c82476ecda22b58b939e60/types_openpyxl-3.1.5.20250822-py3-none-any.whl", hash = "sha256:da7a430d99c48347acf2dc351695f9db6ff90ecb761fed577b4a98fef2d0f831", size = 166093, upload-time = "2025-08-22T03:03:30.686Z" }, + { url = "https://files.pythonhosted.org/packages/36/3c/d49cf3f4489a10e9ddefde18fd258f120754c5825d06d145d9a0aaac770b/types_openpyxl-3.1.5.20250919-py3-none-any.whl", hash = "sha256:bd06f18b12fd5e1c9f0b666ee6151d8140216afa7496f7ebb9fe9d33a1a3ce99", size = 166078, upload-time = "2025-09-19T02:54:38.657Z" }, ] [[package]] name = "types-pexpect" -version = "4.9.0.20250809" +version = "4.9.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/a2/29564e69dee62f0f887ba7bfffa82fa4975504952e6199b218d3b403becd/types_pexpect-4.9.0.20250809.tar.gz", hash = "sha256:17a53c785b847c90d0be9149b00b0254e6e92c21cd856e853dac810ddb20101f", size = 13240, upload-time = "2025-08-09T03:15:04.554Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/e6/cc43e306dc7de14ec7861c24ac4957f688741ae39ae685049695d796b587/types_pexpect-4.9.0.20250916.tar.gz", hash = "sha256:69e5fed6199687a730a572de780a5749248a4c5df2ff1521e194563475c9928d", size = 13322, 
upload-time = "2025-09-16T02:49:25.61Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/1b/4d557287e6672feb749cf0d8ef5eb19189aff043e73e509e3775febc1cf1/types_pexpect-4.9.0.20250809-py3-none-any.whl", hash = "sha256:d19d206b8a7c282dac9376f26f072e036d22e9cf3e7d8eba3f477500b1f39101", size = 17039, upload-time = "2025-08-09T03:15:03.528Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6d/7740e235a9fb2570968da7d386d7feb511ce68cd23472402ff8cdf7fc78f/types_pexpect-4.9.0.20250916-py3-none-any.whl", hash = "sha256:7fa43cb96042ac58bc74f7c28e5d85782be0ee01344149886849e9d90936fe8a", size = 17057, upload-time = "2025-09-16T02:49:24.546Z" }, ] [[package]] @@ -6349,20 +6315,20 @@ wheels = [ [[package]] name = "types-psutil" -version = "7.0.0.20250822" +version = "7.0.0.20251001" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6d/aa/09699c829d7cc4624138d3ae67eecd4de9574e55729b1c63ca3e5a657f86/types_psutil-7.0.0.20250822.tar.gz", hash = "sha256:226cbc0c0ea9cc0a50b8abcc1d91a26c876dcb40be238131f697883690419698", size = 20358, upload-time = "2025-08-22T03:02:04.556Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/91/b020f9100b196a1f247cd12575f68dcdad94f032c1e0c42987d7632142ce/types_psutil-7.0.0.20251001.tar.gz", hash = "sha256:60d696200ddae28677e7d88cdebd6e960294e85adefbaafe0f6e5d0e7b4c1963", size = 20469, upload-time = "2025-10-01T03:04:21.292Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/46/45006309e20859e12c024d91bb913e6b89a706cd6f9377031c9f7e274ece/types_psutil-7.0.0.20250822-py3-none-any.whl", hash = "sha256:81c82f01aba5a4510b9d8b28154f577b780be75a08954aed074aa064666edc09", size = 23110, upload-time = "2025-08-22T03:02:03.38Z" }, + { url = "https://files.pythonhosted.org/packages/c0/99/50f30e0b648e6f583165cb2e535b0256a02a03efa4868cb2f017ad25b3d8/types_psutil-7.0.0.20251001-py3-none-any.whl", hash = "sha256:adc31de8386d31c61bd4123112fd51e2c700c7502a001cad72a3d56ba6b463d1", size = 23164, upload-time = "2025-10-01T03:04:20.089Z" }, ] [[package]] name = "types-psycopg2" -version = "2.9.21.20250809" +version = "2.9.21.20251012" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/d0/66f3f04bab48bfdb2c8b795b2b3e75eb20c7d1fb0516916db3be6aa4a683/types_psycopg2-2.9.21.20250809.tar.gz", hash = "sha256:b7c2cbdcf7c0bd16240f59ba694347329b0463e43398de69784ea4dee45f3c6d", size = 26539, upload-time = "2025-08-09T03:14:54.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/b3/2d09eaf35a084cffd329c584970a3fa07101ca465c13cad1576d7c392587/types_psycopg2-2.9.21.20251012.tar.gz", hash = "sha256:4cdafd38927da0cfde49804f39ab85afd9c6e9c492800e42f1f0c1a1b0312935", size = 26710, upload-time = "2025-10-12T02:55:39.5Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/98/182497602921c47fadc8470d51a32e5c75343c8931c0b572a5c4ae3b948b/types_psycopg2-2.9.21.20250809-py3-none-any.whl", hash = "sha256:59b7b0ed56dcae9efae62b8373497274fc1a0484bdc5135cdacbe5a8f44e1d7b", size = 24824, upload-time = "2025-08-09T03:14:53.908Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/05feaf8cb51159f2c0af04b871dab7e98a2f83a3622f5f216331d2dd924c/types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e", size = 24883, upload-time = "2025-10-12T02:55:38.439Z" }, ] [[package]] @@ -6379,11 +6345,11 @@ wheels = [ [[package]] name = "types-pymysql" -version = 
"1.1.0.20250909" +version = "1.1.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/0f/bb4331221fd560379ec702d61a11d5a5eead9a2866bb39eae294bde29988/types_pymysql-1.1.0.20250909.tar.gz", hash = "sha256:5ba7230425635b8c59316353701b99a087b949e8002dfeff652be0b62cee445b", size = 22189, upload-time = "2025-09-09T02:55:31.039Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/12/bda1d977c07e0e47502bede1c44a986dd45946494d89e005e04cdeb0f8de/types_pymysql-1.1.0.20250916.tar.gz", hash = "sha256:98d75731795fcc06723a192786662bdfa760e1e00f22809c104fbb47bac5e29b", size = 22131, upload-time = "2025-09-16T02:49:22.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/35/5681d881506a31bbbd9f7d5f6edcbf65489835081965b539b0802a665036/types_pymysql-1.1.0.20250909-py3-none-any.whl", hash = "sha256:c9957d4c10a31748636da5c16b0a0eef6751354d05adcd1b86acb27e8df36fb6", size = 23179, upload-time = "2025-09-09T02:55:29.873Z" }, + { url = "https://files.pythonhosted.org/packages/21/eb/a225e32a6e7b196af67ab2f1b07363595f63255374cc3b88bfdab53b4ee8/types_pymysql-1.1.0.20250916-py3-none-any.whl", hash = "sha256:873eb9836bb5e3de4368cc7010ca72775f86e9692a5c7810f8c7f48da082e55b", size = 23063, upload-time = "2025-09-16T02:49:20.933Z" }, ] [[package]] @@ -6401,11 +6367,11 @@ wheels = [ [[package]] name = "types-python-dateutil" -version = "2.9.0.20250822" +version = "2.9.0.20251008" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0c/0a/775f8551665992204c756be326f3575abba58c4a3a52eef9909ef4536428/types_python_dateutil-2.9.0.20250822.tar.gz", hash = "sha256:84c92c34bd8e68b117bff742bc00b692a1e8531262d4507b33afcc9f7716cd53", size = 16084, upload-time = "2025-08-22T03:02:00.613Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/83/24ed25dd0c6277a1a170c180ad9eef5879ecc9a4745b58d7905a4588c80d/types_python_dateutil-2.9.0.20251008.tar.gz", hash = "sha256:c3826289c170c93ebd8360c3485311187df740166dbab9dd3b792e69f2bc1f9c", size = 16128, upload-time = "2025-10-08T02:51:34.93Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/d9/a29dfa84363e88b053bf85a8b7f212a04f0d7343a4d24933baa45c06e08b/types_python_dateutil-2.9.0.20250822-py3-none-any.whl", hash = "sha256:849d52b737e10a6dc6621d2bd7940ec7c65fcb69e6aa2882acf4e56b2b508ddc", size = 17892, upload-time = "2025-08-22T03:01:59.436Z" }, + { url = "https://files.pythonhosted.org/packages/da/af/5d24b8d49ef358468ecfdff5c556adf37f4fd28e336b96f923661a808329/types_python_dateutil-2.9.0.20251008-py3-none-any.whl", hash = "sha256:b9a5232c8921cf7661b29c163ccc56055c418ab2c6eabe8f917cbcc73a4c4157", size = 17934, upload-time = "2025-10-08T02:51:33.55Z" }, ] [[package]] @@ -6437,11 +6403,11 @@ wheels = [ [[package]] name = "types-pyyaml" -version = "6.0.12.20250822" +version = "6.0.12.20250915" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/85/90a442e538359ab5c9e30de415006fb22567aa4301c908c09f19e42975c2/types_pyyaml-6.0.12.20250822.tar.gz", hash = "sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413", size = 17481, upload-time = "2025-08-22T03:02:16.209Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = 
"2025-09-15T03:01:00.728Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/8e/8f0aca667c97c0d76024b37cffa39e76e2ce39ca54a38f285a64e6ae33ba/types_pyyaml-6.0.12.20250822-py3-none-any.whl", hash = "sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098", size = 20314, upload-time = "2025-08-22T03:02:15.002Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, ] [[package]] @@ -6468,36 +6434,23 @@ wheels = [ [[package]] name = "types-requests" -version = "2.32.4.20250809" +version = "2.32.4.20250913" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" }, -] - -[[package]] -name = "types-requests-oauthlib" -version = "2.0.0.20250809" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "types-oauthlib" }, - { name = "types-requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ed/40/5eca857a2dbda0fedd69b7fd3f51cb0b6ece8d448327d29f0ae54612ec98/types_requests_oauthlib-2.0.0.20250809.tar.gz", hash = "sha256:f3b9b31e0394fe2c362f0d44bc9ef6d5c150a298d01089513cd54a51daec37a2", size = 11008, upload-time = "2025-08-09T03:17:50.705Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/38/8777f0ab409a7249777f230f6aefe0e9ba98355dc8b05fb31391fa30f312/types_requests_oauthlib-2.0.0.20250809-py3-none-any.whl", hash = "sha256:0d1af4907faf9f4a1b0f0afbc7ec488f1dd5561a2b5b6dad70f78091a1acfb76", size = 14319, upload-time = "2025-08-09T03:17:49.786Z" }, + { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, ] [[package]] name = "types-s3transfer" -version = "0.13.1" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/c5/23946fac96c9dd5815ec97afd1c8ad6d22efa76c04a79a4823f2f67692a5/types_s3transfer-0.13.1.tar.gz", hash = "sha256:ce488d79fdd7d3b9d39071939121eca814ec65de3aa36bdce1f9189c0a61cc80", size = 14181, upload-time = "2025-08-31T16:57:06.93Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/8e/9b/8913198b7fc700acc1dcb84827137bb2922052e43dde0f4fb0ed2dc6f118/types_s3transfer-0.14.0.tar.gz", hash = "sha256:17f800a87c7eafab0434e9d87452c809c290ae906c2024c24261c564479e9c95", size = 14218, upload-time = "2025-10-11T21:11:27.892Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/dc/b3f9b5c93eed6ffe768f4972661250584d5e4f248b548029026964373bcd/types_s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:4ff730e464a3fd3785b5541f0f555c1bd02ad408cf82b6b7a95429f6b0d26b4a", size = 19617, upload-time = "2025-08-31T16:57:05.73Z" }, + { url = "https://files.pythonhosted.org/packages/92/c3/4dfb2e87c15ca582b7d956dfb7e549de1d005c758eb9a305e934e1b83fda/types_s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:108134854069a38b048e9b710b9b35904d22a9d0f37e4e1889c2e6b58e5b3253", size = 19697, upload-time = "2025-10-11T21:11:26.749Z" }, ] [[package]] @@ -6532,25 +6485,25 @@ wheels = [ [[package]] name = "types-six" -version = "1.17.0.20250515" +version = "1.17.0.20251009" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/78/344047eeced8d230140aa3d9503aa969acb61c6095e7308bbc1ff1de3865/types_six-1.17.0.20250515.tar.gz", hash = "sha256:f4f7f0398cb79304e88397336e642b15e96fbeacf5b96d7625da366b069d2d18", size = 15598, upload-time = "2025-05-15T03:04:19.806Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/f7/448215bc7695cfa0c8a7e0dcfa54fe31b1d52fb87004fed32e659dd85c80/types_six-1.17.0.20251009.tar.gz", hash = "sha256:efe03064ecd0ffb0f7afe133990a2398d8493d8d1c1cc10ff3dfe476d57ba44f", size = 15552, upload-time = "2025-10-09T02:54:26.02Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/85/5ee1c8e35b33b9c8ea1816d5a4e119c27f8bb1539b73b1f636f07aa64750/types_six-1.17.0.20250515-py3-none-any.whl", hash = "sha256:adfaa9568caf35e03d80ffa4ed765c33b282579c869b40bf4b6009c7d8db3fb1", size = 19987, upload-time = "2025-05-15T03:04:18.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2f/94baa623421940e3eb5d2fc63570ebb046f2bb4d9573b8787edab3ed2526/types_six-1.17.0.20251009-py3-none-any.whl", hash = "sha256:2494f4c2a58ada0edfe01ea84b58468732e43394c572d9cf5b1dd06d86c487a3", size = 19935, upload-time = "2025-10-09T02:54:25.096Z" }, ] [[package]] name = "types-tensorflow" -version = "2.18.0.20250809" +version = "2.18.0.20251008" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "types-protobuf" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/84/d350f0170a043283cd805344658522b00d769d04753b5a1685c1c8a06731/types_tensorflow-2.18.0.20250809.tar.gz", hash = "sha256:9ed54cbb24c8b12d8c59b9a8afbf7c5f2d46d5e2bf42d00ececaaa79e21d7ed1", size = 257495, upload-time = "2025-08-09T03:17:36.093Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/0a/13bde03fb5a23faaadcca2d6914f865e444334133902310ea05e6ade780c/types_tensorflow-2.18.0.20251008.tar.gz", hash = "sha256:8db03d4dd391a362e2ea796ffdbccb03c082127606d4d852edb7ed9504745933", size = 257550, upload-time = "2025-10-08T02:51:51.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/1c/cc50c17971643a92d5973d35a3d35f017f9d759d95fb7fdafa568a59ba9c/types_tensorflow-2.18.0.20250809-py3-none-any.whl", hash = "sha256:e9aae9da92ddb9991ebd27117db2c2dffe29d7d019db2a70166fd0d099c4fa4f", size = 329000, upload-time = "2025-08-09T03:17:35.02Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/cc/e50e49db621b0cf03c1f3d10be47389de41a02dc9924c3a83a9c1a55bf28/types_tensorflow-2.18.0.20251008-py3-none-any.whl", hash = "sha256:d6b0dd4d81ac6d9c5af803ebcc8ce0f65c5850c063e8b9789dc828898944b5f4", size = 329023, upload-time = "2025-10-08T02:51:50.024Z" }, ] [[package]] @@ -6598,14 +6551,14 @@ wheels = [ [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] @@ -6747,11 +6700,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.5.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" }, ] [[package]] @@ -6765,15 +6718,15 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = 
"2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, ] [package.optional-dependencies] @@ -6845,7 +6798,7 @@ wheels = [ [[package]] name = "wandb" -version = "0.21.4" +version = "0.22.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6859,17 +6812,17 @@ dependencies = [ { name = "sentry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/59/a8/aaa3f3f8e410f34442466aac10b1891b3084d35b98aef59ebcb4c0efb941/wandb-0.21.4.tar.gz", hash = "sha256:b350d50973409658deb455010fafcfa81e6be3470232e316286319e839ffb67b", size = 40175929, upload-time = "2025-09-11T21:14:29.161Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/a8/680bd77e11a278e6c14a2cb4646e8ab9525b2baaa81c3d12dc0f616aa4aa/wandb-0.22.2.tar.gz", hash = "sha256:510f5a1ac30d16921c36c3b932da852f046641d4aee98a86a7f5ec03a6e95bda", size = 41401439, upload-time = "2025-10-07T19:54:21.88Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/6b/3a8d9db18a4c4568599a8792c0c8b1f422d9864c7123e8301a9477fbf0ac/wandb-0.21.4-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:c681ef7adb09925251d8d995c58aa76ae86a46dbf8de3b67353ad99fdef232d5", size = 18845369, upload-time = "2025-09-11T21:14:02.879Z" }, - { url = "https://files.pythonhosted.org/packages/60/e0/d7d6818938ec6958c93d979f9a90ea3d06bdc41e130b30f8cd89ae03c245/wandb-0.21.4-py3-none-macosx_12_0_arm64.whl", hash = "sha256:d35acc65c10bb7ac55d1331f7b1b8ab761f368f7b051131515f081a56ea5febc", size = 18339122, upload-time = "2025-09-11T21:14:06.455Z" }, - { url = "https://files.pythonhosted.org/packages/13/29/9bb8ed4adf32bed30e4d5df74d956dd1e93b6fd4bbc29dbe84167c84804b/wandb-0.21.4-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:765e66b57b7be5f393ecebd9a9d2c382c9f979d19cdee4a3f118eaafed43fca1", size = 19081975, upload-time = "2025-09-11T21:14:09.317Z" }, - { url = "https://files.pythonhosted.org/packages/30/6e/4aa33bc2c56b70c0116e73687c72c7a674f4072442633b3b23270d2215e3/wandb-0.21.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06127ec49245d12fdb3922c1eca1ab611cefc94adabeaaaba7b069707c516cba", size = 18161358, upload-time = "2025-09-11T21:14:12.092Z" }, - { url = "https://files.pythonhosted.org/packages/f7/56/d9f845ecfd5e078cf637cb29d8abe3350b8a174924c54086168783454a8f/wandb-0.21.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48d4f65f1be5f5a25b868695e09cdbfe481678220df349a8c2cbed3992fb497f", size = 19602680, upload-time = "2025-09-11T21:14:14.987Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/ea/237a3c2b679a35e02e577c5bf844d6a221a7d32925ab8d5230529e9f2841/wandb-0.21.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ebd11f78351a3ca22caa1045146a6d2ad9e62fed6d0de2e67a0db5710d75103a", size = 18166392, upload-time = "2025-09-11T21:14:17.478Z" }, - { url = "https://files.pythonhosted.org/packages/12/e3/dbf2c575c79c99d94f16ce1a2cbbb2529d5029a76348c1ddac7e47f6873f/wandb-0.21.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:595b9e77591a805653e05db8b892805ee0a5317d147ef4976353e4f1cc16ebdc", size = 19678800, upload-time = "2025-09-11T21:14:20.264Z" }, - { url = "https://files.pythonhosted.org/packages/fa/eb/4ed04879d697772b8eb251c0e5af9a4ff7e2cc2b3fcd4b8eee91253ec2f1/wandb-0.21.4-py3-none-win32.whl", hash = "sha256:f9c86eb7eb7d40c6441533428188b1ae3205674e80c940792d850e2c1fe8d31e", size = 18738950, upload-time = "2025-09-11T21:14:23.08Z" }, - { url = "https://files.pythonhosted.org/packages/c3/4a/86c5e19600cb6a616a45f133c26826b46133499cd72d592772929d530ccd/wandb-0.21.4-py3-none-win_amd64.whl", hash = "sha256:2da3d5bb310a9f9fb7f680f4aef285348095a4cc6d1ce22b7343ba4e3fffcd84", size = 18738953, upload-time = "2025-09-11T21:14:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/e7/b3/8c637fb594cfd574ce9c9f7d0ac2f2d12742eb38ec59dcbb713beae95343/wandb-0.22.2-py3-none-macosx_12_0_arm64.whl", hash = "sha256:2e29c9fa4462b5411b2cd2175ae33eff4309c91de7c426bca6bc8e7abc7e5dec", size = 18677549, upload-time = "2025-10-07T19:54:00.839Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f3/e309a726eaebddad6b8d9a73a50891e5796962ec8a091bb6a61d31692d1e/wandb-0.22.2-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:c42d594cd7a9da4fd39ecdb0abbc081b61f304123277b2b6c4ba84283956fd21", size = 19715188, upload-time = "2025-10-07T19:54:03.805Z" }, + { url = "https://files.pythonhosted.org/packages/f9/73/fad59910215876008f4781b57d828d1b19b3677c9b46af615e7229746435/wandb-0.22.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5188d84e66d3fd584f3b3ae4d2a70e78f29403c0528e6aecaa4188a1fcf54d8", size = 18463148, upload-time = "2025-10-07T19:54:05.676Z" }, + { url = "https://files.pythonhosted.org/packages/87/11/572c1913b5b92e4c519f735adfae572b46f2d79d99ede63eec0d6a272d6e/wandb-0.22.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88ccd484af9f21cfc127976793c3cf66cfe1acd75bd8cd650086a64e88bac4bf", size = 19908645, upload-time = "2025-10-07T19:54:07.693Z" }, + { url = "https://files.pythonhosted.org/packages/6d/0d/133aa82f5a505ba638b4fda5014cefddfe7f1f6238ef4afc0871ec61c41f/wandb-0.22.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:abf0ed175e791af64110e0a0b99ce02bbbbd1017722bc32d3bc328efb86450cd", size = 18501348, upload-time = "2025-10-07T19:54:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d5/776203be2601872f01dacc6a5b4274106ec0db7cd3bf2cdb3b741f8fc932/wandb-0.22.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:44e77c56403b90bf3473a7ca3bfc4d42c636b7c0e31a5fb9cd0382f08302f74b", size = 20001756, upload-time = "2025-10-07T19:54:12.452Z" }, + { url = "https://files.pythonhosted.org/packages/30/43/ae3fa46e20b1d9a6508dd9abe716d57205c038ed4661c5c98ace48a60eac/wandb-0.22.2-py3-none-win32.whl", hash = "sha256:44d12bd379dbe15be5ceed6bdf23803d42f648ba0dd111297b4c47a3c7be6dbd", size = 19075950, upload-time = "2025-10-07T19:54:14.892Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/59/c174321e868205f7a659d1e5ec51f546e62267296d6f4179bb9119294964/wandb-0.22.2-py3-none-win_amd64.whl", hash = "sha256:c95eb221bf316c0872f7ac55071856b9f25f95a2de983ada48acf653ce259386", size = 19075953, upload-time = "2025-10-07T19:54:16.837Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a2/c7c24fda78513cab5686949d8cb36459dbbccbbb4b2b6fc67237ece31a00/wandb-0.22.2-py3-none-win_arm64.whl", hash = "sha256:20d2ab9aa10445aab3d60914a980f002a4f66566e28b0cd156b1e462f0080a0d", size = 17383217, upload-time = "2025-10-07T19:54:19.384Z" }, ] [[package]] @@ -6915,11 +6868,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, ] [[package]] @@ -6948,16 +6901,20 @@ wheels = [ [[package]] name = "weaviate-client" -version = "3.24.2" +version = "4.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, - { name = "requests" }, + { name = "deprecation" }, + { name = "grpcio" }, + { name = "httpx" }, + { name = "protobuf" }, + { name = "pydantic" }, { name = "validators" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/c1/3285a21d8885f2b09aabb65edb9a8e062a35c2d7175e1bb024fa096582ab/weaviate-client-3.24.2.tar.gz", hash = "sha256:6914c48c9a7e5ad0be9399271f9cb85d6f59ab77476c6d4e56a3925bf149edaa", size = 199332, upload-time = "2023-10-04T08:37:54.26Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019, upload-time = "2025-09-26T11:20:27.381Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/98/3136d05f93e30cf29e1db280eaadf766df18d812dfe7994bcced653b2340/weaviate_client-3.24.2-py3-none-any.whl", hash = "sha256:bc50ca5fcebcd48de0d00f66700b0cf7c31a97c4cd3d29b4036d77c5d1d9479b", size = 107968, upload-time = "2023-10-04T08:37:52.511Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763, upload-time = "2025-09-26T11:20:25.864Z" }, ] [[package]] @@ 
-6971,11 +6928,11 @@ wheels = [ [[package]] name = "websocket-client" -version = "1.8.0" +version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, ] [[package]] @@ -7084,20 +7041,20 @@ wheels = [ [[package]] name = "xlsxwriter" -version = "3.2.5" +version = "3.2.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/47/7704bac42ac6fe1710ae099b70e6a1e68ed173ef14792b647808c357da43/xlsxwriter-3.2.5.tar.gz", hash = "sha256:7e88469d607cdc920151c0ab3ce9cf1a83992d4b7bc730c5ffdd1a12115a7dbe", size = 213306, upload-time = "2025-06-17T08:59:14.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/2c/c06ef49dc36e7954e55b802a8b231770d286a9758b3d936bd1e04ce5ba88/xlsxwriter-3.2.9.tar.gz", hash = "sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c", size = 215940, upload-time = "2025-09-16T00:16:21.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/34/a22e6664211f0c8879521328000bdcae9bf6dbafa94a923e531f6d5b3f73/xlsxwriter-3.2.5-py3-none-any.whl", hash = "sha256:4f4824234e1eaf9d95df9a8fe974585ff91d0f5e3d3f12ace5b71e443c1c6abd", size = 172347, upload-time = "2025-06-17T08:59:13.453Z" }, + { url = "https://files.pythonhosted.org/packages/3a/0c/3662f4a66880196a590b202f0db82d919dd2f89e99a27fadef91c4a33d41/xlsxwriter-3.2.9-py3-none-any.whl", hash = "sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3", size = 175315, upload-time = "2025-09-16T00:16:20.108Z" }, ] [[package]] name = "xmltodict" -version = "0.15.1" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/7a/42f705c672e77dc3ce85a6823bb289055323aac30de7c4b9eca1e28b2c17/xmltodict-0.15.1.tar.gz", hash = "sha256:3d8d49127f3ce6979d40a36dbcad96f8bab106d232d24b49efdd4bd21716983c", size = 62984, upload-time = "2025-09-08T18:33:19.349Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725, upload-time = "2025-09-17T21:59:26.459Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/5d/4e/001c53a22f6bd5f383f49915a53e40f0cab2d3f1884d968f3ae14be367b7/xmltodict-0.15.1-py2.py3-none-any.whl", hash = "sha256:dcd84b52f30a15be5ac4c9099a0cb234df8758624b035411e329c5c1e7a49089", size = 11260, upload-time = "2025-09-08T18:33:17.87Z" }, + { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893, upload-time = "2025-09-17T21:59:24.859Z" }, ] [[package]] @@ -7169,65 +7126,62 @@ wheels = [ [[package]] name = "zope-interface" -version = "8.0" +version = "8.0.1" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "setuptools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/21/a6af230243831459f7238764acb3086a9cf96dbf405d8084d30add1ee2e7/zope_interface-8.0.tar.gz", hash = "sha256:b14d5aac547e635af749ce20bf49a3f5f93b8a854d2a6b1e95d4d5e5dc618f7d", size = 253397, upload-time = "2025-09-12T07:17:13.571Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/3a/7fcf02178b8fad0a51e67e32765cd039ae505d054d744d76b8c2bbcba5ba/zope_interface-8.0.1.tar.gz", hash = "sha256:eba5610d042c3704a48222f7f7c6ab5b243ed26f917e2bc69379456b115e02d1", size = 253746, upload-time = "2025-09-25T05:55:51.285Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/6f/a16fc92b643313a55a0d2ccb040dd69048372f0a8f64107570256e664e5c/zope_interface-8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec1da7b9156ae000cea2d19bad83ddb5c50252f9d7b186da276d17768c67a3cb", size = 207652, upload-time = "2025-09-12T07:23:51.746Z" }, - { url = "https://files.pythonhosted.org/packages/01/0c/6bebd9417072c3eb6163228783cabb4890e738520b45562ade1cbf7d19d6/zope_interface-8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:160ba50022b342451baf516de3e3a2cd2d8c8dbac216803889a5eefa67083688", size = 208096, upload-time = "2025-09-12T07:23:52.895Z" }, - { url = "https://files.pythonhosted.org/packages/62/f1/03c4d2b70ce98828760dfc19f34be62526ea8b7f57160a009d338f396eb4/zope_interface-8.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:879bb5bf937cde4acd738264e87f03c7bf7d45478f7c8b9dc417182b13d81f6c", size = 254770, upload-time = "2025-09-12T07:58:18.379Z" }, - { url = "https://files.pythonhosted.org/packages/bb/73/06400c668d7d334d2296d23b3dacace43f45d6e721c6f6d08ea512703ede/zope_interface-8.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fb931bf55c66a092c5fbfb82a0ff3cc3221149b185bde36f0afc48acb8dcd92", size = 259542, upload-time = "2025-09-12T08:00:27.632Z" }, - { url = "https://files.pythonhosted.org/packages/d9/28/565b5f41045aa520853410d33b420f605018207a854fba3d93ed85e7bef2/zope_interface-8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1858d1e5bb2c5ae766890708184a603eb484bb7454e306e967932a9f3c558b07", size = 260720, upload-time = "2025-09-12T08:29:19.238Z" }, - { url = "https://files.pythonhosted.org/packages/c5/46/6c6b0df12665fec622133932a361829b6e6fbe255e6ce01768eedbcb7fa0/zope_interface-8.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e88c66ebedd1e839082f308b8372a50ef19423e01ee2e09600b80e765a10234", size = 211914, upload-time = "2025-09-12T07:23:19.858Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/42/9c79e4b2172e2584727cbc35bba1ea6884c15f1a77fe2b80ed8358893bb2/zope_interface-8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b80447a3a5c7347f4ebf3e50de319c8d2a5dabd7de32f20899ac50fc275b145d", size = 208359, upload-time = "2025-09-12T07:23:40.746Z" }, - { url = "https://files.pythonhosted.org/packages/d9/3a/77b5e3dbaced66141472faf788ea20e9b395076ea6fd30e2fde4597047b1/zope_interface-8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:67047a4470cb2fddb5ba5105b0160a1d1c30ce4b300cf264d0563136adac4eac", size = 208547, upload-time = "2025-09-12T07:23:42.088Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d3/a920b3787373e717384ef5db2cafaae70d451b8850b9b4808c024867dd06/zope_interface-8.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:1bee9c1b42513148f98d3918affd829804a5c992c000c290dc805f25a75a6a3f", size = 258986, upload-time = "2025-09-12T07:58:20.681Z" }, - { url = "https://files.pythonhosted.org/packages/4d/37/c7f5b1ccfcbb0b90d57d02b5744460e9f77a84932689ca8d99a842f330b2/zope_interface-8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:804ebacb2776eb89a57d9b5e9abec86930e0ee784a0005030801ae2f6c04d5d8", size = 264438, upload-time = "2025-09-12T08:00:28.921Z" }, - { url = "https://files.pythonhosted.org/packages/43/eb/fd6fefc92618bdf16fbfd71fb43ed206f99b8db5a0dd55797f4e33d7dd75/zope_interface-8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c4d9d3982aaa88b177812cd911ceaf5ffee4829e86ab3273c89428f2c0c32cc4", size = 263971, upload-time = "2025-09-12T08:29:20.693Z" }, - { url = "https://files.pythonhosted.org/packages/d9/ca/f99f4ef959b2541f0a3e05768d9ff48ad055d4bed00c7a438b088d54196a/zope_interface-8.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea1f2e47bc0124a03ee1e5fb31aee5dfde876244bcc552b9e3eb20b041b350d7", size = 212031, upload-time = "2025-09-12T07:23:04.755Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2f/c10c739bcb9b072090c97c2e08533777497190daa19d190d72b4cce9c7cb/zope_interface-8.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4bd01022d2e1bce4a4a4ed9549edb25393c92e607d7daa6deff843f1f68b479d", size = 207903, upload-time = "2025-09-25T05:58:21.671Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e1/9845ac3697f108d9a1af6912170c59a23732090bbfb35955fe77e5544955/zope_interface-8.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:29be8db8b712d94f1c05e24ea230a879271d787205ba1c9a6100d1d81f06c69a", size = 208345, upload-time = "2025-09-25T05:58:24.217Z" }, + { url = "https://files.pythonhosted.org/packages/f2/49/6573bc8b841cfab18e80c8e8259f1abdbbf716140011370de30231be79ad/zope_interface-8.0.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:51ae1b856565b30455b7879fdf0a56a88763b401d3f814fa9f9542d7410dbd7e", size = 255027, upload-time = "2025-09-25T05:58:19.975Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fd/908b0fd4b1ab6e412dfac9bd2b606f2893ef9ba3dd36d643f5e5b94c57b3/zope_interface-8.0.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d2e7596149cb1acd1d4d41b9f8fe2ffc0e9e29e2e91d026311814181d0d9efaf", size = 259800, upload-time = "2025-09-25T05:58:11.487Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/78/8419a2b4e88410520ed4b7f93bbd25a6d4ae66c4e2b131320f2b90f43077/zope_interface-8.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2737c11c34fb9128816759864752d007ec4f987b571c934c30723ed881a7a4f", size = 260978, upload-time = "2025-09-25T06:26:24.483Z" }, + { url = "https://files.pythonhosted.org/packages/e5/90/caf68152c292f1810e2bd3acd2177badf08a740aa8a348714617d6c9ad0b/zope_interface-8.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:cf66e4bf731aa7e0ced855bb3670e8cda772f6515a475c6a107bad5cb6604103", size = 212155, upload-time = "2025-09-25T05:59:40.318Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/0f08713ddda834c428ebf97b2a7fd8dea50c0100065a8955924dbd94dae8/zope_interface-8.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:115f27c1cc95ce7a517d960ef381beedb0a7ce9489645e80b9ab3cbf8a78799c", size = 208609, upload-time = "2025-09-25T05:58:53.698Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/d423045f54dc81e0991ec655041e7a0eccf6b2642535839dd364b35f4d7f/zope_interface-8.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af655c573b84e3cb6a4f6fd3fbe04e4dc91c63c6b6f99019b3713ef964e589bc", size = 208797, upload-time = "2025-09-25T05:58:56.258Z" }, + { url = "https://files.pythonhosted.org/packages/c6/43/39d4bb3f7a80ebd261446792493cfa4e198badd47107224f5b6fe1997ad9/zope_interface-8.0.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:23f82ef9b2d5370750cc1bf883c3b94c33d098ce08557922a3fbc7ff3b63dfe1", size = 259242, upload-time = "2025-09-25T05:58:21.602Z" }, + { url = "https://files.pythonhosted.org/packages/da/29/49effcff64ef30731e35520a152a9dfcafec86cf114b4c2aff942e8264ba/zope_interface-8.0.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35a1565d5244997f2e629c5c68715b3d9d9036e8df23c4068b08d9316dcb2822", size = 264696, upload-time = "2025-09-25T05:58:13.351Z" }, + { url = "https://files.pythonhosted.org/packages/c7/39/b947673ec9a258eeaa20208dd2f6127d9fbb3e5071272a674ebe02063a78/zope_interface-8.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:029ea1db7e855a475bf88d9910baab4e94d007a054810e9007ac037a91c67c6f", size = 264229, upload-time = "2025-09-25T06:26:26.226Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ee/eed6efd1fc3788d1bef7a814e0592d8173b7fe601c699b935009df035fc2/zope_interface-8.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0beb3e7f7dc153944076fcaf717a935f68d39efa9fce96ec97bafcc0c2ea6cab", size = 212270, upload-time = "2025-09-25T05:58:53.584Z" }, ] [[package]] name = "zstandard" -version = "0.24.0" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/1b/c20b2ef1d987627765dcd5bf1dadb8ef6564f00a87972635099bb76b7a05/zstandard-0.24.0.tar.gz", hash = "sha256:fe3198b81c00032326342d973e526803f183f97aa9e9a98e3f897ebafe21178f", size = 905681, upload-time = "2025-08-17T18:36:36.352Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/01/1f/5c72806f76043c0ef9191a2b65281dacdf3b65b0828eb13bb2c987c4fb90/zstandard-0.24.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:addfc23e3bd5f4b6787b9ca95b2d09a1a67ad5a3c318daaa783ff90b2d3a366e", size = 795228, upload-time = "2025-08-17T18:21:46.978Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ba/3059bd5cd834666a789251d14417621b5c61233bd46e7d9023ea8bc1043a/zstandard-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b005bcee4be9c3984b355336283afe77b2defa76ed6b89332eced7b6fa68b68", size = 640520, upload-time = "2025-08-17T18:21:48.162Z" }, - { url = "https://files.pythonhosted.org/packages/57/07/f0e632bf783f915c1fdd0bf68614c4764cae9dd46ba32cbae4dd659592c3/zstandard-0.24.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:3f96a9130171e01dbb6c3d4d9925d604e2131a97f540e223b88ba45daf56d6fb", size = 5347682, upload-time = "2025-08-17T18:21:50.266Z" }, - { url = "https://files.pythonhosted.org/packages/a6/4c/63523169fe84773a7462cd090b0989cb7c7a7f2a8b0a5fbf00009ba7d74d/zstandard-0.24.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd0d3d16e63873253bad22b413ec679cf6586e51b5772eb10733899832efec42", size = 5057650, upload-time = "2025-08-17T18:21:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/c6/16/49013f7ef80293f5cebf4c4229535a9f4c9416bbfd238560edc579815dbe/zstandard-0.24.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:b7a8c30d9bf4bd5e4dcfe26900bef0fcd9749acde45cdf0b3c89e2052fda9a13", size = 5404893, upload-time = "2025-08-17T18:21:54.54Z" }, - { url = "https://files.pythonhosted.org/packages/4d/38/78e8bcb5fc32a63b055f2b99e0be49b506f2351d0180173674f516cf8a7a/zstandard-0.24.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:52cd7d9fa0a115c9446abb79b06a47171b7d916c35c10e0c3aa6f01d57561382", size = 5452389, upload-time = "2025-08-17T18:21:56.822Z" }, - { url = "https://files.pythonhosted.org/packages/55/8a/81671f05619edbacd49bd84ce6899a09fc8299be20c09ae92f6618ccb92d/zstandard-0.24.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0f6fc2ea6e07e20df48752e7700e02e1892c61f9a6bfbacaf2c5b24d5ad504b", size = 5558888, upload-time = "2025-08-17T18:21:58.68Z" }, - { url = "https://files.pythonhosted.org/packages/49/cc/e83feb2d7d22d1f88434defbaeb6e5e91f42a4f607b5d4d2d58912b69d67/zstandard-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e46eb6702691b24ddb3e31e88b4a499e31506991db3d3724a85bd1c5fc3cfe4e", size = 5048038, upload-time = "2025-08-17T18:22:00.642Z" }, - { url = "https://files.pythonhosted.org/packages/08/c3/7a5c57ff49ef8943877f85c23368c104c2aea510abb339a2dc31ad0a27c3/zstandard-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5e3b9310fd7f0d12edc75532cd9a56da6293840c84da90070d692e0bb15f186", size = 5573833, upload-time = "2025-08-17T18:22:02.402Z" }, - { url = "https://files.pythonhosted.org/packages/f9/00/64519983cd92535ba4bdd4ac26ac52db00040a52d6c4efb8d1764abcc343/zstandard-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76cdfe7f920738ea871f035568f82bad3328cbc8d98f1f6988264096b5264efd", size = 4961072, upload-time = "2025-08-17T18:22:04.384Z" }, - { url = "https://files.pythonhosted.org/packages/72/ab/3a08a43067387d22994fc87c3113636aa34ccd2914a4d2d188ce365c5d85/zstandard-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3f2fe35ec84908dddf0fbf66b35d7c2878dbe349552dd52e005c755d3493d61c", size = 
5268462, upload-time = "2025-08-17T18:22:06.095Z" }, - { url = "https://files.pythonhosted.org/packages/49/cf/2abb3a1ad85aebe18c53e7eca73223f1546ddfa3bf4d2fb83fc5a064c5ca/zstandard-0.24.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:aa705beb74ab116563f4ce784fa94771f230c05d09ab5de9c397793e725bb1db", size = 5443319, upload-time = "2025-08-17T18:22:08.572Z" }, - { url = "https://files.pythonhosted.org/packages/40/42/0dd59fc2f68f1664cda11c3b26abdf987f4e57cb6b6b0f329520cd074552/zstandard-0.24.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:aadf32c389bb7f02b8ec5c243c38302b92c006da565e120dfcb7bf0378f4f848", size = 5822355, upload-time = "2025-08-17T18:22:10.537Z" }, - { url = "https://files.pythonhosted.org/packages/99/c0/ea4e640fd4f7d58d6f87a1e7aca11fb886ac24db277fbbb879336c912f63/zstandard-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e40cd0fc734aa1d4bd0e7ad102fd2a1aefa50ce9ef570005ffc2273c5442ddc3", size = 5365257, upload-time = "2025-08-17T18:22:13.159Z" }, - { url = "https://files.pythonhosted.org/packages/27/a9/92da42a5c4e7e4003271f2e1f0efd1f37cfd565d763ad3604e9597980a1c/zstandard-0.24.0-cp311-cp311-win32.whl", hash = "sha256:cda61c46343809ecda43dc620d1333dd7433a25d0a252f2dcc7667f6331c7b61", size = 435559, upload-time = "2025-08-17T18:22:17.29Z" }, - { url = "https://files.pythonhosted.org/packages/e2/8e/2c8e5c681ae4937c007938f954a060fa7c74f36273b289cabdb5ef0e9a7e/zstandard-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:3b95fc06489aa9388400d1aab01a83652bc040c9c087bd732eb214909d7fb0dd", size = 505070, upload-time = "2025-08-17T18:22:14.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/10/a2f27a66bec75e236b575c9f7b0d7d37004a03aa2dcde8e2decbe9ed7b4d/zstandard-0.24.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad9fd176ff6800a0cf52bcf59c71e5de4fa25bf3ba62b58800e0f84885344d34", size = 461507, upload-time = "2025-08-17T18:22:15.964Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/0bd281d9154bba7fc421a291e263911e1d69d6951aa80955b992a48289f6/zstandard-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a2bda8f2790add22773ee7a4e43c90ea05598bffc94c21c40ae0a9000b0133c3", size = 795710, upload-time = "2025-08-17T18:22:19.189Z" }, - { url = "https://files.pythonhosted.org/packages/36/26/b250a2eef515caf492e2d86732e75240cdac9d92b04383722b9753590c36/zstandard-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cc76de75300f65b8eb574d855c12518dc25a075dadb41dd18f6322bda3fe15d5", size = 640336, upload-time = "2025-08-17T18:22:20.466Z" }, - { url = "https://files.pythonhosted.org/packages/79/bf/3ba6b522306d9bf097aac8547556b98a4f753dc807a170becaf30dcd6f01/zstandard-0.24.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d2b3b4bda1a025b10fe0269369475f420177f2cb06e0f9d32c95b4873c9f80b8", size = 5342533, upload-time = "2025-08-17T18:22:22.326Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ec/22bc75bf054e25accdf8e928bc68ab36b4466809729c554ff3a1c1c8bce6/zstandard-0.24.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b84c6c210684286e504022d11ec294d2b7922d66c823e87575d8b23eba7c81f", size = 5062837, upload-time = "2025-08-17T18:22:24.416Z" }, - { url = "https://files.pythonhosted.org/packages/48/cc/33edfc9d286e517fb5b51d9c3210e5bcfce578d02a675f994308ca587ae1/zstandard-0.24.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c59740682a686bf835a1a4d8d0ed1eefe31ac07f1c5a7ed5f2e72cf577692b00", size = 5393855, upload-time 
= "2025-08-17T18:22:26.786Z" }, - { url = "https://files.pythonhosted.org/packages/73/36/59254e9b29da6215fb3a717812bf87192d89f190f23817d88cb8868c47ac/zstandard-0.24.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6324fde5cf5120fbf6541d5ff3c86011ec056e8d0f915d8e7822926a5377193a", size = 5451058, upload-time = "2025-08-17T18:22:28.885Z" }, - { url = "https://files.pythonhosted.org/packages/9a/c7/31674cb2168b741bbbe71ce37dd397c9c671e73349d88ad3bca9e9fae25b/zstandard-0.24.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:51a86bd963de3f36688553926a84e550d45d7f9745bd1947d79472eca27fcc75", size = 5546619, upload-time = "2025-08-17T18:22:31.115Z" }, - { url = "https://files.pythonhosted.org/packages/e6/01/1a9f22239f08c00c156f2266db857545ece66a6fc0303d45c298564bc20b/zstandard-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d82ac87017b734f2fb70ff93818c66f0ad2c3810f61040f077ed38d924e19980", size = 5046676, upload-time = "2025-08-17T18:22:33.077Z" }, - { url = "https://files.pythonhosted.org/packages/a7/91/6c0cf8fa143a4988a0361380ac2ef0d7cb98a374704b389fbc38b5891712/zstandard-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92ea7855d5bcfb386c34557516c73753435fb2d4a014e2c9343b5f5ba148b5d8", size = 5576381, upload-time = "2025-08-17T18:22:35.391Z" }, - { url = "https://files.pythonhosted.org/packages/e2/77/1526080e22e78871e786ccf3c84bf5cec9ed25110a9585507d3c551da3d6/zstandard-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3adb4b5414febf074800d264ddf69ecade8c658837a83a19e8ab820e924c9933", size = 4953403, upload-time = "2025-08-17T18:22:37.266Z" }, - { url = "https://files.pythonhosted.org/packages/6e/d0/a3a833930bff01eab697eb8abeafb0ab068438771fa066558d96d7dafbf9/zstandard-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6374feaf347e6b83ec13cc5dcfa70076f06d8f7ecd46cc71d58fac798ff08b76", size = 5267396, upload-time = "2025-08-17T18:22:39.757Z" }, - { url = "https://files.pythonhosted.org/packages/f3/5e/90a0db9a61cd4769c06374297ecfcbbf66654f74cec89392519deba64d76/zstandard-0.24.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:13fc548e214df08d896ee5f29e1f91ee35db14f733fef8eabea8dca6e451d1e2", size = 5433269, upload-time = "2025-08-17T18:22:42.131Z" }, - { url = "https://files.pythonhosted.org/packages/ce/58/fc6a71060dd67c26a9c5566e0d7c99248cbe5abfda6b3b65b8f1a28d59f7/zstandard-0.24.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0a416814608610abf5488889c74e43ffa0343ca6cf43957c6b6ec526212422da", size = 5814203, upload-time = "2025-08-17T18:22:44.017Z" }, - { url = "https://files.pythonhosted.org/packages/5c/6a/89573d4393e3ecbfa425d9a4e391027f58d7810dec5cdb13a26e4cdeef5c/zstandard-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0d66da2649bb0af4471699aeb7a83d6f59ae30236fb9f6b5d20fb618ef6c6777", size = 5359622, upload-time = "2025-08-17T18:22:45.802Z" }, - { url = "https://files.pythonhosted.org/packages/60/ff/2cbab815d6f02a53a9d8d8703bc727d8408a2e508143ca9af6c3cca2054b/zstandard-0.24.0-cp312-cp312-win32.whl", hash = "sha256:ff19efaa33e7f136fe95f9bbcc90ab7fb60648453b03f95d1de3ab6997de0f32", size = 435968, upload-time = "2025-08-17T18:22:49.493Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/8f96b8ddb7ad12344218fbd0fd2805702dafd126ae9f8a1fb91eef7b33da/zstandard-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc05f8a875eb651d1cc62e12a4a0e6afa5cd0cc231381adb830d2e9c196ea895", size = 505195, upload-time = "2025-08-17T18:22:47.193Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/4a/bfca20679da63bfc236634ef2e4b1b4254203098b0170e3511fee781351f/zstandard-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:b04c94718f7a8ed7cdd01b162b6caa1954b3c9d486f00ecbbd300f149d2b2606", size = 461605, upload-time = "2025-08-17T18:22:48.317Z" }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, 
upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { 
url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, ] diff --git a/docker/.env.example b/docker/.env.example index c0f084796e..b0e8d020ba 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -45,7 +45,7 @@ APP_WEB_URL= # Recommendation: use a dedicated domain (e.g., https://upload.example.com). # Alternatively, use http://:5001 or http://api:5001, # ensuring port 5001 is externally accessible (see docker-compose.yaml). -FILES_URL=http://api:5001 +FILES_URL= # INTERNAL_FILES_URL is used for plugin daemon communication within Docker network. # Set this to the internal Docker service URL for proper plugin file access. @@ -449,7 +449,7 @@ SUPABASE_URL=your-server-url # ------------------------------ # The type of vector store to use. -# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`, `clickzetta`. +# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`, `clickzetta`, `alibabacloud_mysql`. 
VECTOR_STORE=weaviate # Prefix used to create collection name in vector database VECTOR_INDEX_NAME_PREFIX=Vector_index @@ -580,6 +580,15 @@ ORACLE_WALLET_LOCATION=/app/api/storage/wallet ORACLE_WALLET_PASSWORD=dify ORACLE_IS_AUTONOMOUS=false +# AlibabaCloud MySQL configuration, only available when VECTOR_STORE is `alibabcloud_mysql` +ALIBABACLOUD_MYSQL_HOST=127.0.0.1 +ALIBABACLOUD_MYSQL_PORT=3306 +ALIBABACLOUD_MYSQL_USER=root +ALIBABACLOUD_MYSQL_PASSWORD=difyai123456 +ALIBABACLOUD_MYSQL_DATABASE=dify +ALIBABACLOUD_MYSQL_MAX_CONNECTION=5 +ALIBABACLOUD_MYSQL_HNSW_M=6 + # relyt configurations, only available when VECTOR_STORE is `relyt` RELYT_HOST=db RELYT_PORT=5432 @@ -867,14 +876,14 @@ CODE_MAX_NUMBER=9223372036854775807 CODE_MIN_NUMBER=-9223372036854775808 CODE_MAX_DEPTH=5 CODE_MAX_PRECISION=20 -CODE_MAX_STRING_LENGTH=80000 +CODE_MAX_STRING_LENGTH=400000 CODE_MAX_STRING_ARRAY_LENGTH=30 CODE_MAX_OBJECT_ARRAY_LENGTH=30 CODE_MAX_NUMBER_ARRAY_LENGTH=1000 CODE_EXECUTION_CONNECT_TIMEOUT=10 CODE_EXECUTION_READ_TIMEOUT=60 CODE_EXECUTION_WRITE_TIMEOUT=10 -TEMPLATE_TRANSFORM_MAX_LENGTH=80000 +TEMPLATE_TRANSFORM_MAX_LENGTH=400000 # Workflow runtime configuration WORKFLOW_MAX_EXECUTION_STEPS=500 @@ -930,6 +939,16 @@ WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100 HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 HTTP_REQUEST_NODE_SSL_VERIFY=True + +# HTTP request node timeout configuration +# Maximum timeout values (in seconds) that users can set in HTTP request nodes +# - Connect timeout: Time to wait for establishing connection (default: 10s) +# - Read timeout: Time to wait for receiving response data (default: 600s, 10 minutes) +# - Write timeout: Time to wait for sending request data (default: 600s, 10 minutes) +HTTP_REQUEST_MAX_CONNECT_TIMEOUT=10 +HTTP_REQUEST_MAX_READ_TIMEOUT=600 +HTTP_REQUEST_MAX_WRITE_TIMEOUT=600 + # Base64 encoded CA certificate data for custom certificate verification (PEM format, optional) # HTTP_REQUEST_NODE_SSL_CERT_DATA=LS0tLS1CRUdJTi... # Base64 encoded client certificate data for mutual TLS authentication (PEM format, optional) diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 685fc325d0..5483e2d554 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -58,7 +58,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -76,7 +76,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.9.0 + image: langgenius/dify-web:1.9.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -329,7 +329,7 @@ services: # The Weaviate vector store. 
weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - "" - weaviate diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index d350503f27..ebc619a50f 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -181,7 +181,7 @@ services: # The Weaviate vector store. weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - "" - weaviate @@ -206,6 +206,7 @@ services: AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai} ports: - "${EXPOSE_WEAVIATE_PORT:-8080}:8080" + - "${EXPOSE_WEAVIATE_GRPC_PORT:-50051}:50051" networks: # create a network between sandbox, api and ssrf_proxy, and can not access outside. diff --git a/docker/docker-compose.override.yml b/docker/docker-compose.override.yml new file mode 100644 index 0000000000..8f2ab1cb43 --- /dev/null +++ b/docker/docker-compose.override.yml @@ -0,0 +1,9 @@ +services: + api: + volumes: + - ../api/core/rag/datasource/vdb/weaviate/weaviate_vector.py:/app/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py:ro + command: > + sh -c " + pip install --no-cache-dir 'weaviate>=4.0.0' && + /bin/bash /entrypoint.sh + " diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 2617f84e7d..46b4a750ea 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -10,7 +10,7 @@ x-shared-env: &shared-api-worker-env SERVICE_API_URL: ${SERVICE_API_URL:-} APP_API_URL: ${APP_API_URL:-} APP_WEB_URL: ${APP_WEB_URL:-} - FILES_URL: ${FILES_URL:-http://api:5001} + FILES_URL: ${FILES_URL:-} INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-} LANG: ${LANG:-en_US.UTF-8} LC_ALL: ${LC_ALL:-en_US.UTF-8} @@ -244,6 +244,13 @@ x-shared-env: &shared-api-worker-env ORACLE_WALLET_LOCATION: ${ORACLE_WALLET_LOCATION:-/app/api/storage/wallet} ORACLE_WALLET_PASSWORD: ${ORACLE_WALLET_PASSWORD:-dify} ORACLE_IS_AUTONOMOUS: ${ORACLE_IS_AUTONOMOUS:-false} + ALIBABACLOUD_MYSQL_HOST: ${ALIBABACLOUD_MYSQL_HOST:-127.0.0.1} + ALIBABACLOUD_MYSQL_PORT: ${ALIBABACLOUD_MYSQL_PORT:-3306} + ALIBABACLOUD_MYSQL_USER: ${ALIBABACLOUD_MYSQL_USER:-root} + ALIBABACLOUD_MYSQL_PASSWORD: ${ALIBABACLOUD_MYSQL_PASSWORD:-difyai123456} + ALIBABACLOUD_MYSQL_DATABASE: ${ALIBABACLOUD_MYSQL_DATABASE:-dify} + ALIBABACLOUD_MYSQL_MAX_CONNECTION: ${ALIBABACLOUD_MYSQL_MAX_CONNECTION:-5} + ALIBABACLOUD_MYSQL_HNSW_M: ${ALIBABACLOUD_MYSQL_HNSW_M:-6} RELYT_HOST: ${RELYT_HOST:-db} RELYT_PORT: ${RELYT_PORT:-5432} RELYT_USER: ${RELYT_USER:-postgres} @@ -390,14 +397,14 @@ x-shared-env: &shared-api-worker-env CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808} CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5} CODE_MAX_PRECISION: ${CODE_MAX_PRECISION:-20} - CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-80000} + CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-400000} CODE_MAX_STRING_ARRAY_LENGTH: ${CODE_MAX_STRING_ARRAY_LENGTH:-30} CODE_MAX_OBJECT_ARRAY_LENGTH: ${CODE_MAX_OBJECT_ARRAY_LENGTH:-30} CODE_MAX_NUMBER_ARRAY_LENGTH: ${CODE_MAX_NUMBER_ARRAY_LENGTH:-1000} CODE_EXECUTION_CONNECT_TIMEOUT: ${CODE_EXECUTION_CONNECT_TIMEOUT:-10} CODE_EXECUTION_READ_TIMEOUT: ${CODE_EXECUTION_READ_TIMEOUT:-60} CODE_EXECUTION_WRITE_TIMEOUT: ${CODE_EXECUTION_WRITE_TIMEOUT:-10} - TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-80000} + TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-400000} WORKFLOW_MAX_EXECUTION_STEPS: ${WORKFLOW_MAX_EXECUTION_STEPS:-500} WORKFLOW_MAX_EXECUTION_TIME: 
${WORKFLOW_MAX_EXECUTION_TIME:-1200} WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_CALL_MAX_DEPTH:-5} @@ -418,6 +425,9 @@ x-shared-env: &shared-api-worker-env HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True} + HTTP_REQUEST_MAX_CONNECT_TIMEOUT: ${HTTP_REQUEST_MAX_CONNECT_TIMEOUT:-10} + HTTP_REQUEST_MAX_READ_TIMEOUT: ${HTTP_REQUEST_MAX_READ_TIMEOUT:-600} + HTTP_REQUEST_MAX_WRITE_TIMEOUT: ${HTTP_REQUEST_MAX_WRITE_TIMEOUT:-600} RESPECT_XFORWARD_HEADERS_ENABLED: ${RESPECT_XFORWARD_HEADERS_ENABLED:-false} SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128} SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128} @@ -599,7 +609,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -628,7 +638,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -655,7 +665,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -673,7 +683,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.9.0 + image: langgenius/dify-web:1.9.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -926,7 +936,7 @@ services: # The Weaviate vector store. weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - "" - weaviate diff --git a/README_AR.md b/docs/ar-SA/README.md similarity index 81% rename from README_AR.md rename to docs/ar-SA/README.md index 2451757ab5..afa494c5d3 100644 --- a/README_AR.md +++ b/docs/ar-SA/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
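Note on the Weaviate changes above: `weaviate-client` moves from 3.24.2 to the v4 line (4.17.0), and v4 talks to Weaviate over both HTTP and gRPC. That is why the server image is bumped to 1.27.0, port 50051 is now published next to 8080, and the new compose override installs the v4 client and mounts the updated `weaviate_vector.py`. A minimal connectivity sketch with the v4 client (illustrative only; assumes a local deployment on the default ports above):

```python
import weaviate

# The v4 client needs both endpoints: REST over HTTP and queries/batching over gRPC.
# Ports below match the compose defaults (8080 for HTTP, 50051 for gRPC).
client = weaviate.connect_to_local(host="localhost", port=8080, grpc_port=50051)
try:
    print(client.is_ready())                    # True once HTTP and gRPC are both reachable
    print(list(client.collections.list_all()))  # names of existing collections, if any
finally:
    client.close()  # v4 clients keep connections open; close them explicitly
```

Since Compose merges `docker-compose.override.yml` automatically, a plain `docker compose up -d` from the `docker/` directory picks up both the read-only mount and the in-container client upgrade without extra flags.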

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

@@ -97,7 +99,7 @@
-أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك: +أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](../../docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك: ```bash cd docker @@ -111,7 +113,7 @@ docker compose up -d ## الخطوات التالية -إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments). +إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](../../docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments). يوجد مجتمع خاص بـ [Helm Charts](https://helm.sh/) وملفات YAML التي تسمح بتنفيذ Dify على Kubernetes للنظام من الإيجابيات العلوية. @@ -185,12 +187,4 @@ docker compose up -d ## الرخصة -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. - -## الكشف عن الأمان - -لحماية خصوصيتك، يرجى تجنب نشر مشكلات الأمان على GitHub. بدلاً من ذلك، أرسل أسئلتك إلى وسنقدم لك إجابة أكثر تفصيلاً. - -## الرخصة - -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. +هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. diff --git a/README_BN.md b/docs/bn-BD/README.md similarity index 85% rename from README_BN.md rename to docs/bn-BD/README.md index ef24dea171..318853a8de 100644 --- a/README_BN.md +++ b/docs/bn-BD/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 ডিফাই ওয়ার্কফ্লো ফাইল আপলোড পরিচিতি: গুগল নোটবুক-এলএম পডকাস্ট পুনর্নির্মাণ @@ -39,18 +39,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in Deutsch - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

ডিফাই একটি ওপেন-সোর্স LLM অ্যাপ ডেভেলপমেন্ট প্ল্যাটফর্ম। এটি ইন্টুইটিভ ইন্টারফেস, এজেন্টিক AI ওয়ার্কফ্লো, RAG পাইপলাইন, এজেন্ট ক্যাপাবিলিটি, মডেল ম্যানেজমেন্ট, মনিটরিং সুবিধা এবং আরও অনেক কিছু একত্রিত করে, যা দ্রুত প্রোটোটাইপ থেকে প্রোডাকশন পর্যন্ত নিয়ে যেতে সহায়তা করে। @@ -64,7 +65,7 @@
-ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে : +ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](../../docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে : ```bash cd dify @@ -128,7 +129,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## Advanced Setup -যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। +যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](../../docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। যেকোনো পরিবর্তন করার পর, অনুগ্রহ করে `docker-compose up -d` পুনরায় চালান। ভেরিয়েবলের সম্পূর্ণ তালিকা [এখানে] (https://docs.dify.ai/getting-started/install-self-hosted/environments) খুঁজে পেতে পারেন। যদি আপনি একটি হাইলি এভেইলেবল সেটআপ কনফিগার করতে চান, তাহলে কমিউনিটি [Helm Charts](https://helm.sh/) এবং YAML ফাইল রয়েছে যা Dify কে Kubernetes-এ ডিপ্লয় করার প্রক্রিয়া বর্ণনা করে। @@ -175,7 +176,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## Contributing -যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা] দেখুন (https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)। +যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন। একই সাথে, সোশ্যাল মিডিয়া এবং ইভেন্ট এবং কনফারেন্সে এটি শেয়ার করে Dify কে সমর্থন করুন। > আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন। @@ -203,4 +204,4 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## লাইসেন্স -এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। +এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](../../LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। diff --git a/CONTRIBUTING_DE.md b/docs/de-DE/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_DE.md rename to docs/de-DE/CONTRIBUTING.md index f819e80bbb..db12006b30 100644 --- a/CONTRIBUTING_DE.md +++ b/docs/de-DE/CONTRIBUTING.md @@ -6,7 +6,7 @@ Wir müssen wendig sein und schnell liefern, aber wir möchten auch sicherstelle Dieser Leitfaden ist, wie Dify selbst, in ständiger Entwicklung. Wir sind dankbar für Ihr Verständnis, falls er manchmal hinter dem eigentlichen Projekt zurückbleibt, und begrüßen jedes Feedback zur Verbesserung. 
-Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](./LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](../../LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Bevor Sie loslegen diff --git a/README_DE.md b/docs/de-DE/README.md similarity index 79% rename from README_DE.md rename to docs/de-DE/README.md index a08fe63d4f..8907d914d3 100644 --- a/README_DE.md +++ b/docs/de-DE/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 Einführung in Dify Workflow File Upload: Google NotebookLM Podcast nachbilden @@ -39,18 +39,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in Deutsch - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre intuitive Benutzeroberfläche vereint agentenbasierte KI-Workflows, RAG-Pipelines, Agentenfunktionen, Modellverwaltung, Überwachungsfunktionen und mehr, sodass Sie schnell von einem Prototyp in die Produktion übergehen können. @@ -64,7 +65,7 @@ Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre in
-Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind: +Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](../../docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind: ```bash cd dify @@ -127,7 +128,7 @@ Star Dify auf GitHub und lassen Sie sich sofort über neue Releases benachrichti ## Erweiterte Einstellungen -Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](../../docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von der Community bereitgestellte [Helm Charts](https://helm.sh/) und YAML-Dateien, die es ermöglichen, Dify auf Kubernetes bereitzustellen. @@ -173,14 +174,14 @@ Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline ## Contributing -Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_DE.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. +Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](./CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. > Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c). 
## Gemeinschaft & Kontakt - [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen. -- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. - [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. @@ -200,4 +201,4 @@ Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme ## Lizenz -Dieses Repository steht unter der [Dify Open Source License](LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. +Dieses Repository steht unter der [Dify Open Source License](../../LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. diff --git a/CONTRIBUTING_ES.md b/docs/es-ES/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_ES.md rename to docs/es-ES/CONTRIBUTING.md index e19d958c65..6cd80651c4 100644 --- a/CONTRIBUTING_ES.md +++ b/docs/es-ES/CONTRIBUTING.md @@ -6,7 +6,7 @@ Necesitamos ser ágiles y enviar rápidamente dado donde estamos, pero también Esta guía, como Dify mismo, es un trabajo en constante progreso. Agradecemos mucho tu comprensión si a veces se queda atrás del proyecto real, y damos la bienvenida a cualquier comentario para que podamos mejorar. -En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](./LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](../../LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de empezar diff --git a/README_ES.md b/docs/es-ES/README.md similarity index 79% rename from README_ES.md rename to docs/es-ES/README.md index d8fdbf54e6..b005691fea 100644 --- a/README_ES.md +++ b/docs/es-ES/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Dale estrella a Dify en GitHub y serás notificado instantáneamente de las nuev
-La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina: +La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina: ```bash cd docker @@ -122,7 +124,7 @@ Después de ejecutarlo, puedes acceder al panel de control de Dify en tu navegad ## Próximos pasos -Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](../../docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). . Después de realizar los cambios, ejecuta `docker-compose up -d` nuevamente. Puedes ver la lista completa de variables de entorno [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). @@ -170,7 +172,7 @@ Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @ ## Contribuir -Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_ES.md). +Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](./CONTRIBUTING.md). Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias. > Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c). @@ -184,7 +186,7 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en ## Comunidad y Contacto - [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas. 
-- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. - [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. @@ -198,12 +200,4 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En ## Licencia -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. - -## Divulgación de Seguridad - -Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada. - -## Licencia - -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. +Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. diff --git a/CONTRIBUTING_FR.md b/docs/fr-FR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_FR.md rename to docs/fr-FR/CONTRIBUTING.md index 335e943fcd..74e44ca734 100644 --- a/CONTRIBUTING_FR.md +++ b/docs/fr-FR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Nous devons être agiles et livrer rapidement compte tenu de notre position, mai Ce guide, comme Dify lui-même, est un travail en constante évolution. Nous apprécions grandement votre compréhension si parfois il est en retard par rapport au projet réel, et nous accueillons tout commentaire pour nous aider à nous améliorer. -En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](./LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](../../LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Avant de vous lancer diff --git a/README_FR.md b/docs/fr-FR/README.md similarity index 79% rename from README_FR.md rename to docs/fr-FR/README.md index 7474ea50c2..3aca9a9672 100644 --- a/README_FR.md +++ b/docs/fr-FR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Mettez une étoile à Dify sur GitHub et soyez instantanément informé des nouv
-La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine: +La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](../../docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine: ```bash cd docker @@ -122,7 +124,7 @@ Après l'exécution, vous pouvez accéder au tableau de bord Dify dans votre nav ## Prochaines étapes -Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](../../docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments). Si vous souhaitez configurer une configuration haute disponibilité, la communauté fournit des [Helm Charts](https://helm.sh/) et des fichiers YAML, à travers lesquels vous pouvez déployer Dify sur Kubernetes. @@ -168,7 +170,7 @@ Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart ## Contribuer -Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_FR.md). +Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](./CONTRIBUTING.md). Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences. > Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c). 
@@ -182,7 +184,7 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le ## Communauté & Contact - [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions. -- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté. - [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté. @@ -196,12 +198,4 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de ## Licence -Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. - -## Divulgation de sécurité - -Pour protéger votre vie privée, veuillez éviter de publier des problèmes de sécurité sur GitHub. Au lieu de cela, envoyez vos questions à security@dify.ai et nous vous fournirons une réponse plus détaillée. - -## Licence - -Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. +Ce référentiel est disponible sous la [Licence open source Dify](../../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. diff --git a/CONTRIBUTING_JA.md b/docs/ja-JP/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_JA.md rename to docs/ja-JP/CONTRIBUTING.md index 2d0d79fc16..4ee7d8c963 100644 --- a/CONTRIBUTING_JA.md +++ b/docs/ja-JP/CONTRIBUTING.md @@ -6,7 +6,7 @@ Difyに貢献しようとお考えですか?素晴らしいですね。私た このガイドは、Dify自体と同様に、常に進化し続けています。実際のプロジェクトの進行状況と多少のずれが生じる場合もございますが、ご理解いただけますと幸いです。改善のためのフィードバックも歓迎いたします。 -ライセンスについては、[ライセンスと貢献者同意書](./LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 +ライセンスについては、[ライセンスと貢献者同意書](../../LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 ## 始める前に diff --git a/README_JA.md b/docs/ja-JP/README.md similarity index 79% rename from README_JA.md rename to docs/ja-JP/README.md index a782849f6e..66831285d6 100644 --- a/README_JA.md +++ b/docs/ja-JP/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -109,7 +111,7 @@ GitHub上でDifyにスターを付けることで、Difyに関する新しいニ
-Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。 +Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](../../docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。 ```bash cd docker @@ -123,7 +125,7 @@ docker compose up -d ## 次のステップ -設定をカスタマイズする必要がある場合は、[.env.example](docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。 +設定をカスタマイズする必要がある場合は、[.env.example](../../docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。 高可用性設定を設定する必要がある場合、コミュニティは[Helm Charts](https://helm.sh/)とYAMLファイルにより、DifyをKubernetesにデプロイすることができます。 @@ -169,7 +171,7 @@ docker compose up -d ## 貢献 -コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_JA.md)を参照してください。 +コードに貢献したい方は、[Contribution Guide](./CONTRIBUTING.md)を参照してください。 同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。 > Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。 @@ -183,10 +185,10 @@ docker compose up -d ## コミュニティ & お問い合わせ - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 -- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください +- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](./CONTRIBUTING.md)を参照してください - [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 - [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 ## ライセンス -このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](LICENSE)の下で利用可能です。 +このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](../../LICENSE)の下で利用可能です。 diff --git a/CONTRIBUTING_KR.md b/docs/ko-KR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_KR.md rename to docs/ko-KR/CONTRIBUTING.md index 14b1c9a9ca..9c171c3561 100644 --- a/CONTRIBUTING_KR.md +++ b/docs/ko-KR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Dify에 기여하려고 하시는군요 - 정말 멋집니다, 당신이 무엇 이 가이드는 Dify 자체와 마찬가지로 끊임없이 진행 중인 작업입니다. 때로는 실제 프로젝트보다 뒤처질 수 있다는 점을 이해해 주시면 감사하겠으며, 개선을 위한 피드백은 언제든지 환영합니다. -라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](./LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. +라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](../../LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. 
## 시작하기 전에 diff --git a/README_KR.md b/docs/ko-KR/README.md similarity index 79% rename from README_KR.md rename to docs/ko-KR/README.md index ec28cc0f61..ec67bc90ed 100644 --- a/README_KR.md +++ b/docs/ko-KR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify 클라우드 · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify는 오픈 소스 LLM 앱 개발 플랫폼입니다. 직관적인 인터페이스를 통해 AI 워크플로우, RAG 파이프라인, 에이전트 기능, 모델 관리, 관찰 기능 등을 결합하여 프로토타입에서 프로덕션까지 빠르게 전환할 수 있습니다. 주요 기능 목록은 다음과 같습니다:

@@ -102,7 +104,7 @@ GitHub에서 Dify에 별표를 찍어 새로운 릴리스를 즉시 알림 받
-Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요. +Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](../../docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요. ```bash cd docker @@ -116,7 +118,7 @@ docker compose up -d ## 다음 단계 -구성을 사용자 정의해야 하는 경우 [.env.example](docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다. +구성을 사용자 정의해야 하는 경우 [.env.example](../../docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다. Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했다는 커뮤니티가 제공하는 [Helm Charts](https://helm.sh/)와 YAML 파일이 존재합니다. @@ -162,7 +164,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 기여 -코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_KR.md)를 참조하세요. +코드에 기여하고 싶은 분들은 [기여 가이드](./CONTRIBUTING.md)를 참조하세요. 동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다. > 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. @@ -176,7 +178,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 커뮤니티 & 연락처 - [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다. -- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. +- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](./CONTRIBUTING.md)를 참조하세요. - [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. - [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. @@ -190,4 +192,4 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 라이선스 -이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](LICENSE)에 따라 사용할 수 있습니다. +이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](../../LICENSE)에 따라 사용할 수 있습니다. diff --git a/CONTRIBUTING_PT.md b/docs/pt-BR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_PT.md rename to docs/pt-BR/CONTRIBUTING.md index aeabcad51f..737b2ddce2 100644 --- a/CONTRIBUTING_PT.md +++ b/docs/pt-BR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Precisamos ser ágeis e entregar rapidamente considerando onde estamos, mas tamb Este guia, como o próprio Dify, é um trabalho em constante evolução. Agradecemos muito a sua compreensão se às vezes ele ficar atrasado em relação ao projeto real, e damos as boas-vindas a qualquer feedback para que possamos melhorar. -Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](./LICENSE). 
A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](../../LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de começar diff --git a/README_PT.md b/docs/pt-BR/README.md similarity index 78% rename from README_PT.md rename to docs/pt-BR/README.md index da8f354a49..78383a3c76 100644 --- a/README_PT.md +++ b/docs/pt-BR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM @@ -39,18 +39,20 @@

- README em Inglês - 简体中文版自述文件 - 日本語のREADME - README em Espanhol - README em Francês - README tlhIngan Hol - README em Coreano - README em Árabe - README em Turco - README em Vietnamita - README em Português - BR - README in বাংলা + README em Inglês + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README em Espanhol + README em Francês + README tlhIngan Hol + README em Coreano + README em Árabe + README em Turco + README em Vietnamita + README em Português - BR + README in Deutsch + README in বাংলা

Dify é uma plataforma de desenvolvimento de aplicativos LLM de código aberto. Sua interface intuitiva combina workflow de IA, pipeline RAG, capacidades de agente, gerenciamento de modelos, recursos de observabilidade e muito mais, permitindo que você vá rapidamente do protótipo à produção. Aqui está uma lista das principais funcionalidades: @@ -108,7 +110,7 @@ Dê uma estrela no Dify no GitHub e seja notificado imediatamente sobre novos la
-A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina: +A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina: ```bash cd docker @@ -122,7 +124,7 @@ Após a execução, você pode acessar o painel do Dify no navegador em [http:// ## Próximos passos -Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](../../docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts](https://helm.sh/) e arquivos YAML contribuídos pela comunidade que permitem a implantação do Dify no Kubernetes. @@ -168,7 +170,7 @@ Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by ## Contribuindo -Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_PT.md). +Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](./CONTRIBUTING.md). Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências. > Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). @@ -182,7 +184,7 @@ Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em ## Comunidade e contato - [Discussões no GitHub](https://github.com/langgenius/dify/discussions). Melhor para: compartilhar feedback e fazer perguntas. -- [Problemas no GitHub](https://github.com/langgenius/dify/issues). 
Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Melhor para: compartilhar suas aplicações e interagir com a comunidade. - [X(Twitter)](https://twitter.com/dify_ai). Melhor para: compartilhar suas aplicações e interagir com a comunidade. @@ -196,4 +198,4 @@ Para proteger sua privacidade, evite postar problemas de segurança no GitHub. E ## Licença -Este repositório está disponível sob a [Licença de Código Aberto Dify](LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. +Este repositório está disponível sob a [Licença de Código Aberto Dify](../../LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. diff --git a/README_SI.md b/docs/sl-SI/README.md similarity index 83% rename from README_SI.md rename to docs/sl-SI/README.md index c20dc3484f..65aedb7703 100644 --- a/README_SI.md +++ b/docs/sl-SI/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 Predstavljamo nalaganje datotek Dify Workflow: znova ustvarite Google NotebookLM Podcast @@ -36,18 +36,20 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README Slovenščina - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README Slovenščina + README in Deutsch + README in বাংলা

Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje. @@ -169,7 +171,7 @@ Z enim klikom namestite Dify v AKS z uporabo [Azure Devops Pipeline Helm Chart b ## Prispevam -Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. +Za tiste, ki bi radi prispevali kodo, si oglejte naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. > Iščemo sodelavce za pomoč pri prevajanju Difyja v jezike, ki niso mandarinščina ali angleščina. Če želite pomagati, si oglejte i18n README za več informacij in nam pustite komentar v global-userskanalu našega strežnika skupnosti Discord . @@ -196,4 +198,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj ## Licenca -To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. +To skladišče je na voljo pod [odprtokodno licenco Dify](../../LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. diff --git a/README_KL.md b/docs/tlh/README.md similarity index 79% rename from README_KL.md rename to docs/tlh/README.md index 93da9a6140..b1e3016efd 100644 --- a/README_KL.md +++ b/docs/tlh/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Star Dify on GitHub and be instantly notified of new releases.
-The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine: +The easiest way to start the Dify server is to run our [docker-compose.yml](../../docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine: ```bash cd docker @@ -122,7 +124,7 @@ After running, you can access the Dify dashboard in your browser at [http://loca ## Next steps -If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). +If you need to customize the configuration, please refer to the comments in our [.env.example](../../docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes. @@ -181,10 +183,7 @@ At the same time, please consider supporting Dify by sharing it on social media ## Community & Contact -- \[GitHub Discussion\](https://github.com/langgenius/dify/discussions - -). Best for: sharing feedback and asking questions. - +- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions. - [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. - [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. @@ -199,4 +198,4 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead ## License -This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions. +This repository is available under the [Dify Open Source License](../../LICENSE), which is essentially Apache 2.0 with a few additional restrictions. 
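Condensed, the first-run flow this README describes (and which each translated copy below repeats) looks like the following sketch. It assumes Docker and Docker Compose are already installed and that the usual `cp .env.example .env` step from the setup section applies:

```bash
cd docker
cp .env.example .env        # assumed starting point; adjust values per .env.example comments
docker compose up -d
docker compose ps           # all services should report as running/healthy
# then open the Dify dashboard URL from the step above in a browser to finish setup;
# after any later .env or docker-compose.yaml change, re-run: docker compose up -d
```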
diff --git a/CONTRIBUTING_TR.md b/docs/tr-TR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_TR.md rename to docs/tr-TR/CONTRIBUTING.md index d016802a53..59227d31a9 100644 --- a/CONTRIBUTING_TR.md +++ b/docs/tr-TR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Bulunduğumuz noktada çevik olmamız ve hızlı hareket etmemiz gerekiyor, anca Bu rehber, Dify'ın kendisi gibi, sürekli gelişen bir çalışmadır. Bazen gerçek projenin gerisinde kalırsa anlayışınız için çok minnettarız ve gelişmemize yardımcı olacak her türlü geri bildirimi memnuniyetle karşılıyoruz. -Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](./LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. +Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](../../LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. ## Başlamadan Önce diff --git a/README_TR.md b/docs/tr-TR/README.md similarity index 79% rename from README_TR.md rename to docs/tr-TR/README.md index 21df0d1605..a044da1f4e 100644 --- a/README_TR.md +++ b/docs/tr-TR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Bulut · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify, açık kaynaklı bir LLM uygulama geliştirme platformudur. Sezgisel arayüzü, AI iş akışı, RAG pipeline'ı, ajan yetenekleri, model yönetimi, gözlemlenebilirlik özellikleri ve daha fazlasını birleştirerek, prototipten üretime hızlıca geçmenizi sağlar. İşte temel özelliklerin bir listesi: @@ -102,7 +104,7 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun. > - RAM >= 4GB
-Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun: +Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](../../docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun: ```bash cd docker @@ -116,7 +118,7 @@ docker compose up -d ## Sonraki adımlar -Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz. +Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](../../docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz. Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'ın Kubernetes üzerine dağıtılmasına olanak tanıyan topluluk katkılı [Helm Charts](https://helm.sh/) ve YAML dosyaları mevcuttur. @@ -161,7 +163,7 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter ## Katkıda Bulunma -Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TR.md) bakabilirsiniz. +Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakabilirsiniz. Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün. > Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın. @@ -175,7 +177,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p ## Topluluk & iletişim - [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için. -- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. 
[Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın. +- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakın. - [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. - [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. @@ -189,4 +191,4 @@ Gizliliğinizi korumak için, lütfen güvenlik sorunlarını GitHub'da paylaşm ## Lisans -Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](LICENSE) altında kullanıma sunulmuştur. +Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](../../LICENSE) altında kullanıma sunulmuştur. diff --git a/CONTRIBUTING_VI.md b/docs/vi-VN/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_VI.md rename to docs/vi-VN/CONTRIBUTING.md index 2ad431296a..fa1d875f83 100644 --- a/CONTRIBUTING_VI.md +++ b/docs/vi-VN/CONTRIBUTING.md @@ -6,7 +6,7 @@ Chúng tôi cần phải nhanh nhẹn và triển khai nhanh chóng, nhưng cũn Hướng dẫn này, giống như Dify, đang được phát triển liên tục. Chúng tôi rất cảm kích sự thông cảm của bạn nếu đôi khi nó chưa theo kịp dự án thực tế, và hoan nghênh mọi phản hồi để cải thiện. -Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](./LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](../../LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Trước khi bắt đầu diff --git a/README_VI.md b/docs/vi-VN/README.md similarity index 80% rename from README_VI.md rename to docs/vi-VN/README.md index 6d5305fb75..847641da12 100644 --- a/README_VI.md +++ b/docs/vi-VN/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify là một nền tảng phát triển ứng dụng LLM mã nguồn mở. Giao diện trực quan kết hợp quy trình làm việc AI, mô hình RAG, khả năng tác nhân, quản lý mô hình, tính năng quan sát và hơn thế nữa, cho phép bạn nhanh chóng chuyển từ nguyên mẫu sang sản phẩm. Đây là danh sách các tính năng cốt lõi: @@ -103,7 +105,7 @@ Yêu thích Dify trên GitHub và được thông báo ngay lập tức về cá
-Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn: +Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](../../docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn: ```bash cd docker @@ -117,7 +119,7 @@ Sau khi chạy, bạn có thể truy cập bảng điều khiển Dify trong tr ## Các bước tiếp theo -Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](../../docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments). Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có các [Helm Charts](https://helm.sh/) và tệp YAML do cộng đồng đóng góp cho phép Dify được triển khai trên Kubernetes. @@ -162,7 +164,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Đóng góp -Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_VI.md) của chúng tôi. +Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](./CONTRIBUTING.md) của chúng tôi. Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị. > Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi. @@ -176,7 +178,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Cộng đồng & liên hệ - [Thảo luận GitHub](https://github.com/langgenius/dify/discussions). Tốt nhất cho: chia sẻ phản hồi và đặt câu hỏi. -- [Vấn đề GitHub](https://github.com/langgenius/dify/issues). Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi. +- [Vấn đề GitHub](https://github.com/langgenius/dify/issues). 
Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](./CONTRIBUTING.md) của chúng tôi. - [Discord](https://discord.gg/FngNHpbcY7). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng. - [X(Twitter)](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng. @@ -190,4 +192,4 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Giấy phép -Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. +Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](../../LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. diff --git a/CONTRIBUTING_CN.md b/docs/zh-CN/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_CN.md rename to docs/zh-CN/CONTRIBUTING.md index c278c8fd7a..5b71467804 100644 --- a/CONTRIBUTING_CN.md +++ b/docs/zh-CN/CONTRIBUTING.md @@ -6,7 +6,7 @@ 本指南和 Dify 一样在不断完善中。如果有任何滞后于项目实际情况的地方,恳请谅解,我们也欢迎任何改进建议。 -关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](./LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](../../LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 开始之前 diff --git a/README_CN.md b/docs/zh-CN/README.md similarity index 79% rename from README_CN.md rename to docs/zh-CN/README.md index 9aaebf4037..202b99a6b1 100644 --- a/README_CN.md +++ b/docs/zh-CN/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
Dify 云服务 · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা
# @@ -111,7 +113,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI ### 快速启动 -启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): +启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](../../docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): ```bash cd docker @@ -123,7 +125,7 @@ docker compose up -d ### 自定义配置 -如果您需要自定义配置,请参考 [.env.example](docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。 +如果您需要自定义配置,请参考 [.env.example](../../docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。 #### 使用 Helm Chart 或 Kubernetes 资源清单(YAML)部署 @@ -180,7 +182,7 @@ docker compose up -d ## Contributing -对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_CN.md)。 +对于那些想要贡献代码的人,请参阅我们的[贡献指南](./CONTRIBUTING.md)。 同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。 > 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。 @@ -196,7 +198,7 @@ docker compose up -d 我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括:提交代码、问题、新想法,或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。 - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。 -- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。 +- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](./CONTRIBUTING.md)。 - [电子邮件支持](mailto:hello@dify.ai?subject=%5BGitHub%5DQuestions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。 - [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。 @@ -208,4 +210,4 @@ docker compose up -d ## License -本仓库遵循 [Dify Open Source License](LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 +本仓库遵循 [Dify Open Source License](../../LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 diff --git a/CONTRIBUTING_TW.md b/docs/zh-TW/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING_TW.md rename to docs/zh-TW/CONTRIBUTING.md index 5c4d7022fe..1d5f02efa1 100644 --- a/CONTRIBUTING_TW.md +++ b/docs/zh-TW/CONTRIBUTING.md @@ -6,7 +6,7 @@ 這份指南與 Dify 一樣,都在持續完善中。如果指南內容有落後於實際專案的情況,還請見諒,也歡迎提供改進建議。 -關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](./LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](../../LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 開始之前 diff --git a/README_TW.md b/docs/zh-TW/README.md similarity index 80% rename from README_TW.md rename to docs/zh-TW/README.md index 18d0724784..526e8d9c8c 100644 --- a/README_TW.md +++ b/docs/zh-TW/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 介紹 Dify 工作流程檔案上傳功能:重現 Google NotebookLM Podcast @@ -39,18 +39,18 @@

- README in English - 繁體中文文件 - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in Deutsch + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch

Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合了智能代理工作流程、RAG 管道、代理功能、模型管理、可觀察性功能等,讓您能夠快速從原型進展到生產環境。 @@ -64,7 +64,7 @@ Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合
-啟動 Dify 伺服器最簡單的方式是透過 [docker compose](docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): +啟動 Dify 伺服器最簡單的方式是透過 [docker compose](../../docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): ```bash cd dify @@ -128,7 +128,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 進階設定 -如果您需要自定義配置,請參考我們的 [.env.example](docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。 +如果您需要自定義配置,請參考我們的 [.env.example](../../docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。 如果您想配置高可用性設置,社區貢獻的 [Helm Charts](https://helm.sh/) 和 Kubernetes 資源清單(YAML)允許在 Kubernetes 上部署 Dify。 @@ -173,7 +173,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 貢獻 -對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TW.md)。 +對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](./CONTRIBUTING.md)。 同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。 > 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。 @@ -181,7 +181,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 社群與聯絡方式 - [GitHub Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。 -- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 +- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](./CONTRIBUTING.md)。 - [Discord](https://discord.gg/FngNHpbcY7):最適合分享您的應用程式並與社群互動。 - [X(Twitter)](https://twitter.com/dify_ai):最適合分享您的應用程式並與社群互動。 @@ -201,4 +201,4 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 授權條款 -本代碼庫採用 [Dify 開源授權](LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 +本代碼庫採用 [Dify 開源授權](../../LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 diff --git a/scripts/stress-test/setup/import_workflow_app.py b/scripts/stress-test/setup/import_workflow_app.py index 86d0239e35..41a76bd29b 100755 --- a/scripts/stress-test/setup/import_workflow_app.py +++ b/scripts/stress-test/setup/import_workflow_app.py @@ -8,7 +8,7 @@ sys.path.append(str(Path(__file__).parent.parent)) import json import httpx -from common import Logger, config_helper +from common import Logger, config_helper # type: ignore[import] def import_workflow_app() -> None: diff --git a/sdks/python-client/MANIFEST.in b/sdks/python-client/MANIFEST.in index 12f44237a2..34b7e8711c 100644 --- a/sdks/python-client/MANIFEST.in +++ b/sdks/python-client/MANIFEST.in @@ -1 +1,3 @@ recursive-include dify_client *.py +include README.md +include LICENSE diff --git a/sdks/python-client/README.md b/sdks/python-client/README.md index 34b14b3a94..ebfb5f5397 100644 --- a/sdks/python-client/README.md +++ b/sdks/python-client/README.md @@ -10,6 +10,8 @@ First, install `dify-client` python sdk package: pip install dify-client ``` +### Synchronous Usage + Write your code with sdk: - completion generate with `blocking` response_mode @@ -221,3 +223,187 @@ 
 answer = result.get("data").get("outputs")
 print(answer["answer"])
 ```
+
+- Dataset Management
+
+```python
+from dify_client import KnowledgeBaseClient
+
+api_key = "your_api_key"
+dataset_id = "your_dataset_id"
+
+# Use context manager to ensure proper resource cleanup
+with KnowledgeBaseClient(api_key, dataset_id=dataset_id) as kb_client:
+    # Get dataset information
+    dataset_info = kb_client.get_dataset()
+    dataset_info.raise_for_status()
+    print(dataset_info.json())
+
+    # Update dataset configuration
+    update_response = kb_client.update_dataset(
+        name="Updated Dataset Name",
+        description="Updated description",
+        indexing_technique="high_quality"
+    )
+    update_response.raise_for_status()
+    print(update_response.json())
+
+    # Batch update document status
+    batch_response = kb_client.batch_update_document_status(
+        action="enable",
+        document_ids=["doc_id_1", "doc_id_2", "doc_id_3"]
+    )
+    batch_response.raise_for_status()
+    print(batch_response.json())
+```
+
+- Conversation Variables Management
+
+```python
+from dify_client import ChatClient
+
+api_key = "your_api_key"
+
+# Use context manager to ensure proper resource cleanup
+with ChatClient(api_key) as chat_client:
+    # Get all conversation variables
+    variables = chat_client.get_conversation_variables(
+        conversation_id="conversation_id",
+        user="user_id"
+    )
+    variables.raise_for_status()
+    print(variables.json())
+
+    # Update a specific conversation variable
+    update_var = chat_client.update_conversation_variable(
+        conversation_id="conversation_id",
+        variable_id="variable_id",
+        value="new_value",
+        user="user_id"
+    )
+    update_var.raise_for_status()
+    print(update_var.json())
+```
+
+### Asynchronous Usage
+
+The SDK provides full async/await support for all API operations using `httpx.AsyncClient`. All async clients mirror their synchronous counterparts but require `await` for method calls.
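+
+Because each async client holds a single `httpx.AsyncClient` connection pool, independent requests can also be issued concurrently from one client instance. The sketch below is illustrative only (the queries and `user` value are placeholders to replace with your own); it fans two blocking chat requests out with `asyncio.gather`:
+
+```python
+import asyncio
+from dify_client import AsyncChatClient
+
+api_key = "your_api_key"
+
+async def ask(client: AsyncChatClient, query: str):
+    response = await client.create_chat_message(
+        inputs={},
+        query=query,
+        user="user_id",
+        response_mode="blocking",
+    )
+    response.raise_for_status()
+    return response.json().get("answer")
+
+async def main():
+    async with AsyncChatClient(api_key) as client:
+        # Both requests run concurrently on the same client and connection pool
+        answers = await asyncio.gather(
+            ask(client, "What is Dify?"),
+            ask(client, "What is a RAG pipeline?"),
+        )
+        for answer in answers:
+            print(answer)
+
+asyncio.run(main())
+```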
+ +- async chat with `blocking` response_mode + +```python +import asyncio +from dify_client import AsyncChatClient + +api_key = "your_api_key" + +async def main(): + # Use async context manager for proper resource cleanup + async with AsyncChatClient(api_key) as client: + response = await client.create_chat_message( + inputs={}, + query="Hello, how are you?", + user="user_id", + response_mode="blocking" + ) + response.raise_for_status() + result = response.json() + print(result.get('answer')) + +# Run the async function +asyncio.run(main()) +``` + +- async completion with `streaming` response_mode + +```python +import asyncio +import json +from dify_client import AsyncCompletionClient + +api_key = "your_api_key" + +async def main(): + async with AsyncCompletionClient(api_key) as client: + response = await client.create_completion_message( + inputs={"query": "What's the weather?"}, + response_mode="streaming", + user="user_id" + ) + response.raise_for_status() + + # Stream the response + async for line in response.aiter_lines(): + if line.startswith('data:'): + data = line[5:].strip() + if data: + chunk = json.loads(data) + print(chunk.get('answer', ''), end='', flush=True) + +asyncio.run(main()) +``` + +- async workflow execution + +```python +import asyncio +from dify_client import AsyncWorkflowClient + +api_key = "your_api_key" + +async def main(): + async with AsyncWorkflowClient(api_key) as client: + response = await client.run( + inputs={"query": "What is machine learning?"}, + response_mode="blocking", + user="user_id" + ) + response.raise_for_status() + result = response.json() + print(result.get("data").get("outputs")) + +asyncio.run(main()) +``` + +- async dataset management + +```python +import asyncio +from dify_client import AsyncKnowledgeBaseClient + +api_key = "your_api_key" +dataset_id = "your_dataset_id" + +async def main(): + async with AsyncKnowledgeBaseClient(api_key, dataset_id) as kb_client: + # Get dataset information + dataset_info = await kb_client.get_dataset() + dataset_info.raise_for_status() + print(dataset_info.json()) + + # List documents + docs = await kb_client.list_documents(page=1, page_size=10) + docs.raise_for_status() + print(docs.json()) + +asyncio.run(main()) +``` + +**Benefits of Async Usage:** + +- **Better Performance**: Handle multiple concurrent API requests efficiently +- **Non-blocking I/O**: Don't block the event loop during network operations +- **Scalability**: Ideal for applications handling many simultaneous requests +- **Modern Python**: Leverages Python's native async/await syntax + +**Available Async Clients:** + +- `AsyncDifyClient` - Base async client +- `AsyncChatClient` - Async chat operations +- `AsyncCompletionClient` - Async completion operations +- `AsyncWorkflowClient` - Async workflow operations +- `AsyncKnowledgeBaseClient` - Async dataset/knowledge base operations +- `AsyncWorkspaceClient` - Async workspace operations + +``` +``` diff --git a/sdks/python-client/dify_client/__init__.py b/sdks/python-client/dify_client/__init__.py index e866472f45..ced093b20a 100644 --- a/sdks/python-client/dify_client/__init__.py +++ b/sdks/python-client/dify_client/__init__.py @@ -4,12 +4,31 @@ from dify_client.client import ( DifyClient, KnowledgeBaseClient, WorkflowClient, + WorkspaceClient, +) + +from dify_client.async_client import ( + AsyncChatClient, + AsyncCompletionClient, + AsyncDifyClient, + AsyncKnowledgeBaseClient, + AsyncWorkflowClient, + AsyncWorkspaceClient, ) __all__ = [ + # Synchronous clients "ChatClient", 
"CompletionClient", "DifyClient", "KnowledgeBaseClient", "WorkflowClient", + "WorkspaceClient", + # Asynchronous clients + "AsyncChatClient", + "AsyncCompletionClient", + "AsyncDifyClient", + "AsyncKnowledgeBaseClient", + "AsyncWorkflowClient", + "AsyncWorkspaceClient", ] diff --git a/sdks/python-client/dify_client/async_client.py b/sdks/python-client/dify_client/async_client.py new file mode 100644 index 0000000000..984f668d0c --- /dev/null +++ b/sdks/python-client/dify_client/async_client.py @@ -0,0 +1,808 @@ +"""Asynchronous Dify API client. + +This module provides async/await support for all Dify API operations using httpx.AsyncClient. +All client classes mirror their synchronous counterparts but require `await` for method calls. + +Example: + import asyncio + from dify_client import AsyncChatClient + + async def main(): + async with AsyncChatClient(api_key="your-key") as client: + response = await client.create_chat_message( + inputs={}, + query="Hello", + user="user-123" + ) + print(response.json()) + + asyncio.run(main()) +""" + +import json +import os +from typing import Literal, Dict, List, Any, IO + +import aiofiles +import httpx + + +class AsyncDifyClient: + """Asynchronous Dify API client. + + This client uses httpx.AsyncClient for efficient async connection pooling. + It's recommended to use this client as a context manager: + + Example: + async with AsyncDifyClient(api_key="your-key") as client: + response = await client.get_app_info() + """ + + def __init__( + self, + api_key: str, + base_url: str = "https://api.dify.ai/v1", + timeout: float = 60.0, + ): + """Initialize the async Dify client. + + Args: + api_key: Your Dify API key + base_url: Base URL for the Dify API + timeout: Request timeout in seconds (default: 60.0) + """ + self.api_key = api_key + self.base_url = base_url + self._client = httpx.AsyncClient( + base_url=base_url, + timeout=httpx.Timeout(timeout, connect=5.0), + ) + + async def __aenter__(self): + """Support async context manager protocol.""" + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Clean up resources when exiting async context.""" + await self.aclose() + + async def aclose(self): + """Close the async HTTP client and release resources.""" + if hasattr(self, "_client"): + await self._client.aclose() + + async def _send_request( + self, + method: str, + endpoint: str, + json: dict | None = None, + params: dict | None = None, + stream: bool = False, + **kwargs, + ): + """Send an async HTTP request to the Dify API. + + Args: + method: HTTP method (GET, POST, PUT, PATCH, DELETE) + endpoint: API endpoint path + json: JSON request body + params: Query parameters + stream: Whether to stream the response + **kwargs: Additional arguments to pass to httpx.request + + Returns: + httpx.Response object + """ + headers = { + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + } + + response = await self._client.request( + method, + endpoint, + json=json, + params=params, + headers=headers, + **kwargs, + ) + + return response + + async def _send_request_with_files(self, method: str, endpoint: str, data: dict, files: dict): + """Send an async HTTP request with file uploads. + + Args: + method: HTTP method (POST, PUT, etc.) 
+ endpoint: API endpoint path + data: Form data + files: Files to upload + + Returns: + httpx.Response object + """ + headers = {"Authorization": f"Bearer {self.api_key}"} + + response = await self._client.request( + method, + endpoint, + data=data, + headers=headers, + files=files, + ) + + return response + + async def message_feedback(self, message_id: str, rating: Literal["like", "dislike"], user: str): + """Send feedback for a message.""" + data = {"rating": rating, "user": user} + return await self._send_request("POST", f"/messages/{message_id}/feedbacks", data) + + async def get_application_parameters(self, user: str): + """Get application parameters.""" + params = {"user": user} + return await self._send_request("GET", "/parameters", params=params) + + async def file_upload(self, user: str, files: dict): + """Upload a file.""" + data = {"user": user} + return await self._send_request_with_files("POST", "/files/upload", data=data, files=files) + + async def text_to_audio(self, text: str, user: str, streaming: bool = False): + """Convert text to audio.""" + data = {"text": text, "user": user, "streaming": streaming} + return await self._send_request("POST", "/text-to-audio", json=data) + + async def get_meta(self, user: str): + """Get metadata.""" + params = {"user": user} + return await self._send_request("GET", "/meta", params=params) + + async def get_app_info(self): + """Get basic application information including name, description, tags, and mode.""" + return await self._send_request("GET", "/info") + + async def get_app_site_info(self): + """Get application site information.""" + return await self._send_request("GET", "/site") + + async def get_file_preview(self, file_id: str): + """Get file preview by file ID.""" + return await self._send_request("GET", f"/files/{file_id}/preview") + + +class AsyncCompletionClient(AsyncDifyClient): + """Async client for Completion API operations.""" + + async def create_completion_message( + self, + inputs: dict, + response_mode: Literal["blocking", "streaming"], + user: str, + files: dict | None = None, + ): + """Create a completion message. + + Args: + inputs: Input variables for the completion + response_mode: Response mode ('blocking' or 'streaming') + user: User identifier + files: Optional files to include + + Returns: + httpx.Response object + """ + data = { + "inputs": inputs, + "response_mode": response_mode, + "user": user, + "files": files, + } + return await self._send_request( + "POST", + "/completion-messages", + data, + stream=(response_mode == "streaming"), + ) + + +class AsyncChatClient(AsyncDifyClient): + """Async client for Chat API operations.""" + + async def create_chat_message( + self, + inputs: dict, + query: str, + user: str, + response_mode: Literal["blocking", "streaming"] = "blocking", + conversation_id: str | None = None, + files: dict | None = None, + ): + """Create a chat message. 
+ + Args: + inputs: Input variables for the chat + query: User query/message + user: User identifier + response_mode: Response mode ('blocking' or 'streaming') + conversation_id: Optional conversation ID for context + files: Optional files to include + + Returns: + httpx.Response object + """ + data = { + "inputs": inputs, + "query": query, + "user": user, + "response_mode": response_mode, + "files": files, + } + if conversation_id: + data["conversation_id"] = conversation_id + + return await self._send_request( + "POST", + "/chat-messages", + data, + stream=(response_mode == "streaming"), + ) + + async def get_suggested(self, message_id: str, user: str): + """Get suggested questions for a message.""" + params = {"user": user} + return await self._send_request("GET", f"/messages/{message_id}/suggested", params=params) + + async def stop_message(self, task_id: str, user: str): + """Stop a running message generation.""" + data = {"user": user} + return await self._send_request("POST", f"/chat-messages/{task_id}/stop", data) + + async def get_conversations( + self, + user: str, + last_id: str | None = None, + limit: int | None = None, + pinned: bool | None = None, + ): + """Get list of conversations.""" + params = {"user": user, "last_id": last_id, "limit": limit, "pinned": pinned} + return await self._send_request("GET", "/conversations", params=params) + + async def get_conversation_messages( + self, + user: str, + conversation_id: str | None = None, + first_id: str | None = None, + limit: int | None = None, + ): + """Get messages from a conversation.""" + params = { + "user": user, + "conversation_id": conversation_id, + "first_id": first_id, + "limit": limit, + } + return await self._send_request("GET", "/messages", params=params) + + async def rename_conversation(self, conversation_id: str, name: str, auto_generate: bool, user: str): + """Rename a conversation.""" + data = {"name": name, "auto_generate": auto_generate, "user": user} + return await self._send_request("POST", f"/conversations/{conversation_id}/name", data) + + async def delete_conversation(self, conversation_id: str, user: str): + """Delete a conversation.""" + data = {"user": user} + return await self._send_request("DELETE", f"/conversations/{conversation_id}", data) + + async def audio_to_text(self, audio_file: IO[bytes] | tuple, user: str): + """Convert audio to text.""" + data = {"user": user} + files = {"file": audio_file} + return await self._send_request_with_files("POST", "/audio-to-text", data, files) + + # Annotation APIs + async def annotation_reply_action( + self, + action: Literal["enable", "disable"], + score_threshold: float, + embedding_provider_name: str, + embedding_model_name: str, + ): + """Enable or disable annotation reply feature.""" + data = { + "score_threshold": score_threshold, + "embedding_provider_name": embedding_provider_name, + "embedding_model_name": embedding_model_name, + } + return await self._send_request("POST", f"/apps/annotation-reply/{action}", json=data) + + async def get_annotation_reply_status(self, action: Literal["enable", "disable"], job_id: str): + """Get the status of an annotation reply action job.""" + return await self._send_request("GET", f"/apps/annotation-reply/{action}/status/{job_id}") + + async def list_annotations(self, page: int = 1, limit: int = 20, keyword: str | None = None): + """List annotations for the application.""" + params = {"page": page, "limit": limit, "keyword": keyword} + return await self._send_request("GET", "/apps/annotations", params=params) + + 
async def create_annotation(self, question: str, answer: str): + """Create a new annotation.""" + data = {"question": question, "answer": answer} + return await self._send_request("POST", "/apps/annotations", json=data) + + async def update_annotation(self, annotation_id: str, question: str, answer: str): + """Update an existing annotation.""" + data = {"question": question, "answer": answer} + return await self._send_request("PUT", f"/apps/annotations/{annotation_id}", json=data) + + async def delete_annotation(self, annotation_id: str): + """Delete an annotation.""" + return await self._send_request("DELETE", f"/apps/annotations/{annotation_id}") + + # Conversation Variables APIs + async def get_conversation_variables(self, conversation_id: str, user: str): + """Get all variables for a specific conversation. + + Args: + conversation_id: The conversation ID to query variables for + user: User identifier + + Returns: + Response from the API containing: + - variables: List of conversation variables with their values + - conversation_id: The conversation ID + """ + params = {"user": user} + url = f"/conversations/{conversation_id}/variables" + return await self._send_request("GET", url, params=params) + + async def update_conversation_variable(self, conversation_id: str, variable_id: str, value: Any, user: str): + """Update a specific conversation variable. + + Args: + conversation_id: The conversation ID + variable_id: The variable ID to update + value: New value for the variable + user: User identifier + + Returns: + Response from the API with updated variable information + """ + data = {"value": value, "user": user} + url = f"/conversations/{conversation_id}/variables/{variable_id}" + return await self._send_request("PATCH", url, json=data) + + +class AsyncWorkflowClient(AsyncDifyClient): + """Async client for Workflow API operations.""" + + async def run( + self, + inputs: dict, + response_mode: Literal["blocking", "streaming"] = "streaming", + user: str = "abc-123", + ): + """Run a workflow.""" + data = {"inputs": inputs, "response_mode": response_mode, "user": user} + return await self._send_request("POST", "/workflows/run", data) + + async def stop(self, task_id: str, user: str): + """Stop a running workflow task.""" + data = {"user": user} + return await self._send_request("POST", f"/workflows/tasks/{task_id}/stop", data) + + async def get_result(self, workflow_run_id: str): + """Get workflow run result.""" + return await self._send_request("GET", f"/workflows/run/{workflow_run_id}") + + async def get_workflow_logs( + self, + keyword: str = None, + status: Literal["succeeded", "failed", "stopped"] | None = None, + page: int = 1, + limit: int = 20, + created_at__before: str = None, + created_at__after: str = None, + created_by_end_user_session_id: str = None, + created_by_account: str = None, + ): + """Get workflow execution logs with optional filtering.""" + params = { + "page": page, + "limit": limit, + "keyword": keyword, + "status": status, + "created_at__before": created_at__before, + "created_at__after": created_at__after, + "created_by_end_user_session_id": created_by_end_user_session_id, + "created_by_account": created_by_account, + } + return await self._send_request("GET", "/workflows/logs", params=params) + + async def run_specific_workflow( + self, + workflow_id: str, + inputs: dict, + response_mode: Literal["blocking", "streaming"] = "streaming", + user: str = "abc-123", + ): + """Run a specific workflow by workflow ID.""" + data = {"inputs": inputs, "response_mode": 
response_mode, "user": user} + return await self._send_request( + "POST", + f"/workflows/{workflow_id}/run", + data, + stream=(response_mode == "streaming"), + ) + + +class AsyncWorkspaceClient(AsyncDifyClient): + """Async client for workspace-related operations.""" + + async def get_available_models(self, model_type: str): + """Get available models by model type.""" + url = f"/workspaces/current/models/model-types/{model_type}" + return await self._send_request("GET", url) + + +class AsyncKnowledgeBaseClient(AsyncDifyClient): + """Async client for Knowledge Base API operations.""" + + def __init__( + self, + api_key: str, + base_url: str = "https://api.dify.ai/v1", + dataset_id: str | None = None, + timeout: float = 60.0, + ): + """Construct an AsyncKnowledgeBaseClient object. + + Args: + api_key: API key of Dify + base_url: Base URL of Dify API + dataset_id: ID of the dataset + timeout: Request timeout in seconds + """ + super().__init__(api_key=api_key, base_url=base_url, timeout=timeout) + self.dataset_id = dataset_id + + def _get_dataset_id(self): + """Get the dataset ID, raise error if not set.""" + if self.dataset_id is None: + raise ValueError("dataset_id is not set") + return self.dataset_id + + async def create_dataset(self, name: str, **kwargs): + """Create a new dataset.""" + return await self._send_request("POST", "/datasets", {"name": name}, **kwargs) + + async def list_datasets(self, page: int = 1, page_size: int = 20, **kwargs): + """List all datasets.""" + return await self._send_request("GET", "/datasets", params={"page": page, "limit": page_size}, **kwargs) + + async def create_document_by_text(self, name: str, text: str, extra_params: dict | None = None, **kwargs): + """Create a document by text. + + Args: + name: Name of the document + text: Text content of the document + extra_params: Extra parameters for the API + + Returns: + Response from the API + """ + data = { + "indexing_technique": "high_quality", + "process_rule": {"mode": "automatic"}, + "name": name, + "text": text, + } + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + url = f"/datasets/{self._get_dataset_id()}/document/create_by_text" + return await self._send_request("POST", url, json=data, **kwargs) + + async def update_document_by_text( + self, + document_id: str, + name: str, + text: str, + extra_params: dict | None = None, + **kwargs, + ): + """Update a document by text.""" + data = {"name": name, "text": text} + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_text" + return await self._send_request("POST", url, json=data, **kwargs) + + async def create_document_by_file( + self, + file_path: str, + original_document_id: str | None = None, + extra_params: dict | None = None, + ): + """Create a document by file.""" + async with aiofiles.open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + data = { + "process_rule": {"mode": "automatic"}, + "indexing_technique": "high_quality", + } + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + if original_document_id is not None: + data["original_document_id"] = original_document_id + url = f"/datasets/{self._get_dataset_id()}/document/create_by_file" + return await self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) + + async def update_document_by_file(self, document_id: str, file_path: str, 
extra_params: dict | None = None): + """Update a document by file.""" + async with aiofiles.open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + data = {} + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file" + return await self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) + + async def batch_indexing_status(self, batch_id: str, **kwargs): + """Get the status of the batch indexing.""" + url = f"/datasets/{self._get_dataset_id()}/documents/{batch_id}/indexing-status" + return await self._send_request("GET", url, **kwargs) + + async def delete_dataset(self): + """Delete this dataset.""" + url = f"/datasets/{self._get_dataset_id()}" + return await self._send_request("DELETE", url) + + async def delete_document(self, document_id: str): + """Delete a document.""" + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}" + return await self._send_request("DELETE", url) + + async def list_documents( + self, + page: int | None = None, + page_size: int | None = None, + keyword: str | None = None, + **kwargs, + ): + """Get a list of documents in this dataset.""" + params = { + "page": page, + "limit": page_size, + "keyword": keyword, + } + url = f"/datasets/{self._get_dataset_id()}/documents" + return await self._send_request("GET", url, params=params, **kwargs) + + async def add_segments(self, document_id: str, segments: list[dict], **kwargs): + """Add segments to a document.""" + data = {"segments": segments} + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments" + return await self._send_request("POST", url, json=data, **kwargs) + + async def query_segments( + self, + document_id: str, + keyword: str | None = None, + status: str | None = None, + **kwargs, + ): + """Query segments in this document. + + Args: + document_id: ID of the document + keyword: Query keyword (optional) + status: Status of the segment (optional, e.g., 'completed') + **kwargs: Additional parameters to pass to the API. + Can include a 'params' dict for extra query parameters. 
+ + Returns: + Response from the API + """ + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments" + params = { + "keyword": keyword, + "status": status, + } + if "params" in kwargs: + params.update(kwargs.pop("params")) + return await self._send_request("GET", url, params=params, **kwargs) + + async def delete_document_segment(self, document_id: str, segment_id: str): + """Delete a segment from a document.""" + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}" + return await self._send_request("DELETE", url) + + async def update_document_segment(self, document_id: str, segment_id: str, segment_data: dict, **kwargs): + """Update a segment in a document.""" + data = {"segment": segment_data} + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}" + return await self._send_request("POST", url, json=data, **kwargs) + + # Advanced Knowledge Base APIs + async def hit_testing( + self, + query: str, + retrieval_model: Dict[str, Any] = None, + external_retrieval_model: Dict[str, Any] = None, + ): + """Perform hit testing on the dataset.""" + data = {"query": query} + if retrieval_model: + data["retrieval_model"] = retrieval_model + if external_retrieval_model: + data["external_retrieval_model"] = external_retrieval_model + url = f"/datasets/{self._get_dataset_id()}/hit-testing" + return await self._send_request("POST", url, json=data) + + async def get_dataset_metadata(self): + """Get dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata" + return await self._send_request("GET", url) + + async def create_dataset_metadata(self, metadata_data: Dict[str, Any]): + """Create dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata" + return await self._send_request("POST", url, json=metadata_data) + + async def update_dataset_metadata(self, metadata_id: str, metadata_data: Dict[str, Any]): + """Update dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata/{metadata_id}" + return await self._send_request("PATCH", url, json=metadata_data) + + async def get_built_in_metadata(self): + """Get built-in metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata/built-in" + return await self._send_request("GET", url) + + async def manage_built_in_metadata(self, action: str, metadata_data: Dict[str, Any] = None): + """Manage built-in metadata with specified action.""" + data = metadata_data or {} + url = f"/datasets/{self._get_dataset_id()}/metadata/built-in/{action}" + return await self._send_request("POST", url, json=data) + + async def update_documents_metadata(self, operation_data: List[Dict[str, Any]]): + """Update metadata for multiple documents.""" + url = f"/datasets/{self._get_dataset_id()}/documents/metadata" + data = {"operation_data": operation_data} + return await self._send_request("POST", url, json=data) + + # Dataset Tags APIs + async def list_dataset_tags(self): + """List all dataset tags.""" + return await self._send_request("GET", "/datasets/tags") + + async def bind_dataset_tags(self, tag_ids: List[str]): + """Bind tags to dataset.""" + data = {"tag_ids": tag_ids, "target_id": self._get_dataset_id()} + return await self._send_request("POST", "/datasets/tags/binding", json=data) + + async def unbind_dataset_tag(self, tag_id: str): + """Unbind a single tag from dataset.""" + data = {"tag_id": tag_id, "target_id": self._get_dataset_id()} + return await self._send_request("POST", "/datasets/tags/unbinding", json=data) + + async def 
get_dataset_tags(self): + """Get tags for current dataset.""" + url = f"/datasets/{self._get_dataset_id()}/tags" + return await self._send_request("GET", url) + + # RAG Pipeline APIs + async def get_datasource_plugins(self, is_published: bool = True): + """Get datasource plugins for RAG pipeline.""" + params = {"is_published": is_published} + url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource-plugins" + return await self._send_request("GET", url, params=params) + + async def run_datasource_node( + self, + node_id: str, + inputs: Dict[str, Any], + datasource_type: str, + is_published: bool = True, + credential_id: str = None, + ): + """Run a datasource node in RAG pipeline.""" + data = { + "inputs": inputs, + "datasource_type": datasource_type, + "is_published": is_published, + } + if credential_id: + data["credential_id"] = credential_id + url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource/nodes/{node_id}/run" + return await self._send_request("POST", url, json=data, stream=True) + + async def run_rag_pipeline( + self, + inputs: Dict[str, Any], + datasource_type: str, + datasource_info_list: List[Dict[str, Any]], + start_node_id: str, + is_published: bool = True, + response_mode: Literal["streaming", "blocking"] = "blocking", + ): + """Run RAG pipeline.""" + data = { + "inputs": inputs, + "datasource_type": datasource_type, + "datasource_info_list": datasource_info_list, + "start_node_id": start_node_id, + "is_published": is_published, + "response_mode": response_mode, + } + url = f"/datasets/{self._get_dataset_id()}/pipeline/run" + return await self._send_request("POST", url, json=data, stream=response_mode == "streaming") + + async def upload_pipeline_file(self, file_path: str): + """Upload file for RAG pipeline.""" + async with aiofiles.open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + return await self._send_request_with_files("POST", "/datasets/pipeline/file-upload", {}, files) + + # Dataset Management APIs + async def get_dataset(self, dataset_id: str | None = None): + """Get detailed information about a specific dataset.""" + ds_id = dataset_id or self._get_dataset_id() + url = f"/datasets/{ds_id}" + return await self._send_request("GET", url) + + async def update_dataset( + self, + dataset_id: str | None = None, + name: str | None = None, + description: str | None = None, + indexing_technique: str | None = None, + embedding_model: str | None = None, + embedding_model_provider: str | None = None, + retrieval_model: Dict[str, Any] | None = None, + **kwargs, + ): + """Update dataset configuration. 
+ + Args: + dataset_id: Dataset ID (optional, uses current dataset_id if not provided) + name: New dataset name + description: New dataset description + indexing_technique: Indexing technique ('high_quality' or 'economy') + embedding_model: Embedding model name + embedding_model_provider: Embedding model provider + retrieval_model: Retrieval model configuration dict + **kwargs: Additional parameters to pass to the API + + Returns: + Response from the API with updated dataset information + """ + ds_id = dataset_id or self._get_dataset_id() + url = f"/datasets/{ds_id}" + + payload = { + "name": name, + "description": description, + "indexing_technique": indexing_technique, + "embedding_model": embedding_model, + "embedding_model_provider": embedding_model_provider, + "retrieval_model": retrieval_model, + } + + data = {k: v for k, v in payload.items() if v is not None} + data.update(kwargs) + + return await self._send_request("PATCH", url, json=data) + + async def batch_update_document_status( + self, + action: Literal["enable", "disable", "archive", "un_archive"], + document_ids: List[str], + dataset_id: str | None = None, + ): + """Batch update document status.""" + ds_id = dataset_id or self._get_dataset_id() + url = f"/datasets/{ds_id}/documents/status/{action}" + data = {"document_ids": document_ids} + return await self._send_request("PATCH", url, json=data) diff --git a/sdks/python-client/dify_client/client.py b/sdks/python-client/dify_client/client.py index 791cb98a1b..41c5abe16d 100644 --- a/sdks/python-client/dify_client/client.py +++ b/sdks/python-client/dify_client/client.py @@ -1,31 +1,114 @@ import json -from typing import Literal -import requests +import os +from typing import Literal, Dict, List, Any, IO + +import httpx class DifyClient: - def __init__(self, api_key, base_url: str = "https://api.dify.ai/v1"): + """Synchronous Dify API client. + + This client uses httpx.Client for efficient connection pooling and resource management. + It's recommended to use this client as a context manager: + + Example: + with DifyClient(api_key="your-key") as client: + response = client.get_app_info() + """ + + def __init__( + self, + api_key: str, + base_url: str = "https://api.dify.ai/v1", + timeout: float = 60.0, + ): + """Initialize the Dify client. + + Args: + api_key: Your Dify API key + base_url: Base URL for the Dify API + timeout: Request timeout in seconds (default: 60.0) + """ self.api_key = api_key self.base_url = base_url + self._client = httpx.Client( + base_url=base_url, + timeout=httpx.Timeout(timeout, connect=5.0), + ) + + def __enter__(self): + """Support context manager protocol.""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Clean up resources when exiting context.""" + self.close() + + def close(self): + """Close the HTTP client and release resources.""" + if hasattr(self, "_client"): + self._client.close() def _send_request( - self, method: str, endpoint: str, json: dict | None = None, params: dict | None = None, stream: bool = False + self, + method: str, + endpoint: str, + json: dict | None = None, + params: dict | None = None, + stream: bool = False, + **kwargs, ): + """Send an HTTP request to the Dify API. 
+ + Args: + method: HTTP method (GET, POST, PUT, PATCH, DELETE) + endpoint: API endpoint path + json: JSON request body + params: Query parameters + stream: Whether to stream the response + **kwargs: Additional arguments to pass to httpx.request + + Returns: + httpx.Response object + """ headers = { "Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json", } - url = f"{self.base_url}{endpoint}" - response = requests.request(method, url, json=json, params=params, headers=headers, stream=stream) + # httpx.Client automatically prepends base_url + response = self._client.request( + method, + endpoint, + json=json, + params=params, + headers=headers, + **kwargs, + ) return response - def _send_request_with_files(self, method, endpoint, data, files): + def _send_request_with_files(self, method: str, endpoint: str, data: dict, files: dict): + """Send an HTTP request with file uploads. + + Args: + method: HTTP method (POST, PUT, etc.) + endpoint: API endpoint path + data: Form data + files: Files to upload + + Returns: + httpx.Response object + """ headers = {"Authorization": f"Bearer {self.api_key}"} - url = f"{self.base_url}{endpoint}" - response = requests.request(method, url, data=data, headers=headers, files=files) + response = self._client.request( + method, + endpoint, + data=data, + headers=headers, + files=files, + ) return response @@ -49,10 +132,26 @@ class DifyClient: params = {"user": user} return self._send_request("GET", "/meta", params=params) + def get_app_info(self): + """Get basic application information including name, description, tags, and mode.""" + return self._send_request("GET", "/info") + + def get_app_site_info(self): + """Get application site information.""" + return self._send_request("GET", "/site") + + def get_file_preview(self, file_id: str): + """Get file preview by file ID.""" + return self._send_request("GET", f"/files/{file_id}/preview") + class CompletionClient(DifyClient): def create_completion_message( - self, inputs: dict, response_mode: Literal["blocking", "streaming"], user: str, files: dict | None = None + self, + inputs: dict, + response_mode: Literal["blocking", "streaming"], + user: str, + files: dict | None = None, ): data = { "inputs": inputs, @@ -64,7 +163,7 @@ class CompletionClient(DifyClient): "POST", "/completion-messages", data, - stream=True if response_mode == "streaming" else False, + stream=(response_mode == "streaming"), ) @@ -92,7 +191,7 @@ class ChatClient(DifyClient): "POST", "/chat-messages", data, - stream=True if response_mode == "streaming" else False, + stream=(response_mode == "streaming"), ) def get_suggested(self, message_id: str, user: str): @@ -139,14 +238,91 @@ class ChatClient(DifyClient): data = {"user": user} return self._send_request("DELETE", f"/conversations/{conversation_id}", data) - def audio_to_text(self, audio_file: dict, user: str): + def audio_to_text(self, audio_file: IO[bytes] | tuple, user: str): data = {"user": user} - files = {"audio_file": audio_file} + files = {"file": audio_file} return self._send_request_with_files("POST", "/audio-to-text", data, files) + # Annotation APIs + def annotation_reply_action( + self, + action: Literal["enable", "disable"], + score_threshold: float, + embedding_provider_name: str, + embedding_model_name: str, + ): + """Enable or disable annotation reply feature.""" + data = { + "score_threshold": score_threshold, + "embedding_provider_name": embedding_provider_name, + "embedding_model_name": embedding_model_name, + } + return self._send_request("POST", 
f"/apps/annotation-reply/{action}", json=data) + + def get_annotation_reply_status(self, action: Literal["enable", "disable"], job_id: str): + """Get the status of an annotation reply action job.""" + return self._send_request("GET", f"/apps/annotation-reply/{action}/status/{job_id}") + + def list_annotations(self, page: int = 1, limit: int = 20, keyword: str | None = None): + """List annotations for the application.""" + params = {"page": page, "limit": limit, "keyword": keyword} + return self._send_request("GET", "/apps/annotations", params=params) + + def create_annotation(self, question: str, answer: str): + """Create a new annotation.""" + data = {"question": question, "answer": answer} + return self._send_request("POST", "/apps/annotations", json=data) + + def update_annotation(self, annotation_id: str, question: str, answer: str): + """Update an existing annotation.""" + data = {"question": question, "answer": answer} + return self._send_request("PUT", f"/apps/annotations/{annotation_id}", json=data) + + def delete_annotation(self, annotation_id: str): + """Delete an annotation.""" + return self._send_request("DELETE", f"/apps/annotations/{annotation_id}") + + # Conversation Variables APIs + def get_conversation_variables(self, conversation_id: str, user: str): + """Get all variables for a specific conversation. + + Args: + conversation_id: The conversation ID to query variables for + user: User identifier + + Returns: + Response from the API containing: + - variables: List of conversation variables with their values + - conversation_id: The conversation ID + """ + params = {"user": user} + url = f"/conversations/{conversation_id}/variables" + return self._send_request("GET", url, params=params) + + def update_conversation_variable(self, conversation_id: str, variable_id: str, value: Any, user: str): + """Update a specific conversation variable. 
+ + Args: + conversation_id: The conversation ID + variable_id: The variable ID to update + value: New value for the variable + user: User identifier + + Returns: + Response from the API with updated variable information + """ + data = {"value": value, "user": user} + url = f"/conversations/{conversation_id}/variables/{variable_id}" + return self._send_request("PATCH", url, json=data) + class WorkflowClient(DifyClient): - def run(self, inputs: dict, response_mode: Literal["blocking", "streaming"] = "streaming", user: str = "abc-123"): + def run( + self, + inputs: dict, + response_mode: Literal["blocking", "streaming"] = "streaming", + user: str = "abc-123", + ): data = {"inputs": inputs, "response_mode": response_mode, "user": user} return self._send_request("POST", "/workflows/run", data) @@ -157,6 +333,58 @@ class WorkflowClient(DifyClient): def get_result(self, workflow_run_id): return self._send_request("GET", f"/workflows/run/{workflow_run_id}") + def get_workflow_logs( + self, + keyword: str = None, + status: Literal["succeeded", "failed", "stopped"] | None = None, + page: int = 1, + limit: int = 20, + created_at__before: str = None, + created_at__after: str = None, + created_by_end_user_session_id: str = None, + created_by_account: str = None, + ): + """Get workflow execution logs with optional filtering.""" + params = {"page": page, "limit": limit} + if keyword: + params["keyword"] = keyword + if status: + params["status"] = status + if created_at__before: + params["created_at__before"] = created_at__before + if created_at__after: + params["created_at__after"] = created_at__after + if created_by_end_user_session_id: + params["created_by_end_user_session_id"] = created_by_end_user_session_id + if created_by_account: + params["created_by_account"] = created_by_account + return self._send_request("GET", "/workflows/logs", params=params) + + def run_specific_workflow( + self, + workflow_id: str, + inputs: dict, + response_mode: Literal["blocking", "streaming"] = "streaming", + user: str = "abc-123", + ): + """Run a specific workflow by workflow ID.""" + data = {"inputs": inputs, "response_mode": response_mode, "user": user} + return self._send_request( + "POST", + f"/workflows/{workflow_id}/run", + data, + stream=(response_mode == "streaming"), + ) + + +class WorkspaceClient(DifyClient): + """Client for workspace-related operations.""" + + def get_available_models(self, model_type: str): + """Get available models by model type.""" + url = f"/workspaces/current/models/model-types/{model_type}" + return self._send_request("GET", url) + class KnowledgeBaseClient(DifyClient): def __init__( @@ -186,7 +414,7 @@ class KnowledgeBaseClient(DifyClient): return self._send_request("POST", "/datasets", {"name": name}, **kwargs) def list_datasets(self, page: int = 1, page_size: int = 20, **kwargs): - return self._send_request("GET", f"/datasets?page={page}&limit={page_size}", **kwargs) + return self._send_request("GET", "/datasets", params={"page": page, "limit": page_size}, **kwargs) def create_document_by_text(self, name, text, extra_params: dict | None = None, **kwargs): """ @@ -226,7 +454,12 @@ class KnowledgeBaseClient(DifyClient): return self._send_request("POST", url, json=data, **kwargs) def update_document_by_text( - self, document_id: str, name: str, text: str, extra_params: dict | None = None, **kwargs + self, + document_id: str, + name: str, + text: str, + extra_params: dict | None = None, + **kwargs, ): """ Update a document by text. 
@@ -261,7 +494,10 @@ class KnowledgeBaseClient(DifyClient): return self._send_request("POST", url, json=data, **kwargs) def create_document_by_file( - self, file_path: str, original_document_id: str | None = None, extra_params: dict | None = None + self, + file_path: str, + original_document_id: str | None = None, + extra_params: dict | None = None, ): """ Create a document by file. @@ -288,17 +524,18 @@ class KnowledgeBaseClient(DifyClient): } :return: Response from the API """ - files = {"file": open(file_path, "rb")} - data = { - "process_rule": {"mode": "automatic"}, - "indexing_technique": "high_quality", - } - if extra_params is not None and isinstance(extra_params, dict): - data.update(extra_params) - if original_document_id is not None: - data["original_document_id"] = original_document_id - url = f"/datasets/{self._get_dataset_id()}/document/create_by_file" - return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) + with open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + data = { + "process_rule": {"mode": "automatic"}, + "indexing_technique": "high_quality", + } + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + if original_document_id is not None: + data["original_document_id"] = original_document_id + url = f"/datasets/{self._get_dataset_id()}/document/create_by_file" + return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) def update_document_by_file(self, document_id: str, file_path: str, extra_params: dict | None = None): """ @@ -326,12 +563,13 @@ class KnowledgeBaseClient(DifyClient): } :return: """ - files = {"file": open(file_path, "rb")} - data = {} - if extra_params is not None and isinstance(extra_params, dict): - data.update(extra_params) - url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file" - return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) + with open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + data = {} + if extra_params is not None and isinstance(extra_params, dict): + data.update(extra_params) + url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file" + return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files) def batch_indexing_status(self, batch_id: str, **kwargs): """ @@ -409,6 +647,8 @@ class KnowledgeBaseClient(DifyClient): :param document_id: ID of the document :param keyword: query keyword, optional :param status: status of the segment, optional, e.g. completed + :param kwargs: Additional parameters to pass to the API. + Can include a 'params' dict for extra query parameters. 
""" url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments" params = {} @@ -417,7 +657,7 @@ class KnowledgeBaseClient(DifyClient): if status is not None: params["status"] = status if "params" in kwargs: - params.update(kwargs["params"]) + params.update(kwargs.pop("params")) return self._send_request("GET", url, params=params, **kwargs) def delete_document_segment(self, document_id: str, segment_id: str): @@ -443,3 +683,213 @@ class KnowledgeBaseClient(DifyClient): data = {"segment": segment_data} url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}" return self._send_request("POST", url, json=data, **kwargs) + + # Advanced Knowledge Base APIs + def hit_testing( + self, + query: str, + retrieval_model: Dict[str, Any] = None, + external_retrieval_model: Dict[str, Any] = None, + ): + """Perform hit testing on the dataset.""" + data = {"query": query} + if retrieval_model: + data["retrieval_model"] = retrieval_model + if external_retrieval_model: + data["external_retrieval_model"] = external_retrieval_model + url = f"/datasets/{self._get_dataset_id()}/hit-testing" + return self._send_request("POST", url, json=data) + + def get_dataset_metadata(self): + """Get dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata" + return self._send_request("GET", url) + + def create_dataset_metadata(self, metadata_data: Dict[str, Any]): + """Create dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata" + return self._send_request("POST", url, json=metadata_data) + + def update_dataset_metadata(self, metadata_id: str, metadata_data: Dict[str, Any]): + """Update dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata/{metadata_id}" + return self._send_request("PATCH", url, json=metadata_data) + + def get_built_in_metadata(self): + """Get built-in metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata/built-in" + return self._send_request("GET", url) + + def manage_built_in_metadata(self, action: str, metadata_data: Dict[str, Any] = None): + """Manage built-in metadata with specified action.""" + data = metadata_data or {} + url = f"/datasets/{self._get_dataset_id()}/metadata/built-in/{action}" + return self._send_request("POST", url, json=data) + + def update_documents_metadata(self, operation_data: List[Dict[str, Any]]): + """Update metadata for multiple documents.""" + url = f"/datasets/{self._get_dataset_id()}/documents/metadata" + data = {"operation_data": operation_data} + return self._send_request("POST", url, json=data) + + # Dataset Tags APIs + def list_dataset_tags(self): + """List all dataset tags.""" + return self._send_request("GET", "/datasets/tags") + + def bind_dataset_tags(self, tag_ids: List[str]): + """Bind tags to dataset.""" + data = {"tag_ids": tag_ids, "target_id": self._get_dataset_id()} + return self._send_request("POST", "/datasets/tags/binding", json=data) + + def unbind_dataset_tag(self, tag_id: str): + """Unbind a single tag from dataset.""" + data = {"tag_id": tag_id, "target_id": self._get_dataset_id()} + return self._send_request("POST", "/datasets/tags/unbinding", json=data) + + def get_dataset_tags(self): + """Get tags for current dataset.""" + url = f"/datasets/{self._get_dataset_id()}/tags" + return self._send_request("GET", url) + + # RAG Pipeline APIs + def get_datasource_plugins(self, is_published: bool = True): + """Get datasource plugins for RAG pipeline.""" + params = {"is_published": is_published} + url = 
f"/datasets/{self._get_dataset_id()}/pipeline/datasource-plugins" + return self._send_request("GET", url, params=params) + + def run_datasource_node( + self, + node_id: str, + inputs: Dict[str, Any], + datasource_type: str, + is_published: bool = True, + credential_id: str = None, + ): + """Run a datasource node in RAG pipeline.""" + data = { + "inputs": inputs, + "datasource_type": datasource_type, + "is_published": is_published, + } + if credential_id: + data["credential_id"] = credential_id + url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource/nodes/{node_id}/run" + return self._send_request("POST", url, json=data, stream=True) + + def run_rag_pipeline( + self, + inputs: Dict[str, Any], + datasource_type: str, + datasource_info_list: List[Dict[str, Any]], + start_node_id: str, + is_published: bool = True, + response_mode: Literal["streaming", "blocking"] = "blocking", + ): + """Run RAG pipeline.""" + data = { + "inputs": inputs, + "datasource_type": datasource_type, + "datasource_info_list": datasource_info_list, + "start_node_id": start_node_id, + "is_published": is_published, + "response_mode": response_mode, + } + url = f"/datasets/{self._get_dataset_id()}/pipeline/run" + return self._send_request("POST", url, json=data, stream=response_mode == "streaming") + + def upload_pipeline_file(self, file_path: str): + """Upload file for RAG pipeline.""" + with open(file_path, "rb") as f: + files = {"file": (os.path.basename(file_path), f)} + return self._send_request_with_files("POST", "/datasets/pipeline/file-upload", {}, files) + + # Dataset Management APIs + def get_dataset(self, dataset_id: str | None = None): + """Get detailed information about a specific dataset. + + Args: + dataset_id: Dataset ID (optional, uses current dataset_id if not provided) + + Returns: + Response from the API containing dataset details including: + - name, description, permission + - indexing_technique, embedding_model, embedding_model_provider + - retrieval_model configuration + - document_count, word_count, app_count + - created_at, updated_at + """ + ds_id = dataset_id or self._get_dataset_id() + url = f"/datasets/{ds_id}" + return self._send_request("GET", url) + + def update_dataset( + self, + dataset_id: str | None = None, + name: str | None = None, + description: str | None = None, + indexing_technique: str | None = None, + embedding_model: str | None = None, + embedding_model_provider: str | None = None, + retrieval_model: Dict[str, Any] | None = None, + **kwargs, + ): + """Update dataset configuration. 
+ + Args: + dataset_id: Dataset ID (optional, uses current dataset_id if not provided) + name: New dataset name + description: New dataset description + indexing_technique: Indexing technique ('high_quality' or 'economy') + embedding_model: Embedding model name + embedding_model_provider: Embedding model provider + retrieval_model: Retrieval model configuration dict + **kwargs: Additional parameters to pass to the API + + Returns: + Response from the API with updated dataset information + """ + ds_id = dataset_id or self._get_dataset_id() + url = f"/datasets/{ds_id}" + + # Build data dictionary with all possible parameters + payload = { + "name": name, + "description": description, + "indexing_technique": indexing_technique, + "embedding_model": embedding_model, + "embedding_model_provider": embedding_model_provider, + "retrieval_model": retrieval_model, + } + + # Filter out None values and merge with additional kwargs + data = {k: v for k, v in payload.items() if v is not None} + data.update(kwargs) + + return self._send_request("PATCH", url, json=data) + + def batch_update_document_status( + self, + action: Literal["enable", "disable", "archive", "un_archive"], + document_ids: List[str], + dataset_id: str | None = None, + ): + """Batch update document status (enable/disable/archive/unarchive). + + Args: + action: Action to perform on documents + - 'enable': Enable documents for retrieval + - 'disable': Disable documents from retrieval + - 'archive': Archive documents + - 'un_archive': Unarchive documents + document_ids: List of document IDs to update + dataset_id: Dataset ID (optional, uses current dataset_id if not provided) + + Returns: + Response from the API with operation result + """ + ds_id = dataset_id or self._get_dataset_id() + url = f"/datasets/{ds_id}/documents/status/{action}" + data = {"document_ids": document_ids} + return self._send_request("PATCH", url, json=data) diff --git a/sdks/python-client/pyproject.toml b/sdks/python-client/pyproject.toml new file mode 100644 index 0000000000..db02cbd6e3 --- /dev/null +++ b/sdks/python-client/pyproject.toml @@ -0,0 +1,43 @@ +[project] +name = "dify-client" +version = "0.1.12" +description = "A package for interacting with the Dify Service-API" +readme = "README.md" +requires-python = ">=3.10" +dependencies = [ + "httpx>=0.27.0", + "aiofiles>=23.0.0", +] +authors = [ + {name = "Dify", email = "hello@dify.ai"} +] +license = {text = "MIT"} +keywords = ["dify", "nlp", "ai", "language-processing"] +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] + +[project.urls] +Homepage = "https://github.com/langgenius/dify" + +[project.optional-dependencies] +dev = [ + "pytest>=7.0.0", + "pytest-asyncio>=0.21.0", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["dify_client"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = ["test_*.py"] +python_classes = ["Test*"] +python_functions = ["test_*"] +asyncio_mode = "auto" diff --git a/sdks/python-client/setup.py b/sdks/python-client/setup.py deleted file mode 100644 index a05f6410fb..0000000000 --- a/sdks/python-client/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -from setuptools import setup - -with open("README.md", encoding="utf-8") as fh: - long_description = fh.read() - -setup( - name="dify-client", - version="0.1.12", - author="Dify", - author_email="hello@dify.ai", - description="A package for interacting 
with the Dify Service-API", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/langgenius/dify", - license="MIT", - packages=["dify_client"], - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - ], - python_requires=">=3.6", - install_requires=["requests"], - keywords="dify nlp ai language-processing", - include_package_data=True, -) diff --git a/sdks/python-client/tests/test_async_client.py b/sdks/python-client/tests/test_async_client.py new file mode 100644 index 0000000000..4f5001866f --- /dev/null +++ b/sdks/python-client/tests/test_async_client.py @@ -0,0 +1,250 @@ +#!/usr/bin/env python3 +""" +Test suite for async client implementation in the Python SDK. + +This test validates the async/await functionality using httpx.AsyncClient +and ensures API parity with sync clients. +""" + +import unittest +from unittest.mock import Mock, patch, AsyncMock + +from dify_client.async_client import ( + AsyncDifyClient, + AsyncChatClient, + AsyncCompletionClient, + AsyncWorkflowClient, + AsyncWorkspaceClient, + AsyncKnowledgeBaseClient, +) + + +class TestAsyncAPIParity(unittest.TestCase): + """Test that async clients have API parity with sync clients.""" + + def test_dify_client_api_parity(self): + """Test AsyncDifyClient has same methods as DifyClient.""" + from dify_client import DifyClient + + sync_methods = {name for name in dir(DifyClient) if not name.startswith("_")} + async_methods = {name for name in dir(AsyncDifyClient) if not name.startswith("_")} + + # aclose is async-specific, close is sync-specific + sync_methods.discard("close") + async_methods.discard("aclose") + + # Verify parity + self.assertEqual(sync_methods, async_methods, "API parity mismatch for DifyClient") + + def test_chat_client_api_parity(self): + """Test AsyncChatClient has same methods as ChatClient.""" + from dify_client import ChatClient + + sync_methods = {name for name in dir(ChatClient) if not name.startswith("_")} + async_methods = {name for name in dir(AsyncChatClient) if not name.startswith("_")} + + sync_methods.discard("close") + async_methods.discard("aclose") + + self.assertEqual(sync_methods, async_methods, "API parity mismatch for ChatClient") + + def test_completion_client_api_parity(self): + """Test AsyncCompletionClient has same methods as CompletionClient.""" + from dify_client import CompletionClient + + sync_methods = {name for name in dir(CompletionClient) if not name.startswith("_")} + async_methods = {name for name in dir(AsyncCompletionClient) if not name.startswith("_")} + + sync_methods.discard("close") + async_methods.discard("aclose") + + self.assertEqual(sync_methods, async_methods, "API parity mismatch for CompletionClient") + + def test_workflow_client_api_parity(self): + """Test AsyncWorkflowClient has same methods as WorkflowClient.""" + from dify_client import WorkflowClient + + sync_methods = {name for name in dir(WorkflowClient) if not name.startswith("_")} + async_methods = {name for name in dir(AsyncWorkflowClient) if not name.startswith("_")} + + sync_methods.discard("close") + async_methods.discard("aclose") + + self.assertEqual(sync_methods, async_methods, "API parity mismatch for WorkflowClient") + + def test_workspace_client_api_parity(self): + """Test AsyncWorkspaceClient has same methods as WorkspaceClient.""" + from dify_client import WorkspaceClient + + sync_methods = {name for name in dir(WorkspaceClient) if not 
name.startswith("_")} + async_methods = {name for name in dir(AsyncWorkspaceClient) if not name.startswith("_")} + + sync_methods.discard("close") + async_methods.discard("aclose") + + self.assertEqual(sync_methods, async_methods, "API parity mismatch for WorkspaceClient") + + def test_knowledge_base_client_api_parity(self): + """Test AsyncKnowledgeBaseClient has same methods as KnowledgeBaseClient.""" + from dify_client import KnowledgeBaseClient + + sync_methods = {name for name in dir(KnowledgeBaseClient) if not name.startswith("_")} + async_methods = {name for name in dir(AsyncKnowledgeBaseClient) if not name.startswith("_")} + + sync_methods.discard("close") + async_methods.discard("aclose") + + self.assertEqual(sync_methods, async_methods, "API parity mismatch for KnowledgeBaseClient") + + +class TestAsyncClientMocked(unittest.IsolatedAsyncioTestCase): + """Test async client with mocked httpx.AsyncClient.""" + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_client_initialization(self, mock_httpx_async_client): + """Test async client initializes with httpx.AsyncClient.""" + mock_client_instance = AsyncMock() + mock_httpx_async_client.return_value = mock_client_instance + + client = AsyncDifyClient("test-key", "https://api.dify.ai/v1") + + # Verify httpx.AsyncClient was called + mock_httpx_async_client.assert_called_once() + self.assertEqual(client.api_key, "test-key") + + await client.aclose() + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_context_manager(self, mock_httpx_async_client): + """Test async context manager works.""" + mock_client_instance = AsyncMock() + mock_httpx_async_client.return_value = mock_client_instance + + async with AsyncDifyClient("test-key") as client: + self.assertEqual(client.api_key, "test-key") + + # Verify aclose was called + mock_client_instance.aclose.assert_called_once() + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_send_request(self, mock_httpx_async_client): + """Test async _send_request method.""" + mock_response = AsyncMock() + mock_response.json = AsyncMock(return_value={"result": "success"}) + mock_response.status_code = 200 + + mock_client_instance = AsyncMock() + mock_client_instance.request = AsyncMock(return_value=mock_response) + mock_httpx_async_client.return_value = mock_client_instance + + async with AsyncDifyClient("test-key") as client: + response = await client._send_request("GET", "/test") + + # Verify request was called + mock_client_instance.request.assert_called_once() + call_args = mock_client_instance.request.call_args + + # Verify parameters + self.assertEqual(call_args[0][0], "GET") + self.assertEqual(call_args[0][1], "/test") + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_chat_client(self, mock_httpx_async_client): + """Test AsyncChatClient functionality.""" + mock_response = AsyncMock() + mock_response.text = '{"answer": "Hello!"}' + mock_response.json = AsyncMock(return_value={"answer": "Hello!"}) + + mock_client_instance = AsyncMock() + mock_client_instance.request = AsyncMock(return_value=mock_response) + mock_httpx_async_client.return_value = mock_client_instance + + async with AsyncChatClient("test-key") as client: + response = await client.create_chat_message({}, "Hi", "user123") + self.assertIn("answer", response.text) + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_completion_client(self, mock_httpx_async_client): + """Test AsyncCompletionClient 
functionality.""" + mock_response = AsyncMock() + mock_response.text = '{"answer": "Response"}' + mock_response.json = AsyncMock(return_value={"answer": "Response"}) + + mock_client_instance = AsyncMock() + mock_client_instance.request = AsyncMock(return_value=mock_response) + mock_httpx_async_client.return_value = mock_client_instance + + async with AsyncCompletionClient("test-key") as client: + response = await client.create_completion_message({"query": "test"}, "blocking", "user123") + self.assertIn("answer", response.text) + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_workflow_client(self, mock_httpx_async_client): + """Test AsyncWorkflowClient functionality.""" + mock_response = AsyncMock() + mock_response.json = AsyncMock(return_value={"result": "success"}) + + mock_client_instance = AsyncMock() + mock_client_instance.request = AsyncMock(return_value=mock_response) + mock_httpx_async_client.return_value = mock_client_instance + + async with AsyncWorkflowClient("test-key") as client: + response = await client.run({"input": "test"}, "blocking", "user123") + data = await response.json() + self.assertEqual(data["result"], "success") + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_workspace_client(self, mock_httpx_async_client): + """Test AsyncWorkspaceClient functionality.""" + mock_response = AsyncMock() + mock_response.json = AsyncMock(return_value={"data": []}) + + mock_client_instance = AsyncMock() + mock_client_instance.request = AsyncMock(return_value=mock_response) + mock_httpx_async_client.return_value = mock_client_instance + + async with AsyncWorkspaceClient("test-key") as client: + response = await client.get_available_models("llm") + data = await response.json() + self.assertIn("data", data) + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_async_knowledge_base_client(self, mock_httpx_async_client): + """Test AsyncKnowledgeBaseClient functionality.""" + mock_response = AsyncMock() + mock_response.json = AsyncMock(return_value={"data": [], "total": 0}) + + mock_client_instance = AsyncMock() + mock_client_instance.request = AsyncMock(return_value=mock_response) + mock_httpx_async_client.return_value = mock_client_instance + + async with AsyncKnowledgeBaseClient("test-key") as client: + response = await client.list_datasets() + data = await response.json() + self.assertIn("data", data) + + @patch("dify_client.async_client.httpx.AsyncClient") + async def test_all_async_client_classes(self, mock_httpx_async_client): + """Test all async client classes work with httpx.AsyncClient.""" + mock_client_instance = AsyncMock() + mock_httpx_async_client.return_value = mock_client_instance + + clients = [ + AsyncDifyClient("key"), + AsyncChatClient("key"), + AsyncCompletionClient("key"), + AsyncWorkflowClient("key"), + AsyncWorkspaceClient("key"), + AsyncKnowledgeBaseClient("key"), + ] + + # Verify httpx.AsyncClient was called for each + self.assertEqual(mock_httpx_async_client.call_count, 6) + + # Clean up + for client in clients: + await client.aclose() + + +if __name__ == "__main__": + unittest.main() diff --git a/sdks/python-client/tests/test_httpx_migration.py b/sdks/python-client/tests/test_httpx_migration.py new file mode 100644 index 0000000000..b8e434d7ec --- /dev/null +++ b/sdks/python-client/tests/test_httpx_migration.py @@ -0,0 +1,331 @@ +#!/usr/bin/env python3 +""" +Test suite for httpx migration in the Python SDK. 
+ +This test validates that the migration from requests to httpx maintains +backward compatibility and proper resource management. +""" + +import unittest +from unittest.mock import Mock, patch + +from dify_client import ( + DifyClient, + ChatClient, + CompletionClient, + WorkflowClient, + WorkspaceClient, + KnowledgeBaseClient, +) + + +class TestHttpxMigrationMocked(unittest.TestCase): + """Test cases for httpx migration with mocked requests.""" + + def setUp(self): + """Set up test fixtures.""" + self.api_key = "test-api-key" + self.base_url = "https://api.dify.ai/v1" + + @patch("dify_client.client.httpx.Client") + def test_client_initialization(self, mock_httpx_client): + """Test that client initializes with httpx.Client.""" + mock_client_instance = Mock() + mock_httpx_client.return_value = mock_client_instance + + client = DifyClient(self.api_key, self.base_url) + + # Verify httpx.Client was called with correct parameters + mock_httpx_client.assert_called_once() + call_kwargs = mock_httpx_client.call_args[1] + self.assertEqual(call_kwargs["base_url"], self.base_url) + + # Verify client properties + self.assertEqual(client.api_key, self.api_key) + self.assertEqual(client.base_url, self.base_url) + + client.close() + + @patch("dify_client.client.httpx.Client") + def test_context_manager_support(self, mock_httpx_client): + """Test that client works as context manager.""" + mock_client_instance = Mock() + mock_httpx_client.return_value = mock_client_instance + + with DifyClient(self.api_key, self.base_url) as client: + self.assertEqual(client.api_key, self.api_key) + + # Verify close was called + mock_client_instance.close.assert_called_once() + + @patch("dify_client.client.httpx.Client") + def test_manual_close(self, mock_httpx_client): + """Test manual close() method.""" + mock_client_instance = Mock() + mock_httpx_client.return_value = mock_client_instance + + client = DifyClient(self.api_key, self.base_url) + client.close() + + # Verify close was called + mock_client_instance.close.assert_called_once() + + @patch("dify_client.client.httpx.Client") + def test_send_request_httpx_compatibility(self, mock_httpx_client): + """Test _send_request uses httpx.Client.request properly.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + mock_response.status_code = 200 + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + client = DifyClient(self.api_key, self.base_url) + response = client._send_request("GET", "/test-endpoint") + + # Verify httpx.Client.request was called correctly + mock_client_instance.request.assert_called_once() + call_args = mock_client_instance.request.call_args + + # Verify method and endpoint + self.assertEqual(call_args[0][0], "GET") + self.assertEqual(call_args[0][1], "/test-endpoint") + + # Verify headers contain authorization + headers = call_args[1]["headers"] + self.assertEqual(headers["Authorization"], f"Bearer {self.api_key}") + self.assertEqual(headers["Content-Type"], "application/json") + + client.close() + + @patch("dify_client.client.httpx.Client") + def test_response_compatibility(self, mock_httpx_client): + """Test httpx.Response is compatible with requests.Response API.""" + mock_response = Mock() + mock_response.json.return_value = {"key": "value"} + mock_response.text = '{"key": "value"}' + mock_response.content = b'{"key": "value"}' + mock_response.status_code = 200 + mock_response.headers = {"Content-Type": 
"application/json"} + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + client = DifyClient(self.api_key, self.base_url) + response = client._send_request("GET", "/test") + + # Verify all common response methods work + self.assertEqual(response.json(), {"key": "value"}) + self.assertEqual(response.text, '{"key": "value"}') + self.assertEqual(response.content, b'{"key": "value"}') + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers["Content-Type"], "application/json") + + client.close() + + @patch("dify_client.client.httpx.Client") + def test_all_client_classes_use_httpx(self, mock_httpx_client): + """Test that all client classes properly use httpx.""" + mock_client_instance = Mock() + mock_httpx_client.return_value = mock_client_instance + + clients = [ + DifyClient(self.api_key, self.base_url), + ChatClient(self.api_key, self.base_url), + CompletionClient(self.api_key, self.base_url), + WorkflowClient(self.api_key, self.base_url), + WorkspaceClient(self.api_key, self.base_url), + KnowledgeBaseClient(self.api_key, self.base_url), + ] + + # Verify httpx.Client was called for each client + self.assertEqual(mock_httpx_client.call_count, 6) + + # Clean up + for client in clients: + client.close() + + @patch("dify_client.client.httpx.Client") + def test_json_parameter_handling(self, mock_httpx_client): + """Test that json parameter is passed correctly.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + client = DifyClient(self.api_key, self.base_url) + test_data = {"key": "value", "number": 123} + + client._send_request("POST", "/test", json=test_data) + + # Verify json parameter was passed + call_args = mock_client_instance.request.call_args + self.assertEqual(call_args[1]["json"], test_data) + + client.close() + + @patch("dify_client.client.httpx.Client") + def test_params_parameter_handling(self, mock_httpx_client): + """Test that params parameter is passed correctly.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + client = DifyClient(self.api_key, self.base_url) + test_params = {"page": 1, "limit": 20} + + client._send_request("GET", "/test", params=test_params) + + # Verify params parameter was passed + call_args = mock_client_instance.request.call_args + self.assertEqual(call_args[1]["params"], test_params) + + client.close() + + @patch("dify_client.client.httpx.Client") + def test_inheritance_chain(self, mock_httpx_client): + """Test that inheritance chain is maintained.""" + mock_client_instance = Mock() + mock_httpx_client.return_value = mock_client_instance + + # ChatClient inherits from DifyClient + chat_client = ChatClient(self.api_key, self.base_url) + self.assertIsInstance(chat_client, DifyClient) + + # CompletionClient inherits from DifyClient + completion_client = CompletionClient(self.api_key, self.base_url) + self.assertIsInstance(completion_client, DifyClient) + + # WorkflowClient inherits from DifyClient + workflow_client = WorkflowClient(self.api_key, self.base_url) + self.assertIsInstance(workflow_client, DifyClient) + + # Clean up + chat_client.close() + 
completion_client.close() + workflow_client.close() + + @patch("dify_client.client.httpx.Client") + def test_nested_context_managers(self, mock_httpx_client): + """Test nested context managers work correctly.""" + mock_client_instance = Mock() + mock_httpx_client.return_value = mock_client_instance + + with DifyClient(self.api_key, self.base_url) as client1: + with ChatClient(self.api_key, self.base_url) as client2: + self.assertEqual(client1.api_key, self.api_key) + self.assertEqual(client2.api_key, self.api_key) + + # Both close methods should have been called + self.assertEqual(mock_client_instance.close.call_count, 2) + + +class TestChatClientHttpx(unittest.TestCase): + """Test ChatClient specific httpx integration.""" + + @patch("dify_client.client.httpx.Client") + def test_create_chat_message_httpx(self, mock_httpx_client): + """Test create_chat_message works with httpx.""" + mock_response = Mock() + mock_response.text = '{"answer": "Hello!"}' + mock_response.json.return_value = {"answer": "Hello!"} + mock_response.status_code = 200 + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + with ChatClient("test-key") as client: + response = client.create_chat_message({}, "Hi", "user123") + self.assertIn("answer", response.text) + self.assertEqual(response.json()["answer"], "Hello!") + + +class TestCompletionClientHttpx(unittest.TestCase): + """Test CompletionClient specific httpx integration.""" + + @patch("dify_client.client.httpx.Client") + def test_create_completion_message_httpx(self, mock_httpx_client): + """Test create_completion_message works with httpx.""" + mock_response = Mock() + mock_response.text = '{"answer": "Response"}' + mock_response.json.return_value = {"answer": "Response"} + mock_response.status_code = 200 + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + with CompletionClient("test-key") as client: + response = client.create_completion_message({"query": "test"}, "blocking", "user123") + self.assertIn("answer", response.text) + + +class TestKnowledgeBaseClientHttpx(unittest.TestCase): + """Test KnowledgeBaseClient specific httpx integration.""" + + @patch("dify_client.client.httpx.Client") + def test_list_datasets_httpx(self, mock_httpx_client): + """Test list_datasets works with httpx.""" + mock_response = Mock() + mock_response.json.return_value = {"data": [], "total": 0} + mock_response.status_code = 200 + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + with KnowledgeBaseClient("test-key") as client: + response = client.list_datasets() + data = response.json() + self.assertIn("data", data) + self.assertIn("total", data) + + +class TestWorkflowClientHttpx(unittest.TestCase): + """Test WorkflowClient specific httpx integration.""" + + @patch("dify_client.client.httpx.Client") + def test_run_workflow_httpx(self, mock_httpx_client): + """Test run workflow works with httpx.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + mock_response.status_code = 200 + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + with WorkflowClient("test-key") as client: + response = client.run({"input": "test"}, "blocking", "user123") + 
self.assertEqual(response.json()["result"], "success") + + +class TestWorkspaceClientHttpx(unittest.TestCase): + """Test WorkspaceClient specific httpx integration.""" + + @patch("dify_client.client.httpx.Client") + def test_get_available_models_httpx(self, mock_httpx_client): + """Test get_available_models works with httpx.""" + mock_response = Mock() + mock_response.json.return_value = {"data": []} + mock_response.status_code = 200 + + mock_client_instance = Mock() + mock_client_instance.request.return_value = mock_response + mock_httpx_client.return_value = mock_client_instance + + with WorkspaceClient("test-key") as client: + response = client.get_available_models("llm") + self.assertIn("data", response.json()) + + +if __name__ == "__main__": + unittest.main() diff --git a/sdks/python-client/uv.lock b/sdks/python-client/uv.lock new file mode 100644 index 0000000000..19f348289b --- /dev/null +++ b/sdks/python-client/uv.lock @@ -0,0 +1,271 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" + +[[package]] +name = "aiofiles" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "dify-client" +version = "0.1.12" +source = { editable = "." } +dependencies = [ + { name = "aiofiles" }, + { name = "httpx" }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiofiles", specifier = ">=23.0.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" }, +] +provides-extras = ["dev"] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = 
"sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, 
upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, 
upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] diff --git a/web/__tests__/document-detail-navigation-fix.test.tsx b/web/__tests__/document-detail-navigation-fix.test.tsx index 200ed09ea9..a358744998 100644 --- a/web/__tests__/document-detail-navigation-fix.test.tsx +++ b/web/__tests__/document-detail-navigation-fix.test.tsx @@ -54,7 +54,7 @@ const DocumentDetailWithFix = ({ datasetId, documentId }: { datasetId: string; d return (
-
diff --git a/web/__tests__/goto-anything/command-selector.test.tsx b/web/__tests__/goto-anything/command-selector.test.tsx index 1db4be31fb..6d4e045d49 100644 --- a/web/__tests__/goto-anything/command-selector.test.tsx +++ b/web/__tests__/goto-anything/command-selector.test.tsx @@ -16,7 +16,7 @@ jest.mock('cmdk', () => ({ Item: ({ children, onSelect, value, className }: any) => (
onSelect && onSelect()} + onClick={() => onSelect?.()} data-value={value} data-testid={`command-item-${value}`} > diff --git a/web/__tests__/real-browser-flicker.test.tsx b/web/__tests__/real-browser-flicker.test.tsx index 52bdf4777f..f71e8de515 100644 --- a/web/__tests__/real-browser-flicker.test.tsx +++ b/web/__tests__/real-browser-flicker.test.tsx @@ -13,39 +13,60 @@ import { ThemeProvider } from 'next-themes' import useTheme from '@/hooks/use-theme' import { useEffect, useState } from 'react' +const DARK_MODE_MEDIA_QUERY = /prefers-color-scheme:\s*dark/i + // Setup browser environment for testing const setupMockEnvironment = (storedTheme: string | null, systemPrefersDark = false) => { - // Mock localStorage - const mockStorage = { - getItem: jest.fn((key: string) => { - if (key === 'theme') return storedTheme - return null - }), - setItem: jest.fn(), - removeItem: jest.fn(), + if (typeof window === 'undefined') + return + + try { + window.localStorage.clear() + } + catch { + // ignore if localStorage has been replaced by a throwing stub } - // Mock system theme preference - const mockMatchMedia = jest.fn((query: string) => ({ - matches: query.includes('dark') && systemPrefersDark, - media: query, - addListener: jest.fn(), - removeListener: jest.fn(), - })) + if (storedTheme === null) + window.localStorage.removeItem('theme') + else + window.localStorage.setItem('theme', storedTheme) - if (typeof window !== 'undefined') { - Object.defineProperty(window, 'localStorage', { - value: mockStorage, - configurable: true, - }) + document.documentElement.removeAttribute('data-theme') - Object.defineProperty(window, 'matchMedia', { - value: mockMatchMedia, - configurable: true, - }) + const mockMatchMedia: typeof window.matchMedia = (query: string) => { + const listeners = new Set<(event: MediaQueryListEvent) => void>() + const isDarkQuery = DARK_MODE_MEDIA_QUERY.test(query) + const matches = isDarkQuery ? 
systemPrefersDark : false + + const mediaQueryList: MediaQueryList = { + matches, + media: query, + onchange: null, + addListener: (listener: MediaQueryListListener) => { + listeners.add(listener) + }, + removeListener: (listener: MediaQueryListListener) => { + listeners.delete(listener) + }, + addEventListener: (_event, listener: EventListener) => { + if (typeof listener === 'function') + listeners.add(listener as MediaQueryListListener) + }, + removeEventListener: (_event, listener: EventListener) => { + if (typeof listener === 'function') + listeners.delete(listener as MediaQueryListListener) + }, + dispatchEvent: (event: Event) => { + listeners.forEach(listener => listener(event as MediaQueryListEvent)) + return true + }, + } + + return mediaQueryList } - return { mockStorage, mockMatchMedia } + jest.spyOn(window, 'matchMedia').mockImplementation(mockMatchMedia) } // Simulate real page component based on Dify's actual theme usage @@ -94,7 +115,17 @@ const TestThemeProvider = ({ children }: { children: React.ReactNode }) => ( describe('Real Browser Environment Dark Mode Flicker Test', () => { beforeEach(() => { + jest.restoreAllMocks() jest.clearAllMocks() + if (typeof window !== 'undefined') { + try { + window.localStorage.clear() + } + catch { + // ignore when localStorage is replaced with an error-throwing stub + } + document.documentElement.removeAttribute('data-theme') + } }) describe('Page Refresh Scenario Simulation', () => { @@ -323,35 +354,40 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => { describe('Edge Cases and Error Handling', () => { test('handles localStorage access errors gracefully', async () => { - // Mock localStorage to throw an error + setupMockEnvironment(null) + const mockStorage = { getItem: jest.fn(() => { throw new Error('LocalStorage access denied') }), setItem: jest.fn(), removeItem: jest.fn(), + clear: jest.fn(), } - if (typeof window !== 'undefined') { - Object.defineProperty(window, 'localStorage', { - value: mockStorage, - configurable: true, - }) - } - - render( - - - , - ) - - // Should fallback gracefully without crashing - await waitFor(() => { - expect(screen.getByTestId('theme-indicator')).toBeInTheDocument() + Object.defineProperty(window, 'localStorage', { + value: mockStorage, + configurable: true, }) - // Should default to light theme when localStorage fails - expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light') + try { + render( + + + , + ) + + // Should fallback gracefully without crashing + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toBeInTheDocument() + }) + + // Should default to light theme when localStorage fails + expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light') + } + finally { + Reflect.deleteProperty(window, 'localStorage') + } }) test('handles invalid theme values in localStorage', async () => { @@ -403,6 +439,8 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => { setupMockEnvironment('dark') + expect(window.localStorage.getItem('theme')).toBe('dark') + render( diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx index 1ab40e31bf..246a1eb6a3 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx @@ -4,6 +4,7 @@ import React, 
{ useCallback, useRef, useState } from 'react' import type { PopupProps } from './config-popup' import ConfigPopup from './config-popup' +import cn from '@/utils/classnames' import { PortalToFollowElem, PortalToFollowElemContent, @@ -45,7 +46,7 @@ const ConfigBtn: FC = ({ offset={12} > -
+
{children}
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx index 907c270017..e4c3f60c12 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import TracingIcon from './tracing-icon' import ProviderPanel from './provider-panel' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import ProviderConfigModal from './provider-config-modal' import Indicator from '@/app/components/header/indicator' @@ -30,7 +30,8 @@ export type PopupProps = { opikConfig: OpikConfig | null weaveConfig: WeaveConfig | null aliyunConfig: AliyunConfig | null - onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void + tencentConfig: TencentConfig | null + onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void onConfigRemoved: (provider: TracingProvider) => void } @@ -48,6 +49,7 @@ const ConfigPopup: FC = ({ opikConfig, weaveConfig, aliyunConfig, + tencentConfig, onConfigUpdated, onConfigRemoved, }) => { @@ -81,8 +83,8 @@ const ConfigPopup: FC = ({ hideConfigModal() }, [currentProvider, hideConfigModal, onConfigRemoved]) - const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig - const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig + const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig && tencentConfig + const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig && !tencentConfig const switchContent = ( = ({ key="aliyun-provider-panel" /> ) + + const tencentPanel = ( + + ) const configuredProviderPanel = () => { const configuredPanels: JSX.Element[] = [] @@ -206,6 +221,9 @@ const ConfigPopup: FC = ({ if (aliyunConfig) configuredPanels.push(aliyunPanel) + if (tencentConfig) + configuredPanels.push(tencentPanel) + return configuredPanels } @@ -233,6 +251,9 @@ const ConfigPopup: FC = ({ if (!aliyunConfig) notConfiguredPanels.push(aliyunPanel) + if (!tencentConfig) + notConfiguredPanels.push(tencentPanel) + return notConfiguredPanels } @@ -249,6 +270,8 @@ const ConfigPopup: FC = ({ return opikConfig if (currentProvider === TracingProvider.aliyun) return aliyunConfig + if (currentProvider === TracingProvider.tencent) + return tencentConfig return weaveConfig } @@ -297,6 +320,7 @@ const ConfigPopup: FC = ({ {arizePanel} {phoenixPanel} {aliyunPanel} + {tencentPanel}
) diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts index 4c81b63ea2..00f6224e9e 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts @@ -8,4 +8,5 @@ export const docURL = { [TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions', [TracingProvider.weave]: 'https://weave-docs.wandb.ai/', [TracingProvider.aliyun]: 'https://help.aliyun.com/zh/arms/tracing-analysis/untitled-document-1750672984680', + [TracingProvider.tencent]: 'https://cloud.tencent.com/document/product/248/116531', } diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx index f79745c4dd..e1fd39fd48 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx @@ -8,12 +8,12 @@ import { import { useTranslation } from 'react-i18next' import { usePathname } from 'next/navigation' import { useBoolean } from 'ahooks' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import TracingIcon from './tracing-icon' import ConfigButton from './config-button' import cn from '@/utils/classnames' -import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing' +import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, TencentIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing' import Indicator from '@/app/components/header/indicator' import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps' import type { TracingStatus } from '@/models/app' @@ -71,6 +71,7 @@ const Panel: FC = () => { [TracingProvider.opik]: OpikIcon, [TracingProvider.weave]: WeaveIcon, [TracingProvider.aliyun]: AliyunIcon, + [TracingProvider.tencent]: TencentIcon, } const InUseProviderIcon = inUseTracingProvider ? 
providerIconMap[inUseTracingProvider] : undefined @@ -81,7 +82,8 @@ const Panel: FC = () => { const [opikConfig, setOpikConfig] = useState(null) const [weaveConfig, setWeaveConfig] = useState(null) const [aliyunConfig, setAliyunConfig] = useState(null) - const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig) + const [tencentConfig, setTencentConfig] = useState(null) + const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig || tencentConfig) const fetchTracingConfig = async () => { const getArizeConfig = async () => { @@ -119,6 +121,11 @@ const Panel: FC = () => { if (!aliyunHasNotConfig) setAliyunConfig(aliyunConfig as AliyunConfig) } + const getTencentConfig = async () => { + const { tracing_config: tencentConfig, has_not_configured: tencentHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.tencent }) + if (!tencentHasNotConfig) + setTencentConfig(tencentConfig as TencentConfig) + } Promise.all([ getArizeConfig(), getPhoenixConfig(), @@ -127,6 +134,7 @@ const Panel: FC = () => { getOpikConfig(), getWeaveConfig(), getAliyunConfig(), + getTencentConfig(), ]) } @@ -147,6 +155,8 @@ const Panel: FC = () => { setWeaveConfig(tracing_config as WeaveConfig) else if (provider === TracingProvider.aliyun) setAliyunConfig(tracing_config as AliyunConfig) + else if (provider === TracingProvider.tencent) + setTencentConfig(tracing_config as TencentConfig) } const handleTracingConfigRemoved = (provider: TracingProvider) => { @@ -164,6 +174,8 @@ const Panel: FC = () => { setWeaveConfig(null) else if (provider === TracingProvider.aliyun) setAliyunConfig(null) + else if (provider === TracingProvider.tencent) + setTencentConfig(null) if (provider === inUseTracingProvider) { handleTracingStatusChange({ enabled: false, @@ -209,6 +221,7 @@ const Panel: FC = () => { opikConfig={opikConfig} weaveConfig={weaveConfig} aliyunConfig={aliyunConfig} + tencentConfig={tencentConfig} onConfigUpdated={handleTracingConfigUpdated} onConfigRemoved={handleTracingConfigRemoved} > @@ -245,6 +258,7 @@ const Panel: FC = () => { opikConfig={opikConfig} weaveConfig={weaveConfig} aliyunConfig={aliyunConfig} + tencentConfig={tencentConfig} onConfigUpdated={handleTracingConfigUpdated} onConfigRemoved={handleTracingConfigRemoved} > diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx index 318f1f61d6..9682bf6a07 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx @@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import Field from './field' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import { docURL } from './config' import { @@ -22,10 +22,10 @@ import Divider from '@/app/components/base/divider' type Props = { appId: string type: TracingProvider - payload?: 
ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | null + payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig | null onRemoved: () => void onCancel: () => void - onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void + onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void onChosen: (provider: TracingProvider) => void } @@ -77,6 +77,12 @@ const aliyunConfigTemplate = { endpoint: '', } +const tencentConfigTemplate = { + token: '', + endpoint: '', + service_name: '', +} + const ProviderConfigModal: FC = ({ appId, type, @@ -90,7 +96,7 @@ const ProviderConfigModal: FC = ({ const isEdit = !!payload const isAdd = !isEdit const [isSaving, setIsSaving] = useState(false) - const [config, setConfig] = useState((() => { + const [config, setConfig] = useState((() => { if (isEdit) return payload @@ -112,6 +118,9 @@ const ProviderConfigModal: FC = ({ else if (type === TracingProvider.aliyun) return aliyunConfigTemplate + else if (type === TracingProvider.tencent) + return tencentConfigTemplate + return weaveConfigTemplate })()) const [isShowRemoveConfirm, { @@ -202,6 +211,16 @@ const ProviderConfigModal: FC = ({ errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' }) } + if (type === TracingProvider.tencent) { + const postData = config as TencentConfig + if (!errorMessage && !postData.token) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Token' }) + if (!errorMessage && !postData.endpoint) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' }) + if (!errorMessage && !postData.service_name) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Service Name' }) + } + return errorMessage }, [config, t, type]) const handleSave = useCallback(async () => { @@ -338,6 +357,34 @@ const ProviderConfigModal: FC = ({ /> )} + {type === TracingProvider.tencent && ( + <> + + + + + )} {type === TracingProvider.weave && ( <> { [TracingProvider.opik]: OpikIconBig, [TracingProvider.weave]: WeaveIconBig, [TracingProvider.aliyun]: AliyunIconBig, + [TracingProvider.tencent]: TencentIconBig, })[type] } diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts index 78bca41ad2..719451f5d0 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts @@ -6,6 +6,7 @@ export enum TracingProvider { opik = 'opik', weave = 'weave', aliyun = 'aliyun', + tencent = 'tencent', } export type ArizeConfig = { @@ -53,3 +54,9 @@ export type AliyunConfig = { license_key: string endpoint: string } + +export type TencentConfig = { + token: string + endpoint: string + service_name: string +} diff --git a/web/app/components/app-sidebar/app-info.tsx b/web/app/components/app-sidebar/app-info.tsx index d22577c9ad..baf52946df 100644 --- a/web/app/components/app-sidebar/app-info.tsx +++ b/web/app/components/app-sidebar/app-info.tsx @@ -260,7 +260,7 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx return (
{!onlyShowDetail && ( - - -
- + +
{ {t('app.editApp')} - - {(app.mode === 'completion' || app.mode === 'chat') && ( <> : !(isGettingUserCanAccessApp || !userCanAccessApp?.result) && ( <> - @@ -300,13 +301,14 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { { systemFeatures.webapp_auth.enabled && isCurrentWorkspaceEditor && <> - } - + ((e.target as HTMLInputElement).value = '')} diff --git a/web/app/components/base/app-icon-picker/index.tsx b/web/app/components/base/app-icon-picker/index.tsx index a8de07bf6b..3deb6a6c8f 100644 --- a/web/app/components/base/app-icon-picker/index.tsx +++ b/web/app/components/base/app-icon-picker/index.tsx @@ -117,7 +117,7 @@ const AppIconPicker: FC = ({ {!DISABLE_UPLOAD_IMAGE_AS_ICON &&
{tabs.map(tab => ( -